diff --git a/backend/remote-state/azure/arm_client.go b/backend/remote-state/azure/arm_client.go
index ff6ff9200..8db63a6dd 100644
--- a/backend/remote-state/azure/arm_client.go
+++ b/backend/remote-state/azure/arm_client.go
@@ -13,7 +13,6 @@ import (
armStorage "github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/storage/mgmt/storage"
"github.com/Azure/azure-sdk-for-go/storage"
"github.com/Azure/go-autorest/autorest"
- "github.com/Azure/go-autorest/autorest/adal"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/hashicorp/go-azure-helpers/authentication"
"github.com/hashicorp/terraform/httpclient"
@@ -75,7 +74,7 @@ func buildArmClient(config BackendConfig) (*ArmClient, error) {
return nil, fmt.Errorf("Error building ARM Config: %+v", err)
}
- oauthConfig, err := adal.NewOAuthConfig(env.ActiveDirectoryEndpoint, armConfig.TenantID)
+ oauthConfig, err := armConfig.BuildOAuthConfig(env.ActiveDirectoryEndpoint)
if err != nil {
return nil, err
}
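
For reference, the arm_client.go hunk above swaps the direct `adal.NewOAuthConfig` call for the `BuildOAuthConfig` helper on go-azure-helpers' `authentication.Config`, so OAuth endpoint handling is delegated to that library. A minimal sketch of the resulting call path follows; aside from `BuildOAuthConfig` and the error message, which appear in the hunk above, the surrounding builder and function names are illustrative assumptions, not code from this patch.

```go
// Sketch only: assumes go-azure-helpers v0.7.0's authentication.Builder/Config.
package azure

import (
	"fmt"

	"github.com/Azure/go-autorest/autorest/azure"
	"github.com/hashicorp/go-azure-helpers/authentication"
)

// buildOAuthSketch mirrors the updated buildArmClient flow: Build() validates the
// backend credentials, then BuildOAuthConfig derives the OAuth config for the
// environment's Active Directory endpoint (previously done via adal.NewOAuthConfig).
func buildOAuthSketch(builder authentication.Builder, env azure.Environment) error {
	armConfig, err := builder.Build()
	if err != nil {
		return fmt.Errorf("Error building ARM Config: %+v", err)
	}

	oauthConfig, err := armConfig.BuildOAuthConfig(env.ActiveDirectoryEndpoint)
	if err != nil {
		return err
	}

	_ = oauthConfig // fed into token/authorizer construction further down in arm_client.go
	return nil
}
```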
diff --git a/go.mod b/go.mod
index 825a3736e..88b124053 100644
--- a/go.mod
+++ b/go.mod
@@ -2,9 +2,9 @@ module github.com/hashicorp/terraform
require (
cloud.google.com/go v0.36.0
- github.com/Azure/azure-sdk-for-go v31.2.0+incompatible
- github.com/Azure/go-autorest/autorest v0.5.0
- github.com/Azure/go-autorest/autorest/adal v0.2.0
+ github.com/Azure/azure-sdk-for-go v32.5.0+incompatible
+ github.com/Azure/go-autorest/autorest v0.9.0
+ github.com/Azure/go-autorest/autorest/adal v0.6.0
github.com/Unknwon/com v0.0.0-20151008135407-28b053d5a292 // indirect
github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af // indirect
github.com/agext/levenshtein v1.2.2
@@ -43,10 +43,11 @@ require (
github.com/gorilla/websocket v1.4.0 // indirect
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0 // indirect
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 // indirect
+ github.com/grpc-ecosystem/grpc-gateway v1.8.5 // indirect
github.com/hashicorp/aws-sdk-go-base v0.3.0
github.com/hashicorp/consul v0.0.0-20171026175957-610f3c86a089
github.com/hashicorp/errwrap v1.0.0
- github.com/hashicorp/go-azure-helpers v0.5.0
+ github.com/hashicorp/go-azure-helpers v0.7.0
github.com/hashicorp/go-checkpoint v0.5.0
github.com/hashicorp/go-cleanhttp v0.5.0
github.com/hashicorp/go-getter v1.3.1-0.20190627223108-da0323b9545e
@@ -112,6 +113,7 @@ require (
github.com/xlab/treeprint v0.0.0-20161029104018-1d6e34225557
github.com/zclconf/go-cty v1.0.1-0.20190708163926-19588f92a98f
github.com/zclconf/go-cty-yaml v1.0.1
+ go.opencensus.io v0.20.2 // indirect
go.uber.org/atomic v1.3.2 // indirect
go.uber.org/multierr v1.1.0 // indirect
go.uber.org/zap v1.9.1 // indirect
diff --git a/go.sum b/go.sum
index 22bea10e9..8a9a1f3c0 100644
--- a/go.sum
+++ b/go.sum
@@ -4,36 +4,37 @@ cloud.google.com/go v0.31.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.36.0 h1:+aCSj7tOo2LODWVEuZDZeGCckdt6MlSF+X/rB3wUiS8=
cloud.google.com/go v0.36.0/go.mod h1:RUoy9p/M4ge0HzT8L+SDZ8jg+Q6fth0CiBuhFJpSV40=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12 h1:jGFvw3l57ViIVEPKKEUXPcLYIXJmQxLUh6ey1eJhwyc=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12/go.mod h1:450APlNTSR6FrvC3CTRqYosuDstRB9un7SOx2k/9ckA=
dmitri.shuralyov.com/app/changes v0.0.0-20180602232624-0a106ad413e3/go.mod h1:Yl+fi1br7+Rr3LqpNJf1/uxUdtRUV+Tnj0o93V2B9MU=
dmitri.shuralyov.com/html/belt v0.0.0-20180602232347-f7d459c86be0/go.mod h1:JLBrvjyP0v+ecvNYvCpyZgu5/xkfAUhi6wJj28eUfSU=
dmitri.shuralyov.com/service/change v0.0.0-20181023043359-a85b471d5412/go.mod h1:a1inKt/atXimZ4Mv927x+r7UpyzRUf4emIoiiSC2TN4=
dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D6DFvNNtx+9ybjezNCa8XF0xaYcETyp6rHWU=
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
-github.com/Azure/azure-sdk-for-go v31.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
-github.com/Azure/azure-sdk-for-go v31.2.0+incompatible h1:kZFnTLmdQYNGfakatSivKHUfUnDZhqNdchHD4oIhp5k=
-github.com/Azure/azure-sdk-for-go v31.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
-github.com/Azure/go-autorest/autorest v0.3.0/go.mod h1:AKyIcETwSUFxIcs/Wnq/C+kwCtlEYGUVd7FPNb2slmg=
-github.com/Azure/go-autorest/autorest v0.5.0 h1:Mlm9qy2fpQ9MvfyI41G2Zf5B4CsgjjNbLOWszfK6KrY=
-github.com/Azure/go-autorest/autorest v0.5.0/go.mod h1:9HLKlQjVBH6U3oDfsXOeVc56THsLPw1L03yban4xThw=
-github.com/Azure/go-autorest/autorest/adal v0.1.0/go.mod h1:MeS4XhScH55IST095THyTxElntu7WqB7pNbZo8Q5G3E=
-github.com/Azure/go-autorest/autorest/adal v0.2.0 h1:7IBDu1jgh+ADHXnEYExkV9RE/ztOOlxdACkkPRthGKw=
-github.com/Azure/go-autorest/autorest/adal v0.2.0/go.mod h1:MeS4XhScH55IST095THyTxElntu7WqB7pNbZo8Q5G3E=
-github.com/Azure/go-autorest/autorest/azure/cli v0.1.0 h1:YTtBrcb6mhA+PoSW8WxFDoIIyjp13XqJeX80ssQtri4=
-github.com/Azure/go-autorest/autorest/azure/cli v0.1.0/go.mod h1:Dk8CUAt/b/PzkfeRsWzVG9Yj3ps8mS8ECztu43rdU8U=
+github.com/Azure/azure-sdk-for-go v32.5.0+incompatible h1:Hn/DsObfmw0M7dMGS/c0MlVrJuGFzHzOpBWL89acR68=
+github.com/Azure/azure-sdk-for-go v32.5.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
+github.com/Azure/go-autorest/autorest v0.9.0 h1:MRvx8gncNaXJqOoLmhNjUAKh33JJF8LyxPhomEtOsjs=
+github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
+github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
+github.com/Azure/go-autorest/autorest/adal v0.6.0 h1:UCTq22yE3RPgbU/8u4scfnnzuCW6pwQ9n+uBtV78ouo=
+github.com/Azure/go-autorest/autorest/adal v0.6.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc=
+github.com/Azure/go-autorest/autorest/azure/cli v0.2.0 h1:pSwNMF0qotgehbQNllUWwJ4V3vnrLKOzHrwDLEZK904=
+github.com/Azure/go-autorest/autorest/azure/cli v0.2.0/go.mod h1:WWTbGPvkAg3I4ms2j2s+Zr5xCGwGqTQh+6M2ZqOczkE=
github.com/Azure/go-autorest/autorest/date v0.1.0 h1:YGrhWfrgtFs84+h0o46rJrlmsZtyZRg470CqAXTZaGM=
github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
+github.com/Azure/go-autorest/autorest/date v0.2.0 h1:yW+Zlqf26583pE43KhfnhFcdmSWlm5Ew6bxipnr/tbM=
+github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g=
github.com/Azure/go-autorest/autorest/mocks v0.1.0 h1:Kx+AUU2Te+A3JIyYn6Dfs+cFgx5XorQKuIXrZGoq/SI=
github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
-github.com/Azure/go-autorest/autorest/to v0.2.0 h1:nQOZzFCudTh+TvquAtCRjM01VEYx85e9qbwt5ncW4L8=
-github.com/Azure/go-autorest/autorest/to v0.2.0/go.mod h1:GunWKJp1AEqgMaGLV+iocmRAJWqST1wQYhyyjXJ3SJc=
-github.com/Azure/go-autorest/autorest/validation v0.1.0 h1:ISSNzGUh+ZSzizJWOWzs8bwpXIePbGLW4z/AmUFGH5A=
-github.com/Azure/go-autorest/autorest/validation v0.1.0/go.mod h1:Ha3z/SqBeaalWQvokg3NZAlQTalVMtOIAs1aGK7G6u8=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/autorest/mocks v0.3.0 h1:qJumjCaCudz+OcqE9/XtEPfvtOjOmKaui4EOpFI6zZc=
+github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM=
+github.com/Azure/go-autorest/autorest/to v0.3.0 h1:zebkZaadz7+wIQYgC7GXaz3Wb28yKYfVkkBKwc38VF8=
+github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA=
+github.com/Azure/go-autorest/autorest/validation v0.2.0 h1:15vMO4y76dehZSq7pAaOLQxC6dZYsSrj2GQpflyM/L4=
+github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRYq3HT1WyXAvT7LLY3tl70hw6tQIbjI=
github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY=
github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
-github.com/Azure/go-autorest/tracing v0.1.0 h1:TRBxC5Pj/fIuh4Qob0ZpkggbfT8RC0SubHbpV3p4/Vc=
-github.com/Azure/go-autorest/tracing v0.1.0/go.mod h1:ROEEAFwXycQw7Sn3DXNtEedEvdeRAgDr0izn4z5Ij88=
+github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
+github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4 h1:pSm8mp0T2OH2CPmPDPtwHPr3VAQaOwVF/JbllOPP4xA=
github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
@@ -99,8 +100,6 @@ github.com/boltdb/bolt v1.3.1 h1:JQmyP4ZBrce+ZQu0dY660FMfatumYDLun9hBCUVIkF4=
github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
github.com/bsm/go-vlq v0.0.0-20150828105119-ec6e8d4f5f4e/go.mod h1:N+BjUcTjSxc2mtRGSCPsat1kze3CUtvJN3/jTXlp29k=
-github.com/census-instrumentation/opencensus-proto v0.2.0 h1:LzQXZOgg4CQfE6bFvXGM30YZL1WW/M337pXml+GrcZ4=
-github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s=
github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
@@ -211,8 +210,8 @@ github.com/hashicorp/consul v0.0.0-20171026175957-610f3c86a089/go.mod h1:mFrjN1m
github.com/hashicorp/errwrap v0.0.0-20180715044906-d6c0cd880357/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
-github.com/hashicorp/go-azure-helpers v0.5.0 h1:GW5YJKeeMfyIEZjiVf84Av2W6FizMwS1OYLtDwDwah0=
-github.com/hashicorp/go-azure-helpers v0.5.0/go.mod h1:1kVoV5ZV0b/Wc/Rck7dKgW0MhmUrZiRxt/OnG42Yeow=
+github.com/hashicorp/go-azure-helpers v0.7.0 h1:wxGpOyWYp15bjBMeL3pXKP5X3oFLZbThAMcJcU6x4FA=
+github.com/hashicorp/go-azure-helpers v0.7.0/go.mod h1:3xdjhbL7qs69rnwxA0UENOzkPJjtTFIRb5aRyrEpbCU=
github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU=
github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg=
github.com/hashicorp/go-cleanhttp v0.5.0 h1:wvCrVc9TjDls6+YGAF2hAifE1E5U1+b4tH6KdvN3Gig=
diff --git a/vendor/contrib.go.opencensus.io/exporter/ocagent/.travis.yml b/vendor/contrib.go.opencensus.io/exporter/ocagent/.travis.yml
deleted file mode 100644
index ee417bbe6..000000000
--- a/vendor/contrib.go.opencensus.io/exporter/ocagent/.travis.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-language: go
-
-go:
- - 1.11.x
-
-go_import_path: contrib.go.opencensus.io/exporter/ocagent
-
-before_script:
- - GO_FILES=$(find . -iname '*.go' | grep -v /vendor/) # All the .go files, excluding vendor/ if any
- - PKGS=$(go list ./... | grep -v /vendor/) # All the import paths, excluding vendor/ if any
-
-script:
- - go build ./... # Ensure dependency updates don't break build
- - if [ -n "$(gofmt -s -l $GO_FILES)" ]; then echo "gofmt the following files:"; gofmt -s -l $GO_FILES; exit 1; fi
- - go vet ./...
- - GO111MODULE=on go test -v -race $PKGS # Run all the tests with the race detector enabled
- - GO111MODULE=off go test -v -race $PKGS # Make sure tests still pass when not using Go modules.
- - 'if [[ $TRAVIS_GO_VERSION = 1.8* ]]; then ! golint ./... | grep -vE "(_mock|_string|\.pb)\.go:"; fi'
diff --git a/vendor/contrib.go.opencensus.io/exporter/ocagent/CONTRIBUTING.md b/vendor/contrib.go.opencensus.io/exporter/ocagent/CONTRIBUTING.md
deleted file mode 100644
index 0786fdf43..000000000
--- a/vendor/contrib.go.opencensus.io/exporter/ocagent/CONTRIBUTING.md
+++ /dev/null
@@ -1,24 +0,0 @@
-# How to contribute
-
-We'd love to accept your patches and contributions to this project. There are
-just a few small guidelines you need to follow.
-
-## Contributor License Agreement
-
-Contributions to this project must be accompanied by a Contributor License
-Agreement. You (or your employer) retain the copyright to your contribution,
-this simply gives us permission to use and redistribute your contributions as
-part of the project. Head over to <https://cla.developers.google.com/> to see
-your current agreements on file or to sign a new one.
-
-You generally only need to submit a CLA once, so if you've already submitted one
-(even if it was for a different project), you probably don't need to do it
-again.
-
-## Code reviews
-
-All submissions, including submissions by project members, require review. We
-use GitHub pull requests for this purpose. Consult [GitHub Help] for more
-information on using pull requests.
-
-[GitHub Help]: https://help.github.com/articles/about-pull-requests/
diff --git a/vendor/contrib.go.opencensus.io/exporter/ocagent/LICENSE b/vendor/contrib.go.opencensus.io/exporter/ocagent/LICENSE
deleted file mode 100644
index 261eeb9e9..000000000
--- a/vendor/contrib.go.opencensus.io/exporter/ocagent/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/contrib.go.opencensus.io/exporter/ocagent/README.md b/vendor/contrib.go.opencensus.io/exporter/ocagent/README.md
deleted file mode 100644
index 3b9e908f5..000000000
--- a/vendor/contrib.go.opencensus.io/exporter/ocagent/README.md
+++ /dev/null
@@ -1,61 +0,0 @@
-# OpenCensus Agent Go Exporter
-
-[![Build Status][travis-image]][travis-url] [![GoDoc][godoc-image]][godoc-url]
-
-
-This repository contains the Go implementation of the OpenCensus Agent (OC-Agent) Exporter.
-OC-Agent is a deamon process running in a VM that can retrieve spans/stats/metrics from
-OpenCensus Library, export them to other backends and possibly push configurations back to
-Library. See more details on [OC-Agent Readme][OCAgentReadme].
-
-Note: This is an experimental repository and is likely to get backwards-incompatible changes.
-Ultimately we may want to move the OC-Agent Go Exporter to [OpenCensus Go core library][OpenCensusGo].
-
-## Installation
-
-```bash
-$ go get -u contrib.go.opencensus.io/exporter/ocagent
-```
-
-## Usage
-
-```go
-import (
- "context"
- "fmt"
- "log"
- "time"
-
- "contrib.go.opencensus.io/exporter/ocagent"
- "go.opencensus.io/trace"
-)
-
-func Example() {
- exp, err := ocagent.NewExporter(ocagent.WithInsecure(), ocagent.WithServiceName("your-service-name"))
- if err != nil {
- log.Fatalf("Failed to create the agent exporter: %v", err)
- }
- defer exp.Stop()
-
- // Now register it as a trace exporter.
- trace.RegisterExporter(exp)
-
- // Then use the OpenCensus tracing library, like we normally would.
- ctx, span := trace.StartSpan(context.Background(), "AgentExporter-Example")
- defer span.End()
-
- for i := 0; i < 10; i++ {
- _, iSpan := trace.StartSpan(ctx, fmt.Sprintf("Sample-%d", i))
- <-time.After(6 * time.Millisecond)
- iSpan.End()
- }
-}
-```
-
-[OCAgentReadme]: https://github.com/census-instrumentation/opencensus-proto/tree/master/opencensus/proto/agent#opencensus-agent-proto
-[OpenCensusGo]: https://github.com/census-instrumentation/opencensus-go
-[godoc-image]: https://godoc.org/contrib.go.opencensus.io/exporter/ocagent?status.svg
-[godoc-url]: https://godoc.org/contrib.go.opencensus.io/exporter/ocagent
-[travis-image]: https://travis-ci.org/census-ecosystem/opencensus-go-exporter-ocagent.svg?branch=master
-[travis-url]: https://travis-ci.org/census-ecosystem/opencensus-go-exporter-ocagent
-
diff --git a/vendor/contrib.go.opencensus.io/exporter/ocagent/common.go b/vendor/contrib.go.opencensus.io/exporter/ocagent/common.go
deleted file mode 100644
index 297e44b6e..000000000
--- a/vendor/contrib.go.opencensus.io/exporter/ocagent/common.go
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2018, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ocagent
-
-import (
- "math/rand"
- "time"
-)
-
-var randSrc = rand.New(rand.NewSource(time.Now().UnixNano()))
-
-// retries function fn upto n times, if fn returns an error lest it returns nil early.
-// It applies exponential backoff in units of (1<<n) + jitter milliseconds
-	if len(ae.headers) > 0 {
- ctx = metadata.NewOutgoingContext(ctx, metadata.New(ae.headers))
- }
- traceExporter, err := traceSvcClient.Export(ctx)
- if err != nil {
- return fmt.Errorf("Exporter.Start:: TraceServiceClient: %v", err)
- }
-
- firstTraceMessage := &agenttracepb.ExportTraceServiceRequest{
- Node: node,
- Resource: ae.resource,
- }
- if err := traceExporter.Send(firstTraceMessage); err != nil {
- return fmt.Errorf("Exporter.Start:: Failed to initiate the Config service: %v", err)
- }
-
- ae.mu.Lock()
- ae.traceExporter = traceExporter
- ae.mu.Unlock()
-
- // Initiate the config service by sending over node identifier info.
- configStream, err := traceSvcClient.Config(context.Background())
- if err != nil {
- return fmt.Errorf("Exporter.Start:: ConfigStream: %v", err)
- }
- firstCfgMessage := &agenttracepb.CurrentLibraryConfig{Node: node}
- if err := configStream.Send(firstCfgMessage); err != nil {
- return fmt.Errorf("Exporter.Start:: Failed to initiate the Config service: %v", err)
- }
-
- // In the background, handle trace configurations that are beamed down
- // by the agent, but also reply to it with the applied configuration.
- go ae.handleConfigStreaming(configStream)
-
- return nil
-}
-
-func (ae *Exporter) createMetricsServiceConnection(cc *grpc.ClientConn, node *commonpb.Node) error {
- metricsSvcClient := agentmetricspb.NewMetricsServiceClient(cc)
- metricsExporter, err := metricsSvcClient.Export(context.Background())
- if err != nil {
- return fmt.Errorf("MetricsExporter: failed to start the service client: %v", err)
- }
- // Initiate the metrics service by sending over the first message just containing the Node and Resource.
- firstMetricsMessage := &agentmetricspb.ExportMetricsServiceRequest{
- Node: node,
- Resource: ae.resource,
- }
- if err := metricsExporter.Send(firstMetricsMessage); err != nil {
- return fmt.Errorf("MetricsExporter:: failed to send the first message: %v", err)
- }
-
- ae.mu.Lock()
- ae.metricsExporter = metricsExporter
- ae.mu.Unlock()
-
- // With that we are good to go and can start sending metrics
- return nil
-}
-
-func (ae *Exporter) dialToAgent() (*grpc.ClientConn, error) {
- addr := ae.prepareAgentAddress()
- var dialOpts []grpc.DialOption
- if ae.clientTransportCredentials != nil {
- dialOpts = append(dialOpts, grpc.WithTransportCredentials(ae.clientTransportCredentials))
- } else if ae.canDialInsecure {
- dialOpts = append(dialOpts, grpc.WithInsecure())
- }
- if ae.compressor != "" {
- dialOpts = append(dialOpts, grpc.WithDefaultCallOptions(grpc.UseCompressor(ae.compressor)))
- }
- dialOpts = append(dialOpts, grpc.WithStatsHandler(&ocgrpc.ClientHandler{}))
-
- ctx := context.Background()
- if len(ae.headers) > 0 {
- ctx = metadata.NewOutgoingContext(ctx, metadata.New(ae.headers))
- }
- return grpc.DialContext(ctx, addr, dialOpts...)
-}
-
-func (ae *Exporter) handleConfigStreaming(configStream agenttracepb.TraceService_ConfigClient) error {
- // Note: We haven't yet implemented configuration sending so we
- // should NOT be changing connection states within this function for now.
- for {
- recv, err := configStream.Recv()
- if err != nil {
- // TODO: Check if this is a transient error or exponential backoff-able.
- return err
- }
- cfg := recv.Config
- if cfg == nil {
- continue
- }
-
- // Otherwise now apply the trace configuration sent down from the agent
- if psamp := cfg.GetProbabilitySampler(); psamp != nil {
- trace.ApplyConfig(trace.Config{DefaultSampler: trace.ProbabilitySampler(psamp.SamplingProbability)})
- } else if csamp := cfg.GetConstantSampler(); csamp != nil {
- alwaysSample := csamp.Decision == tracepb.ConstantSampler_ALWAYS_ON
- if alwaysSample {
- trace.ApplyConfig(trace.Config{DefaultSampler: trace.AlwaysSample()})
- } else {
- trace.ApplyConfig(trace.Config{DefaultSampler: trace.NeverSample()})
- }
- } else { // TODO: Add the rate limiting sampler here
- }
-
- // Then finally send back to upstream the newly applied configuration
- err = configStream.Send(&agenttracepb.CurrentLibraryConfig{Config: &tracepb.TraceConfig{Sampler: cfg.Sampler}})
- if err != nil {
- return err
- }
- }
-}
-
-// Stop shuts down all the connections and resources
-// related to the exporter.
-func (ae *Exporter) Stop() error {
- ae.mu.RLock()
- cc := ae.grpcClientConn
- started := ae.started
- stopped := ae.stopped
- ae.mu.RUnlock()
-
- if !started {
- return errNotStarted
- }
- if stopped {
- // TODO: tell the user that we've already stopped, so perhaps a sentinel error?
- return nil
- }
-
- ae.Flush()
-
- // Now close the underlying gRPC connection.
- var err error
- if cc != nil {
- err = cc.Close()
- }
-
- // At this point we can change the state variables: started and stopped
- ae.mu.Lock()
- ae.started = false
- ae.stopped = true
- ae.mu.Unlock()
- close(ae.stopCh)
-
- // Ensure that the backgroundConnector returns
- <-ae.backgroundConnectionDoneCh
-
- return err
-}
-
-func (ae *Exporter) ExportSpan(sd *trace.SpanData) {
- if sd == nil {
- return
- }
- _ = ae.traceBundler.Add(sd, 1)
-}
-
-func (ae *Exporter) ExportTraceServiceRequest(batch *agenttracepb.ExportTraceServiceRequest) error {
- if batch == nil || len(batch.Spans) == 0 {
- return nil
- }
-
- select {
- case <-ae.stopCh:
- return errStopped
-
- default:
- if !ae.connected() {
- return errNoConnection
- }
-
- ae.senderMu.Lock()
- err := ae.traceExporter.Send(batch)
- ae.senderMu.Unlock()
- if err != nil {
- ae.setStateDisconnected()
- return err
- }
- return nil
- }
-}
-
-func (ae *Exporter) ExportView(vd *view.Data) {
- if vd == nil {
- return
- }
- _ = ae.viewDataBundler.Add(vd, 1)
-}
-
-func ocSpanDataToPbSpans(sdl []*trace.SpanData) []*tracepb.Span {
- if len(sdl) == 0 {
- return nil
- }
- protoSpans := make([]*tracepb.Span, 0, len(sdl))
- for _, sd := range sdl {
- if sd != nil {
- protoSpans = append(protoSpans, ocSpanToProtoSpan(sd))
- }
- }
- return protoSpans
-}
-
-func (ae *Exporter) uploadTraces(sdl []*trace.SpanData) {
- select {
- case <-ae.stopCh:
- return
-
- default:
- if !ae.connected() {
- return
- }
-
- protoSpans := ocSpanDataToPbSpans(sdl)
- if len(protoSpans) == 0 {
- return
- }
- ae.senderMu.Lock()
- err := ae.traceExporter.Send(&agenttracepb.ExportTraceServiceRequest{
- Spans: protoSpans,
- })
- ae.senderMu.Unlock()
- if err != nil {
- ae.setStateDisconnected()
- }
- }
-}
-
-func ocViewDataToPbMetrics(vdl []*view.Data) []*metricspb.Metric {
- if len(vdl) == 0 {
- return nil
- }
- metrics := make([]*metricspb.Metric, 0, len(vdl))
- for _, vd := range vdl {
- if vd != nil {
- vmetric, err := viewDataToMetric(vd)
- // TODO: (@odeke-em) somehow report this error, if it is non-nil.
- if err == nil && vmetric != nil {
- metrics = append(metrics, vmetric)
- }
- }
- }
- return metrics
-}
-
-func (ae *Exporter) uploadViewData(vdl []*view.Data) {
- select {
- case <-ae.stopCh:
- return
-
- default:
- if !ae.connected() {
- return
- }
-
- protoMetrics := ocViewDataToPbMetrics(vdl)
- if len(protoMetrics) == 0 {
- return
- }
- err := ae.metricsExporter.Send(&agentmetricspb.ExportMetricsServiceRequest{
- Metrics: protoMetrics,
- // TODO:(@odeke-em)
- // a) Figure out how to derive a Node from the environment
- // b) Figure out how to derive a Resource from the environment
- // or better letting users of the exporter configure it.
- })
- if err != nil {
- ae.setStateDisconnected()
- }
- }
-}
-
-func (ae *Exporter) Flush() {
- ae.traceBundler.Flush()
- ae.viewDataBundler.Flush()
-}
-
-func resourceProtoFromEnv() *resourcepb.Resource {
- rs, _ := resource.FromEnv(context.Background())
- if rs == nil {
- return nil
- }
-
- rprs := &resourcepb.Resource{
- Type: rs.Type,
- }
- if rs.Labels != nil {
- rprs.Labels = make(map[string]string)
- for k, v := range rs.Labels {
- rprs.Labels[k] = v
- }
- }
- return rprs
-}
diff --git a/vendor/contrib.go.opencensus.io/exporter/ocagent/options.go b/vendor/contrib.go.opencensus.io/exporter/ocagent/options.go
deleted file mode 100644
index 3e05ae8b3..000000000
--- a/vendor/contrib.go.opencensus.io/exporter/ocagent/options.go
+++ /dev/null
@@ -1,128 +0,0 @@
-// Copyright 2018, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ocagent
-
-import (
- "time"
-
- "google.golang.org/grpc/credentials"
-)
-
-const (
- DefaultAgentPort uint16 = 55678
- DefaultAgentHost string = "localhost"
-)
-
-type ExporterOption interface {
- withExporter(e *Exporter)
-}
-
-type insecureGrpcConnection int
-
-var _ ExporterOption = (*insecureGrpcConnection)(nil)
-
-func (igc *insecureGrpcConnection) withExporter(e *Exporter) {
- e.canDialInsecure = true
-}
-
-// WithInsecure disables client transport security for the exporter's gRPC connection
-// just like grpc.WithInsecure() https://godoc.org/google.golang.org/grpc#WithInsecure
-// does. Note, by default, client security is required unless WithInsecure is used.
-func WithInsecure() ExporterOption { return new(insecureGrpcConnection) }
-
-type addressSetter string
-
-func (as addressSetter) withExporter(e *Exporter) {
- e.agentAddress = string(as)
-}
-
-var _ ExporterOption = (*addressSetter)(nil)
-
-// WithAddress allows one to set the address that the exporter will
-// connect to the agent on. If unset, it will instead try to use
-// connect to DefaultAgentHost:DefaultAgentPort
-func WithAddress(addr string) ExporterOption {
- return addressSetter(addr)
-}
-
-type serviceNameSetter string
-
-func (sns serviceNameSetter) withExporter(e *Exporter) {
- e.serviceName = string(sns)
-}
-
-var _ ExporterOption = (*serviceNameSetter)(nil)
-
-// WithServiceName allows one to set/override the service name
-// that the exporter will report to the agent.
-func WithServiceName(serviceName string) ExporterOption {
- return serviceNameSetter(serviceName)
-}
-
-type reconnectionPeriod time.Duration
-
-func (rp reconnectionPeriod) withExporter(e *Exporter) {
- e.reconnectionPeriod = time.Duration(rp)
-}
-
-func WithReconnectionPeriod(rp time.Duration) ExporterOption {
- return reconnectionPeriod(rp)
-}
-
-type compressorSetter string
-
-func (c compressorSetter) withExporter(e *Exporter) {
- e.compressor = string(c)
-}
-
-// UseCompressor will set the compressor for the gRPC client to use when sending requests.
-// It is the responsibility of the caller to ensure that the compressor set has been registered
-// with google.golang.org/grpc/encoding. This can be done by encoding.RegisterCompressor. Some
-// compressors auto-register on import, such as gzip, which can be registered by calling
-// `import _ "google.golang.org/grpc/encoding/gzip"`
-func UseCompressor(compressorName string) ExporterOption {
- return compressorSetter(compressorName)
-}
-
-type headerSetter map[string]string
-
-func (h headerSetter) withExporter(e *Exporter) {
- e.headers = map[string]string(h)
-}
-
-// WithHeaders will send the provided headers when the gRPC stream connection
-// is instantiated
-func WithHeaders(headers map[string]string) ExporterOption {
- return headerSetter(headers)
-}
-
-type clientCredentials struct {
- credentials.TransportCredentials
-}
-
-var _ ExporterOption = (*clientCredentials)(nil)
-
-// WithTLSCredentials allows the connection to use TLS credentials
-// when talking to the server. It takes in grpc.TransportCredentials instead
-// of say a Certificate file or a tls.Certificate, because the retrieving
-// these credentials can be done in many ways e.g. plain file, in code tls.Config
-// or by certificate rotation, so it is up to the caller to decide what to use.
-func WithTLSCredentials(creds credentials.TransportCredentials) ExporterOption {
- return &clientCredentials{TransportCredentials: creds}
-}
-
-func (cc *clientCredentials) withExporter(e *Exporter) {
- e.clientTransportCredentials = cc.TransportCredentials
-}
diff --git a/vendor/contrib.go.opencensus.io/exporter/ocagent/transform_spans.go b/vendor/contrib.go.opencensus.io/exporter/ocagent/transform_spans.go
deleted file mode 100644
index 983ebe7b7..000000000
--- a/vendor/contrib.go.opencensus.io/exporter/ocagent/transform_spans.go
+++ /dev/null
@@ -1,248 +0,0 @@
-// Copyright 2018, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ocagent
-
-import (
- "math"
- "time"
-
- "go.opencensus.io/trace"
- "go.opencensus.io/trace/tracestate"
-
- tracepb "github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1"
- "github.com/golang/protobuf/ptypes/timestamp"
-)
-
-const (
- maxAnnotationEventsPerSpan = 32
- maxMessageEventsPerSpan = 128
-)
-
-func ocSpanToProtoSpan(sd *trace.SpanData) *tracepb.Span {
- if sd == nil {
- return nil
- }
- var namePtr *tracepb.TruncatableString
- if sd.Name != "" {
- namePtr = &tracepb.TruncatableString{Value: sd.Name}
- }
- return &tracepb.Span{
- TraceId: sd.TraceID[:],
- SpanId: sd.SpanID[:],
- ParentSpanId: sd.ParentSpanID[:],
- Status: ocStatusToProtoStatus(sd.Status),
- StartTime: timeToTimestamp(sd.StartTime),
- EndTime: timeToTimestamp(sd.EndTime),
- Links: ocLinksToProtoLinks(sd.Links),
- Kind: ocSpanKindToProtoSpanKind(sd.SpanKind),
- Name: namePtr,
- Attributes: ocAttributesToProtoAttributes(sd.Attributes),
- TimeEvents: ocTimeEventsToProtoTimeEvents(sd.Annotations, sd.MessageEvents),
- Tracestate: ocTracestateToProtoTracestate(sd.Tracestate),
- }
-}
-
-var blankStatus trace.Status
-
-func ocStatusToProtoStatus(status trace.Status) *tracepb.Status {
- if status == blankStatus {
- return nil
- }
- return &tracepb.Status{
- Code: status.Code,
- Message: status.Message,
- }
-}
-
-func ocLinksToProtoLinks(links []trace.Link) *tracepb.Span_Links {
- if len(links) == 0 {
- return nil
- }
-
- sl := make([]*tracepb.Span_Link, 0, len(links))
- for _, ocLink := range links {
- // This redefinition is necessary to prevent ocLink.*ID[:] copies
- // being reused -- in short we need a new ocLink per iteration.
- ocLink := ocLink
-
- sl = append(sl, &tracepb.Span_Link{
- TraceId: ocLink.TraceID[:],
- SpanId: ocLink.SpanID[:],
- Type: ocLinkTypeToProtoLinkType(ocLink.Type),
- })
- }
-
- return &tracepb.Span_Links{
- Link: sl,
- }
-}
-
-func ocLinkTypeToProtoLinkType(oct trace.LinkType) tracepb.Span_Link_Type {
- switch oct {
- case trace.LinkTypeChild:
- return tracepb.Span_Link_CHILD_LINKED_SPAN
- case trace.LinkTypeParent:
- return tracepb.Span_Link_PARENT_LINKED_SPAN
- default:
- return tracepb.Span_Link_TYPE_UNSPECIFIED
- }
-}
-
-func ocAttributesToProtoAttributes(attrs map[string]interface{}) *tracepb.Span_Attributes {
- if len(attrs) == 0 {
- return nil
- }
- outMap := make(map[string]*tracepb.AttributeValue)
- for k, v := range attrs {
- switch v := v.(type) {
- case bool:
- outMap[k] = &tracepb.AttributeValue{Value: &tracepb.AttributeValue_BoolValue{BoolValue: v}}
-
- case int:
- outMap[k] = &tracepb.AttributeValue{Value: &tracepb.AttributeValue_IntValue{IntValue: int64(v)}}
-
- case int64:
- outMap[k] = &tracepb.AttributeValue{Value: &tracepb.AttributeValue_IntValue{IntValue: v}}
-
- case string:
- outMap[k] = &tracepb.AttributeValue{
- Value: &tracepb.AttributeValue_StringValue{
- StringValue: &tracepb.TruncatableString{Value: v},
- },
- }
- }
- }
- return &tracepb.Span_Attributes{
- AttributeMap: outMap,
- }
-}
-
-// This code is mostly copied from
-// https://github.com/census-ecosystem/opencensus-go-exporter-stackdriver/blob/master/trace_proto.go#L46
-func ocTimeEventsToProtoTimeEvents(as []trace.Annotation, es []trace.MessageEvent) *tracepb.Span_TimeEvents {
- if len(as) == 0 && len(es) == 0 {
- return nil
- }
-
- timeEvents := &tracepb.Span_TimeEvents{}
- var annotations, droppedAnnotationsCount int
- var messageEvents, droppedMessageEventsCount int
-
- // Transform annotations
- for i, a := range as {
- if annotations >= maxAnnotationEventsPerSpan {
- droppedAnnotationsCount = len(as) - i
- break
- }
- annotations++
- timeEvents.TimeEvent = append(timeEvents.TimeEvent,
- &tracepb.Span_TimeEvent{
- Time: timeToTimestamp(a.Time),
- Value: transformAnnotationToTimeEvent(&a),
- },
- )
- }
-
- // Transform message events
- for i, e := range es {
- if messageEvents >= maxMessageEventsPerSpan {
- droppedMessageEventsCount = len(es) - i
- break
- }
- messageEvents++
- timeEvents.TimeEvent = append(timeEvents.TimeEvent,
- &tracepb.Span_TimeEvent{
- Time: timeToTimestamp(e.Time),
- Value: transformMessageEventToTimeEvent(&e),
- },
- )
- }
-
- // Process dropped counter
- timeEvents.DroppedAnnotationsCount = clip32(droppedAnnotationsCount)
- timeEvents.DroppedMessageEventsCount = clip32(droppedMessageEventsCount)
-
- return timeEvents
-}
-
-func transformAnnotationToTimeEvent(a *trace.Annotation) *tracepb.Span_TimeEvent_Annotation_ {
- return &tracepb.Span_TimeEvent_Annotation_{
- Annotation: &tracepb.Span_TimeEvent_Annotation{
- Description: &tracepb.TruncatableString{Value: a.Message},
- Attributes: ocAttributesToProtoAttributes(a.Attributes),
- },
- }
-}
-
-func transformMessageEventToTimeEvent(e *trace.MessageEvent) *tracepb.Span_TimeEvent_MessageEvent_ {
- return &tracepb.Span_TimeEvent_MessageEvent_{
- MessageEvent: &tracepb.Span_TimeEvent_MessageEvent{
- Type: tracepb.Span_TimeEvent_MessageEvent_Type(e.EventType),
- Id: uint64(e.MessageID),
- UncompressedSize: uint64(e.UncompressedByteSize),
- CompressedSize: uint64(e.CompressedByteSize),
- },
- }
-}
-
-// clip32 clips an int to the range of an int32.
-func clip32(x int) int32 {
- if x < math.MinInt32 {
- return math.MinInt32
- }
- if x > math.MaxInt32 {
- return math.MaxInt32
- }
- return int32(x)
-}
-
-func timeToTimestamp(t time.Time) *timestamp.Timestamp {
-	nanoTime := t.UnixNano()
-	return &timestamp.Timestamp{
- Seconds: nanoTime / 1e9,
- Nanos: int32(nanoTime % 1e9),
- }
-}
-
-func ocSpanKindToProtoSpanKind(kind int) tracepb.Span_SpanKind {
- switch kind {
- case trace.SpanKindClient:
- return tracepb.Span_CLIENT
- case trace.SpanKindServer:
- return tracepb.Span_SERVER
- default:
- return tracepb.Span_SPAN_KIND_UNSPECIFIED
- }
-}
-
-func ocTracestateToProtoTracestate(ts *tracestate.Tracestate) *tracepb.Span_Tracestate {
- if ts == nil {
- return nil
- }
- return &tracepb.Span_Tracestate{
- Entries: ocTracestateEntriesToProtoTracestateEntries(ts.Entries()),
- }
-}
-
-func ocTracestateEntriesToProtoTracestateEntries(entries []tracestate.Entry) []*tracepb.Span_Tracestate_Entry {
- protoEntries := make([]*tracepb.Span_Tracestate_Entry, 0, len(entries))
- for _, entry := range entries {
- protoEntries = append(protoEntries, &tracepb.Span_Tracestate_Entry{
- Key: entry.Key,
- Value: entry.Value,
- })
- }
- return protoEntries
-}
diff --git a/vendor/contrib.go.opencensus.io/exporter/ocagent/transform_stats_to_metrics.go b/vendor/contrib.go.opencensus.io/exporter/ocagent/transform_stats_to_metrics.go
deleted file mode 100644
index 43f18dec1..000000000
--- a/vendor/contrib.go.opencensus.io/exporter/ocagent/transform_stats_to_metrics.go
+++ /dev/null
@@ -1,274 +0,0 @@
-// Copyright 2018, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ocagent
-
-import (
- "errors"
- "time"
-
- "go.opencensus.io/stats"
- "go.opencensus.io/stats/view"
- "go.opencensus.io/tag"
-
- "github.com/golang/protobuf/ptypes/timestamp"
-
- metricspb "github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1"
-)
-
-var (
- errNilMeasure = errors.New("expecting a non-nil stats.Measure")
- errNilView = errors.New("expecting a non-nil view.View")
- errNilViewData = errors.New("expecting a non-nil view.Data")
-)
-
-func viewDataToMetric(vd *view.Data) (*metricspb.Metric, error) {
- if vd == nil {
- return nil, errNilViewData
- }
-
- descriptor, err := viewToMetricDescriptor(vd.View)
- if err != nil {
- return nil, err
- }
-
- timeseries, err := viewDataToTimeseries(vd)
- if err != nil {
- return nil, err
- }
-
- metric := &metricspb.Metric{
- MetricDescriptor: descriptor,
- Timeseries: timeseries,
- }
- return metric, nil
-}
-
-func viewToMetricDescriptor(v *view.View) (*metricspb.MetricDescriptor, error) {
- if v == nil {
- return nil, errNilView
- }
- if v.Measure == nil {
- return nil, errNilMeasure
- }
-
- desc := &metricspb.MetricDescriptor{
- Name: stringOrCall(v.Name, v.Measure.Name),
- Description: stringOrCall(v.Description, v.Measure.Description),
- Unit: v.Measure.Unit(),
- Type: aggregationToMetricDescriptorType(v),
- LabelKeys: tagKeysToLabelKeys(v.TagKeys),
- }
- return desc, nil
-}
-
-func stringOrCall(first string, call func() string) string {
- if first != "" {
- return first
- }
- return call()
-}
-
-type measureType uint
-
-const (
- measureUnknown measureType = iota
- measureInt64
- measureFloat64
-)
-
-func measureTypeFromMeasure(m stats.Measure) measureType {
- switch m.(type) {
- default:
- return measureUnknown
- case *stats.Float64Measure:
- return measureFloat64
- case *stats.Int64Measure:
- return measureInt64
- }
-}
-
-func aggregationToMetricDescriptorType(v *view.View) metricspb.MetricDescriptor_Type {
- if v == nil || v.Aggregation == nil {
- return metricspb.MetricDescriptor_UNSPECIFIED
- }
- if v.Measure == nil {
- return metricspb.MetricDescriptor_UNSPECIFIED
- }
-
- switch v.Aggregation.Type {
- case view.AggTypeCount:
- // Cumulative on int64
- return metricspb.MetricDescriptor_CUMULATIVE_INT64
-
- case view.AggTypeDistribution:
- // Cumulative types
- return metricspb.MetricDescriptor_CUMULATIVE_DISTRIBUTION
-
- case view.AggTypeLastValue:
- // Gauge types
- switch measureTypeFromMeasure(v.Measure) {
- case measureFloat64:
- return metricspb.MetricDescriptor_GAUGE_DOUBLE
- case measureInt64:
- return metricspb.MetricDescriptor_GAUGE_INT64
- }
-
- case view.AggTypeSum:
- // Cumulative types
- switch measureTypeFromMeasure(v.Measure) {
- case measureFloat64:
- return metricspb.MetricDescriptor_CUMULATIVE_DOUBLE
- case measureInt64:
- return metricspb.MetricDescriptor_CUMULATIVE_INT64
- }
- }
-
- // For all other cases, return unspecified.
- return metricspb.MetricDescriptor_UNSPECIFIED
-}
-
-func tagKeysToLabelKeys(tagKeys []tag.Key) []*metricspb.LabelKey {
- labelKeys := make([]*metricspb.LabelKey, 0, len(tagKeys))
- for _, tagKey := range tagKeys {
- labelKeys = append(labelKeys, &metricspb.LabelKey{
- Key: tagKey.Name(),
- })
- }
- return labelKeys
-}
-
-func viewDataToTimeseries(vd *view.Data) ([]*metricspb.TimeSeries, error) {
- if vd == nil || len(vd.Rows) == 0 {
- return nil, nil
- }
-
- // Given that view.Data only contains Start, End
- // the timestamps for all the row data will be the exact same
- // per aggregation. However, the values will differ.
- // Each row has its own tags.
- startTimestamp := timeToProtoTimestamp(vd.Start)
- endTimestamp := timeToProtoTimestamp(vd.End)
-
- mType := measureTypeFromMeasure(vd.View.Measure)
- timeseries := make([]*metricspb.TimeSeries, 0, len(vd.Rows))
- // It is imperative that the ordering of "LabelValues" matches those
- // of the Label keys in the metric descriptor.
- for _, row := range vd.Rows {
- labelValues := labelValuesFromTags(row.Tags)
- point := rowToPoint(vd.View, row, endTimestamp, mType)
- timeseries = append(timeseries, &metricspb.TimeSeries{
- StartTimestamp: startTimestamp,
- LabelValues: labelValues,
- Points: []*metricspb.Point{point},
- })
- }
-
- if len(timeseries) == 0 {
- return nil, nil
- }
-
- return timeseries, nil
-}
-
-func timeToProtoTimestamp(t time.Time) *timestamp.Timestamp {
-	unixNano := t.UnixNano()
-	return &timestamp.Timestamp{
- Seconds: int64(unixNano / 1e9),
- Nanos: int32(unixNano % 1e9),
- }
-}
-
-func rowToPoint(v *view.View, row *view.Row, endTimestamp *timestamp.Timestamp, mType measureType) *metricspb.Point {
- pt := &metricspb.Point{
- Timestamp: endTimestamp,
- }
-
- switch data := row.Data.(type) {
- case *view.CountData:
- pt.Value = &metricspb.Point_Int64Value{Int64Value: data.Value}
-
- case *view.DistributionData:
- pt.Value = &metricspb.Point_DistributionValue{
- DistributionValue: &metricspb.DistributionValue{
- Count: data.Count,
- Sum: float64(data.Count) * data.Mean, // because Mean := Sum/Count
- // TODO: Add Exemplar
- Buckets: bucketsToProtoBuckets(data.CountPerBucket),
- BucketOptions: &metricspb.DistributionValue_BucketOptions{
- Type: &metricspb.DistributionValue_BucketOptions_Explicit_{
- Explicit: &metricspb.DistributionValue_BucketOptions_Explicit{
- Bounds: v.Aggregation.Buckets,
- },
- },
- },
- SumOfSquaredDeviation: data.SumOfSquaredDev,
- }}
-
- case *view.LastValueData:
- setPointValue(pt, data.Value, mType)
-
- case *view.SumData:
- setPointValue(pt, data.Value, mType)
- }
-
- return pt
-}
-
-// Not returning anything from this function because metricspb.Point.is_Value is an unexported
-// interface hence we just have to set its value by pointer.
-func setPointValue(pt *metricspb.Point, value float64, mType measureType) {
- if mType == measureInt64 {
- pt.Value = &metricspb.Point_Int64Value{Int64Value: int64(value)}
- } else {
- pt.Value = &metricspb.Point_DoubleValue{DoubleValue: value}
- }
-}
-
-func bucketsToProtoBuckets(countPerBucket []int64) []*metricspb.DistributionValue_Bucket {
- distBuckets := make([]*metricspb.DistributionValue_Bucket, len(countPerBucket))
- for i := 0; i < len(countPerBucket); i++ {
- count := countPerBucket[i]
-
- distBuckets[i] = &metricspb.DistributionValue_Bucket{
- Count: count,
- }
- }
-
- return distBuckets
-}
-
-func labelValuesFromTags(tags []tag.Tag) []*metricspb.LabelValue {
- if len(tags) == 0 {
- return nil
- }
-
- labelValues := make([]*metricspb.LabelValue, 0, len(tags))
- for _, tag_ := range tags {
- labelValues = append(labelValues, &metricspb.LabelValue{
- Value: tag_.Value,
-
- // It is imperative that we set the "HasValue" attribute,
- // in order to distinguish missing a label from the empty string.
- // https://godoc.org/github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1#LabelValue.HasValue
- //
- // OpenCensus-Go uses non-pointers for tags as seen by this function's arguments,
- // so the best case that we can use to distinguish missing labels/tags from the
- // empty string is by checking if the Tag.Key.Name() != "" to indicate that we have
- // a value.
- HasValue: tag_.Key.Name() != "",
- })
- }
- return labelValues
-}
diff --git a/vendor/contrib.go.opencensus.io/exporter/ocagent/version.go b/vendor/contrib.go.opencensus.io/exporter/ocagent/version.go
deleted file mode 100644
index 68be4c75b..000000000
--- a/vendor/contrib.go.opencensus.io/exporter/ocagent/version.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2018, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ocagent
-
-const Version = "0.0.1"
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/deploymentoperations.go b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/deploymentoperations.go
index 67d7f6a30..3047aa00b 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/deploymentoperations.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/deploymentoperations.go
@@ -111,8 +111,8 @@ func (client DeploymentOperationsClient) GetPreparer(ctx context.Context, resour
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentOperationsClient) GetSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// GetResponder handles the response to the Get request. The method always
@@ -201,8 +201,8 @@ func (client DeploymentOperationsClient) ListPreparer(ctx context.Context, resou
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentOperationsClient) ListSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
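Every regenerated `*Sender` method in this SDK bump follows the same pattern: instead of hard-coding `azure.DoRetryWithRegistration` as the only send decorator, it asks `autorest.GetSendDecorators` for decorators stored in the request context and uses retry-with-registration only as the fallback, so callers can override send behavior per request. A minimal sketch of that hook, assuming `autorest.WithSendDecorators` and `autorest.DoRetryForAttempts` from go-autorest v0.9.0 (neither appears in this diff):

```go
package main

import (
	"context"

	"github.com/Azure/go-autorest/autorest"
)

func main() {
	// Stash replacement send decorators in the context; the regenerated
	// *Sender methods retrieve them via autorest.GetSendDecorators and only
	// fall back to azure.DoRetryWithRegistration when none are present.
	sd := []autorest.SendDecorator{autorest.DoRetryForAttempts(3, 0)}
	ctx := autorest.WithSendDecorators(context.Background(), sd)
	_ = ctx // pass ctx to e.g. DeploymentOperationsClient.Get(ctx, ...)
}
```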
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/deployments.go b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/deployments.go
index 54ddcb2c2..13f3f58a3 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/deployments.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/deployments.go
@@ -109,8 +109,8 @@ func (client DeploymentsClient) CancelPreparer(ctx context.Context, resourceGrou
// CancelSender sends the Cancel request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentsClient) CancelSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// CancelResponder handles the response to the Cancel request. The method always
@@ -193,8 +193,8 @@ func (client DeploymentsClient) CheckExistencePreparer(ctx context.Context, reso
// CheckExistenceSender sends the CheckExistence request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentsClient) CheckExistenceSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// CheckExistenceResponder handles the response to the CheckExistence request. The method always
@@ -281,9 +281,9 @@ func (client DeploymentsClient) CreateOrUpdatePreparer(ctx context.Context, reso
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentsClient) CreateOrUpdateSender(req *http.Request) (future DeploymentsCreateOrUpdateFuture, err error) {
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
var resp *http.Response
- resp, err = autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ resp, err = autorest.SendWithSender(client, req, sd...)
if err != nil {
return
}
@@ -366,9 +366,9 @@ func (client DeploymentsClient) DeletePreparer(ctx context.Context, resourceGrou
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentsClient) DeleteSender(req *http.Request) (future DeploymentsDeleteFuture, err error) {
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
var resp *http.Response
- resp, err = autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ resp, err = autorest.SendWithSender(client, req, sd...)
if err != nil {
return
}
@@ -456,8 +456,8 @@ func (client DeploymentsClient) ExportTemplatePreparer(ctx context.Context, reso
// ExportTemplateSender sends the ExportTemplate request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentsClient) ExportTemplateSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ExportTemplateResponder handles the response to the ExportTemplate request. The method always
@@ -541,8 +541,8 @@ func (client DeploymentsClient) GetPreparer(ctx context.Context, resourceGroupNa
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentsClient) GetSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// GetResponder handles the response to the Get request. The method always
@@ -633,8 +633,8 @@ func (client DeploymentsClient) ListPreparer(ctx context.Context, resourceGroupN
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentsClient) ListSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
@@ -765,8 +765,8 @@ func (client DeploymentsClient) ValidatePreparer(ctx context.Context, resourceGr
// ValidateSender sends the Validate request. The method will close the
// http.Response Body if it receives an error.
func (client DeploymentsClient) ValidateSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ValidateResponder handles the response to the Validate request. The method always
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/groups.go b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/groups.go
index b8fb3d070..9858b639b 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/groups.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/groups.go
@@ -107,8 +107,8 @@ func (client GroupsClient) CheckExistencePreparer(ctx context.Context, resourceG
// CheckExistenceSender sends the CheckExistence request. The method will close the
// http.Response Body if it receives an error.
func (client GroupsClient) CheckExistenceSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// CheckExistenceResponder handles the response to the CheckExistence request. The method always
@@ -195,8 +195,8 @@ func (client GroupsClient) CreateOrUpdatePreparer(ctx context.Context, resourceG
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client GroupsClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
@@ -272,9 +272,9 @@ func (client GroupsClient) DeletePreparer(ctx context.Context, resourceGroupName
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client GroupsClient) DeleteSender(req *http.Request) (future GroupsDeleteFuture, err error) {
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
var resp *http.Response
- resp, err = autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ resp, err = autorest.SendWithSender(client, req, sd...)
if err != nil {
return
}
@@ -363,8 +363,8 @@ func (client GroupsClient) ExportTemplatePreparer(ctx context.Context, resourceG
// ExportTemplateSender sends the ExportTemplate request. The method will close the
// http.Response Body if it receives an error.
func (client GroupsClient) ExportTemplateSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ExportTemplateResponder handles the response to the ExportTemplate request. The method always
@@ -446,8 +446,8 @@ func (client GroupsClient) GetPreparer(ctx context.Context, resourceGroupName st
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client GroupsClient) GetSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// GetResponder handles the response to the Get request. The method always
@@ -528,8 +528,8 @@ func (client GroupsClient) ListPreparer(ctx context.Context, filter string, top
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client GroupsClient) ListSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
@@ -661,8 +661,8 @@ func (client GroupsClient) ListResourcesPreparer(ctx context.Context, resourceGr
// ListResourcesSender sends the ListResources request. The method will close the
// http.Response Body if it receives an error.
func (client GroupsClient) ListResourcesSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListResourcesResponder handles the response to the ListResources request. The method always
@@ -787,8 +787,8 @@ func (client GroupsClient) PatchPreparer(ctx context.Context, resourceGroupName
// PatchSender sends the Patch request. The method will close the
// http.Response Body if it receives an error.
func (client GroupsClient) PatchSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// PatchResponder handles the response to the Patch request. The method always
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/providers.go b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/providers.go
index 66a4c8319..7ba9bdfc8 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/providers.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/providers.go
@@ -103,8 +103,8 @@ func (client ProvidersClient) GetPreparer(ctx context.Context, resourceProviderN
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client ProvidersClient) GetSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// GetResponder handles the response to the Get request. The method always
@@ -186,8 +186,8 @@ func (client ProvidersClient) ListPreparer(ctx context.Context, top *int32, expa
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client ProvidersClient) ListSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
@@ -298,8 +298,8 @@ func (client ProvidersClient) RegisterPreparer(ctx context.Context, resourceProv
// RegisterSender sends the Register request. The method will close the
// http.Response Body if it receives an error.
func (client ProvidersClient) RegisterSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// RegisterResponder handles the response to the Register request. The method always
@@ -373,8 +373,8 @@ func (client ProvidersClient) UnregisterPreparer(ctx context.Context, resourcePr
// UnregisterSender sends the Unregister request. The method will close the
// http.Response Body if it receives an error.
func (client ProvidersClient) UnregisterSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// UnregisterResponder handles the response to the Unregister request. The method always
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/resources.go b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/resources.go
index c197f530a..d0c2d4c5f 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/resources.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/resources.go
@@ -115,8 +115,8 @@ func (client Client) CheckExistencePreparer(ctx context.Context, resourceGroupNa
// CheckExistenceSender sends the CheckExistence request. The method will close the
// http.Response Body if it receives an error.
func (client Client) CheckExistenceSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// CheckExistenceResponder handles the response to the CheckExistence request. The method always
@@ -208,8 +208,8 @@ func (client Client) CreateOrUpdatePreparer(ctx context.Context, resourceGroupNa
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client Client) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
@@ -299,8 +299,8 @@ func (client Client) DeletePreparer(ctx context.Context, resourceGroupName strin
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client Client) DeleteSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// DeleteResponder handles the response to the Delete request. The method always
@@ -389,8 +389,8 @@ func (client Client) GetPreparer(ctx context.Context, resourceGroupName string,
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client Client) GetSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// GetResponder handles the response to the Get request. The method always
@@ -475,8 +475,8 @@ func (client Client) ListPreparer(ctx context.Context, filter string, expand str
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client Client) ListSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
@@ -593,9 +593,9 @@ func (client Client) MoveResourcesPreparer(ctx context.Context, sourceResourceGr
// MoveResourcesSender sends the MoveResources request. The method will close the
// http.Response Body if it receives an error.
func (client Client) MoveResourcesSender(req *http.Request) (future MoveResourcesFuture, err error) {
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
var resp *http.Response
- resp, err = autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ resp, err = autorest.SendWithSender(client, req, sd...)
if err != nil {
return
}
@@ -686,9 +686,9 @@ func (client Client) UpdatePreparer(ctx context.Context, resourceGroupName strin
// UpdateSender sends the Update request. The method will close the
// http.Response Body if it receives an error.
func (client Client) UpdateSender(req *http.Request) (future UpdateFuture, err error) {
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
var resp *http.Response
- resp, err = autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ resp, err = autorest.SendWithSender(client, req, sd...)
if err != nil {
return
}
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/tags.go b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/tags.go
index 7d5b286c4..62f9d05b9 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/tags.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources/tags.go
@@ -98,8 +98,8 @@ func (client TagsClient) CreateOrUpdatePreparer(ctx context.Context, tagName str
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client TagsClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
@@ -175,8 +175,8 @@ func (client TagsClient) CreateOrUpdateValuePreparer(ctx context.Context, tagNam
// CreateOrUpdateValueSender sends the CreateOrUpdateValue request. The method will close the
// http.Response Body if it receives an error.
func (client TagsClient) CreateOrUpdateValueSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// CreateOrUpdateValueResponder handles the response to the CreateOrUpdateValue request. The method always
@@ -250,8 +250,8 @@ func (client TagsClient) DeletePreparer(ctx context.Context, tagName string) (*h
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client TagsClient) DeleteSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// DeleteResponder handles the response to the Delete request. The method always
@@ -326,8 +326,8 @@ func (client TagsClient) DeleteValuePreparer(ctx context.Context, tagName string
// DeleteValueSender sends the DeleteValue request. The method will close the
// http.Response Body if it receives an error.
func (client TagsClient) DeleteValueSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// DeleteValueResponder handles the response to the DeleteValue request. The method always
@@ -398,8 +398,8 @@ func (client TagsClient) ListPreparer(ctx context.Context) (*http.Request, error
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client TagsClient) ListSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage/accounts.go b/vendor/github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage/accounts.go
index f381b9db1..4d553ca22 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage/accounts.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage/accounts.go
@@ -108,8 +108,8 @@ func (client AccountsClient) CheckNameAvailabilityPreparer(ctx context.Context,
// CheckNameAvailabilitySender sends the CheckNameAvailability request. The method will close the
// http.Response Body if it receives an error.
func (client AccountsClient) CheckNameAvailabilitySender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// CheckNameAvailabilityResponder handles the response to the CheckNameAvailability request. The method always
@@ -202,9 +202,9 @@ func (client AccountsClient) CreatePreparer(ctx context.Context, resourceGroupNa
// CreateSender sends the Create request. The method will close the
// http.Response Body if it receives an error.
func (client AccountsClient) CreateSender(req *http.Request) (future AccountsCreateFuture, err error) {
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
var resp *http.Response
- resp, err = autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ resp, err = autorest.SendWithSender(client, req, sd...)
if err != nil {
return
}
@@ -293,8 +293,8 @@ func (client AccountsClient) DeletePreparer(ctx context.Context, resourceGroupNa
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client AccountsClient) DeleteSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// DeleteResponder handles the response to the Delete request. The method always
@@ -378,8 +378,8 @@ func (client AccountsClient) GetPropertiesPreparer(ctx context.Context, resource
// GetPropertiesSender sends the GetProperties request. The method will close the
// http.Response Body if it receives an error.
func (client AccountsClient) GetPropertiesSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// GetPropertiesResponder handles the response to the GetProperties request. The method always
@@ -451,8 +451,8 @@ func (client AccountsClient) ListPreparer(ctx context.Context) (*http.Request, e
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client AccountsClient) ListSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
@@ -527,8 +527,8 @@ func (client AccountsClient) ListByResourceGroupPreparer(ctx context.Context, re
// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the
// http.Response Body if it receives an error.
func (client AccountsClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always
@@ -612,8 +612,8 @@ func (client AccountsClient) ListKeysPreparer(ctx context.Context, resourceGroup
// ListKeysSender sends the ListKeys request. The method will close the
// http.Response Body if it receives an error.
func (client AccountsClient) ListKeysSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListKeysResponder handles the response to the ListKeys request. The method always
@@ -702,8 +702,8 @@ func (client AccountsClient) RegenerateKeyPreparer(ctx context.Context, resource
// RegenerateKeySender sends the RegenerateKey request. The method will close the
// http.Response Body if it receives an error.
func (client AccountsClient) RegenerateKeySender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// RegenerateKeyResponder handles the response to the RegenerateKey request. The method always
@@ -795,8 +795,8 @@ func (client AccountsClient) UpdatePreparer(ctx context.Context, resourceGroupNa
// UpdateSender sends the Update request. The method will close the
// http.Response Body if it receives an error.
func (client AccountsClient) UpdateSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// UpdateResponder handles the response to the Update request. The method always
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage/usage.go b/vendor/github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage/usage.go
index d95583093..cc6ce3315 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage/usage.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage/usage.go
@@ -95,8 +95,8 @@ func (client UsageClient) ListPreparer(ctx context.Context) (*http.Request, erro
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client UsageClient) ListSender(req *http.Request) (*http.Response, error) {
- return autorest.SendWithSender(client, req,
- azure.DoRetryWithRegistration(client.Client))
+ sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
+ return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/storage/table.go b/vendor/github.com/Azure/azure-sdk-for-go/storage/table.go
index 22d9b4f5c..0febf077f 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/storage/table.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/storage/table.go
@@ -355,8 +355,12 @@ func (t *Table) queryEntities(uri string, headers map[string]string, ml Metadata
return nil, err
}
v := originalURI.Query()
- v.Set(nextPartitionKeyQueryParameter, contToken.NextPartitionKey)
- v.Set(nextRowKeyQueryParameter, contToken.NextRowKey)
+ if contToken.NextPartitionKey != "" {
+ v.Set(nextPartitionKeyQueryParameter, contToken.NextPartitionKey)
+ }
+ if contToken.NextRowKey != "" {
+ v.Set(nextRowKeyQueryParameter, contToken.NextRowKey)
+ }
newURI := t.tsc.client.getEndpoint(tableServiceName, t.buildPath(), v)
entities.NextLink = &newURI
entities.ml = ml
@@ -371,7 +375,7 @@ func extractContinuationTokenFromHeaders(h http.Header) *continuationToken {
NextRowKey: h.Get(headerNextRowKey),
}
- if ct.NextPartitionKey != "" && ct.NextRowKey != "" {
+ if ct.NextPartitionKey != "" || ct.NextRowKey != "" {
return &ct
}
return nil
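The two `table.go` hunks fix pagination of table queries: previously both continuation headers had to be present for a token to be returned, and both query parameters were set unconditionally on the follow-up URI; now a response carrying only one of the headers still yields a token, and only non-empty values are copied forward. A self-contained sketch of the corrected predicate; the header names are assumptions for illustration, since the vendored package keeps them as unexported constants:

```go
package main

import (
	"fmt"
	"net/http"
)

// Assumed header names, for illustration only.
const (
	headerNextPartitionKey = "x-ms-continuation-NextPartitionKey"
	headerNextRowKey       = "x-ms-continuation-NextRowKey"
)

type continuationToken struct {
	NextPartitionKey string
	NextRowKey       string
}

// extractToken mirrors the corrected logic: either header alone is enough to
// keep paging, where the old code required both to be present.
func extractToken(h http.Header) *continuationToken {
	ct := continuationToken{
		NextPartitionKey: h.Get(headerNextPartitionKey),
		NextRowKey:       h.Get(headerNextRowKey),
	}
	if ct.NextPartitionKey != "" || ct.NextRowKey != "" {
		return &ct
	}
	return nil
}

func main() {
	h := http.Header{}
	h.Set(headerNextPartitionKey, "1!8!UGFydGl0aW9u") // server sent no NextRowKey
	fmt.Printf("%+v\n", extractToken(h))              // non-nil: paging continues
}
```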
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/version/version.go b/vendor/github.com/Azure/azure-sdk-for-go/version/version.go
index 6305cdaa4..16c2988c0 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/version/version.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/version/version.go
@@ -18,4 +18,4 @@ package version
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// Number contains the semantic version of this SDK.
-const Number = "v31.2.0"
+const Number = "v32.5.0"
diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/README.md b/vendor/github.com/Azure/go-autorest/autorest/adal/README.md
index 7b0c4bc4d..fec416a9c 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/adal/README.md
+++ b/vendor/github.com/Azure/go-autorest/autorest/adal/README.md
@@ -135,7 +135,7 @@ resource := "https://management.core.windows.net/"
applicationSecret := "APPLICATION_SECRET"
spt, err := adal.NewServicePrincipalToken(
- oauthConfig,
+ *oauthConfig,
appliationID,
applicationSecret,
resource,
@@ -170,7 +170,7 @@ if err != nil {
}
spt, err := adal.NewServicePrincipalTokenFromCertificate(
- oauthConfig,
+ *oauthConfig,
applicationID,
certificate,
rsaPrivateKey,
@@ -195,7 +195,7 @@ oauthClient := &http.Client{}
// Acquire the device code
deviceCode, err := adal.InitiateDeviceAuth(
oauthClient,
- oauthConfig,
+ *oauthConfig,
applicationID,
resource)
if err != nil {
@@ -212,7 +212,7 @@ if err != nil {
}
spt, err := adal.NewServicePrincipalTokenFromManualToken(
- oauthConfig,
+ *oauthConfig,
applicationID,
resource,
*token,
@@ -227,7 +227,7 @@ if (err == nil) {
```Go
spt, err := adal.NewServicePrincipalTokenFromUsernamePassword(
- oauthConfig,
+ *oauthConfig,
applicationID,
username,
password,
@@ -243,11 +243,11 @@ if (err == nil) {
``` Go
spt, err := adal.NewServicePrincipalTokenFromAuthorizationCode(
- oauthConfig,
+ *oauthConfig,
applicationID,
clientSecret,
- authorizationCode,
- redirectURI,
+ authorizationCode,
+ redirectURI,
resource,
callbacks...)
diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/go.mod b/vendor/github.com/Azure/go-autorest/autorest/adal/go.mod
index 5c95dbfea..fdc5b90ca 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/adal/go.mod
+++ b/vendor/github.com/Azure/go-autorest/autorest/adal/go.mod
@@ -3,9 +3,10 @@ module github.com/Azure/go-autorest/autorest/adal
go 1.12
require (
- github.com/Azure/go-autorest/autorest/date v0.1.0
- github.com/Azure/go-autorest/autorest/mocks v0.1.0
- github.com/Azure/go-autorest/tracing v0.1.0
+ github.com/Azure/go-autorest/autorest v0.9.0
+ github.com/Azure/go-autorest/autorest/date v0.2.0
+ github.com/Azure/go-autorest/autorest/mocks v0.3.0
+ github.com/Azure/go-autorest/tracing v0.5.0
github.com/dgrijalva/jwt-go v3.2.0+incompatible
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2
)
diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/go.sum b/vendor/github.com/Azure/go-autorest/autorest/adal/go.sum
index ef9d10161..f0a018563 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/adal/go.sum
+++ b/vendor/github.com/Azure/go-autorest/autorest/adal/go.sum
@@ -1,139 +1,23 @@
-cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12 h1:jGFvw3l57ViIVEPKKEUXPcLYIXJmQxLUh6ey1eJhwyc=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12/go.mod h1:450APlNTSR6FrvC3CTRqYosuDstRB9un7SOx2k/9ckA=
+github.com/Azure/go-autorest/autorest v0.9.0 h1:MRvx8gncNaXJqOoLmhNjUAKh33JJF8LyxPhomEtOsjs=
+github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
+github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
github.com/Azure/go-autorest/autorest/date v0.1.0 h1:YGrhWfrgtFs84+h0o46rJrlmsZtyZRg470CqAXTZaGM=
github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
+github.com/Azure/go-autorest/autorest/date v0.2.0 h1:yW+Zlqf26583pE43KhfnhFcdmSWlm5Ew6bxipnr/tbM=
+github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g=
github.com/Azure/go-autorest/autorest/mocks v0.1.0 h1:Kx+AUU2Te+A3JIyYn6Dfs+cFgx5XorQKuIXrZGoq/SI=
github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
-github.com/Azure/go-autorest/tracing v0.1.0 h1:TRBxC5Pj/fIuh4Qob0ZpkggbfT8RC0SubHbpV3p4/Vc=
-github.com/Azure/go-autorest/tracing v0.1.0/go.mod h1:ROEEAFwXycQw7Sn3DXNtEedEvdeRAgDr0izn4z5Ij88=
-github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
-github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
-github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
-github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
-github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
-github.com/census-instrumentation/opencensus-proto v0.2.0 h1:LzQXZOgg4CQfE6bFvXGM30YZL1WW/M337pXml+GrcZ4=
-github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0 h1:Ww5g4zThfD/6cLb4z6xxgeyDa7QDkizMkJKe0ysZXp0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/autorest/mocks v0.3.0 h1:qJumjCaCudz+OcqE9/XtEPfvtOjOmKaui4EOpFI6zZc=
+github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM=
+github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY=
+github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
+github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
+github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
-github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
-github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
-github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
-github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
-github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
-github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
-github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
-github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
-github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
-github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
-github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
-github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
-github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
-github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
-github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
-github.com/grpc-ecosystem/grpc-gateway v1.8.5 h1:2+KSC78XiO6Qy0hIjfc1OD9H+hsaJdJlb8Kqsd41CTE=
-github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
-github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCOH9wdo=
-github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
-github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
-github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
-github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
-github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
-github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
-github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
-github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
-github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
-github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
-github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
-github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
-github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
-github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
-github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
-github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
-go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
-go.opencensus.io v0.20.2 h1:NAfh7zF0/3/HqtMvJNZ/RFrSlCE6ZTlHmKfhL/Dm1Jk=
-go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
-golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
-golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 h1:bjcUS9ztw9kFmmIxJInhon/0Is3p+EHBKNgquIzo1OI=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
-golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-google.golang.org/api v0.3.1 h1:oJra/lMfmtm13/rgY/8i3MzjFWYXvQIAKjQ3HqofMk8=
-google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
-google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
-google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19 h1:Lj2SnHtxkRGJDqnGaSjo+CCdIieEnwVazbOXILwQemk=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
-google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-google.golang.org/grpc v1.19.1 h1:TrBcJ1yqAl1G++wO39nD/qtgpsW9/1+QGrluyMGEYgM=
-google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
-gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
-gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
-gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/go_mod_tidy_hack.go b/vendor/github.com/Azure/go-autorest/autorest/adal/go_mod_tidy_hack.go
new file mode 100644
index 000000000..28a4bfc4c
--- /dev/null
+++ b/vendor/github.com/Azure/go-autorest/autorest/adal/go_mod_tidy_hack.go
@@ -0,0 +1,24 @@
+// +build modhack
+
+package adal
+
+// Copyright 2017 Microsoft Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file, and the github.com/Azure/go-autorest/autorest import, won't actually become part of
+// the resultant binary.
+
+// Necessary for safely adding multi-module repo.
+// See: https://github.com/golang/go/wiki/Modules#is-it-possible-to-add-a-module-to-a-multi-module-repository
+import _ "github.com/Azure/go-autorest/autorest"
diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/sender.go b/vendor/github.com/Azure/go-autorest/autorest/adal/sender.go
index 834401e00..d7e4372bb 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/adal/sender.go
+++ b/vendor/github.com/Azure/go-autorest/autorest/adal/sender.go
@@ -15,7 +15,12 @@ package adal
// limitations under the License.
import (
+ "crypto/tls"
"net/http"
+ "net/http/cookiejar"
+ "sync"
+
+ "github.com/Azure/go-autorest/tracing"
)
const (
@@ -23,6 +28,9 @@ const (
mimeTypeFormPost = "application/x-www-form-urlencoded"
)
+var defaultSender Sender
+var defaultSenderInit = &sync.Once{}
+
// Sender is the interface that wraps the Do method to send HTTP requests.
//
// The standard http.Client conforms to this interface.
@@ -45,7 +53,7 @@ type SendDecorator func(Sender) Sender
// CreateSender creates, decorates, and returns, as a Sender, the default http.Client.
func CreateSender(decorators ...SendDecorator) Sender {
- return DecorateSender(&http.Client{}, decorators...)
+ return DecorateSender(sender(), decorators...)
}
// DecorateSender accepts a Sender and a, possibly empty, set of SendDecorators, which is applies to
@@ -58,3 +66,30 @@ func DecorateSender(s Sender, decorators ...SendDecorator) Sender {
}
return s
}
+
+func sender() Sender {
+ // note that we can't init defaultSender in init() since it will
+ // execute before calling code has had a chance to enable tracing
+ defaultSenderInit.Do(func() {
+ // Use behaviour compatible with DefaultTransport, but require TLS minimum version.
+ defaultTransport := http.DefaultTransport.(*http.Transport)
+ transport := &http.Transport{
+ Proxy: defaultTransport.Proxy,
+ DialContext: defaultTransport.DialContext,
+ MaxIdleConns: defaultTransport.MaxIdleConns,
+ IdleConnTimeout: defaultTransport.IdleConnTimeout,
+ TLSHandshakeTimeout: defaultTransport.TLSHandshakeTimeout,
+ ExpectContinueTimeout: defaultTransport.ExpectContinueTimeout,
+ TLSClientConfig: &tls.Config{
+ MinVersion: tls.VersionTLS12,
+ },
+ }
+ var roundTripper http.RoundTripper = transport
+ if tracing.IsEnabled() {
+ roundTripper = tracing.NewTransport(transport)
+ }
+ j, _ := cookiejar.New(nil)
+ defaultSender = &http.Client{Jar: j, Transport: roundTripper}
+ })
+ return defaultSender
+}
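`adal` now lazily builds one shared `http.Client` in place of the bare `&http.Client{Transport: tracing.Transport}` constructions removed in `token.go` below: the transport copies `http.DefaultTransport`'s settings but enforces TLS 1.2 as a minimum, is wrapped by the tracing transport only if tracing is enabled at first use, and carries a cookie jar. Deferring construction to first use via `sync.Once` rather than `init()` is what gives callers the chance to enable tracing first. The same lazy-singleton pattern in a standalone, stdlib-only sketch:

```go
package main

import (
	"crypto/tls"
	"net/http"
	"net/http/cookiejar"
	"sync"
)

var (
	defaultClient     *http.Client
	defaultClientOnce sync.Once
)

// client mirrors the vendored sender(): DefaultTransport-compatible settings
// with TLS 1.2 as the floor, built exactly once on first use so that any
// process-wide configuration (tracing, in the real package) can happen first.
func client() *http.Client {
	defaultClientOnce.Do(func() {
		base := http.DefaultTransport.(*http.Transport)
		transport := &http.Transport{
			Proxy:                 base.Proxy,
			DialContext:           base.DialContext,
			MaxIdleConns:          base.MaxIdleConns,
			IdleConnTimeout:       base.IdleConnTimeout,
			TLSHandshakeTimeout:   base.TLSHandshakeTimeout,
			ExpectContinueTimeout: base.ExpectContinueTimeout,
			TLSClientConfig:       &tls.Config{MinVersion: tls.VersionTLS12},
		}
		jar, _ := cookiejar.New(nil)
		defaultClient = &http.Client{Jar: jar, Transport: transport}
	})
	return defaultClient
}

func main() {
	_ = client() // every caller shares the same instance
}
```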
diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/token.go b/vendor/github.com/Azure/go-autorest/autorest/adal/token.go
index 4083e76e6..b72753498 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/adal/token.go
+++ b/vendor/github.com/Azure/go-autorest/autorest/adal/token.go
@@ -34,7 +34,6 @@ import (
"time"
"github.com/Azure/go-autorest/autorest/date"
- "github.com/Azure/go-autorest/tracing"
"github.com/dgrijalva/jwt-go"
)
@@ -396,7 +395,7 @@ func (spt *ServicePrincipalToken) UnmarshalJSON(data []byte) error {
spt.refreshLock = &sync.RWMutex{}
}
if spt.sender == nil {
- spt.sender = &http.Client{Transport: tracing.Transport}
+ spt.sender = sender()
}
return nil
}
@@ -444,7 +443,7 @@ func NewServicePrincipalTokenWithSecret(oauthConfig OAuthConfig, id string, reso
RefreshWithin: defaultRefresh,
},
refreshLock: &sync.RWMutex{},
- sender: &http.Client{Transport: tracing.Transport},
+ sender: sender(),
refreshCallbacks: callbacks,
}
return spt, nil
@@ -685,7 +684,7 @@ func newServicePrincipalTokenFromMSI(msiEndpoint, resource string, userAssignedI
RefreshWithin: defaultRefresh,
},
refreshLock: &sync.RWMutex{},
- sender: &http.Client{Transport: tracing.Transport},
+ sender: sender(),
refreshCallbacks: callbacks,
MaxMSIRefreshAttempts: defaultMaxMSIRefreshAttempts,
}
@@ -1024,6 +1023,32 @@ func (mt *MultiTenantServicePrincipalToken) EnsureFreshWithContext(ctx context.C
return nil
}
+// RefreshWithContext obtains a fresh token for the Service Principal.
+func (mt *MultiTenantServicePrincipalToken) RefreshWithContext(ctx context.Context) error {
+ if err := mt.PrimaryToken.RefreshWithContext(ctx); err != nil {
+ return fmt.Errorf("failed to refresh primary token: %v", err)
+ }
+ for _, aux := range mt.AuxiliaryTokens {
+ if err := aux.RefreshWithContext(ctx); err != nil {
+ return fmt.Errorf("failed to refresh auxiliary token: %v", err)
+ }
+ }
+ return nil
+}
+
+// RefreshExchangeWithContext refreshes the token, but for a different resource.
+func (mt *MultiTenantServicePrincipalToken) RefreshExchangeWithContext(ctx context.Context, resource string) error {
+ if err := mt.PrimaryToken.RefreshExchangeWithContext(ctx, resource); err != nil {
+ return fmt.Errorf("failed to refresh primary token: %v", err)
+ }
+ for _, aux := range mt.AuxiliaryTokens {
+ if err := aux.RefreshExchangeWithContext(ctx, resource); err != nil {
+ return fmt.Errorf("failed to refresh auxiliary token: %v", err)
+ }
+ }
+ return nil
+}
+
// NewMultiTenantServicePrincipalToken creates a new MultiTenantServicePrincipalToken with the specified credentials and resource.
func NewMultiTenantServicePrincipalToken(multiTenantCfg MultiTenantOAuthConfig, clientID string, secret string, resource string) (*MultiTenantServicePrincipalToken, error) {
if err := validateStringParam(clientID, "clientID"); err != nil {
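adal v0.6.0 also grows two refresh entry points on `MultiTenantServicePrincipalToken`: `RefreshWithContext` refreshes the primary token and then each auxiliary token in turn, and `RefreshExchangeWithContext` does the same against a different resource; both stop at the first failure. A usage sketch, assuming an already-constructed token (building one via `NewMultiTenantServicePrincipalToken` needs real tenant and credential values, elided here; the Key Vault resource URI is just an example):

```go
package main

import (
	"context"
	"log"

	"github.com/Azure/go-autorest/autorest/adal"
)

func refreshAll(ctx context.Context, mt *adal.MultiTenantServicePrincipalToken) {
	// Primary first, then auxiliaries; the first error aborts.
	if err := mt.RefreshWithContext(ctx); err != nil {
		log.Fatalf("refresh: %v", err)
	}
	// Same fan-out, but exchanging for a token scoped to another resource.
	if err := mt.RefreshExchangeWithContext(ctx, "https://vault.azure.net"); err != nil {
		log.Fatalf("refresh exchange: %v", err)
	}
}

func main() {
	_ = refreshAll // constructing a real MultiTenantServicePrincipalToken is omitted
}
```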
diff --git a/vendor/github.com/Azure/go-autorest/autorest/authorization.go b/vendor/github.com/Azure/go-autorest/autorest/authorization.go
index 380865cd6..54e87b5b6 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/authorization.go
+++ b/vendor/github.com/Azure/go-autorest/autorest/authorization.go
@@ -15,6 +15,7 @@ package autorest
// limitations under the License.
import (
+ "crypto/tls"
"encoding/base64"
"fmt"
"net/http"
@@ -22,7 +23,6 @@ import (
"strings"
"github.com/Azure/go-autorest/autorest/adal"
- "github.com/Azure/go-autorest/tracing"
)
const (
@@ -149,11 +149,11 @@ type BearerAuthorizerCallback struct {
// NewBearerAuthorizerCallback creates a bearer authorization callback. The callback
// is invoked when the HTTP request is submitted.
-func NewBearerAuthorizerCallback(sender Sender, callback BearerAuthorizerCallbackFunc) *BearerAuthorizerCallback {
- if sender == nil {
- sender = &http.Client{Transport: tracing.Transport}
+func NewBearerAuthorizerCallback(s Sender, callback BearerAuthorizerCallbackFunc) *BearerAuthorizerCallback {
+ if s == nil {
+ s = sender(tls.RenegotiateNever)
}
- return &BearerAuthorizerCallback{sender: sender, callback: callback}
+ return &BearerAuthorizerCallback{sender: s, callback: callback}
}
// WithAuthorization returns a PrepareDecorator that adds an HTTP Authorization header whose value
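
With the authorization.go change above, passing a nil Sender to NewBearerAuthorizerCallback now falls back to the package-level sender (TLS 1.2 minimum, cookie jar, tracing only when enabled) rather than a bare &http.Client{}. A hedged sketch of both call shapes; the callback value is left abstract since only its type name appears in this diff:

package sketch

import (
	"crypto/tls"
	"net/http"

	"github.com/Azure/go-autorest/autorest"
)

// newCallbackAuthorizer relies on the new default: a nil Sender means the
// shared sender(tls.RenegotiateNever) is used under the hood.
func newCallbackAuthorizer(cb autorest.BearerAuthorizerCallbackFunc) *autorest.BearerAuthorizerCallback {
	return autorest.NewBearerAuthorizerCallback(nil, cb)
}

// newCallbackAuthorizerWithClient overrides the default with a caller-owned
// http.Client, which satisfies autorest.Sender through its Do method.
func newCallbackAuthorizerWithClient(cb autorest.BearerAuthorizerCallbackFunc) *autorest.BearerAuthorizerCallback {
	c := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{MinVersion: tls.VersionTLS12},
	}}
	return autorest.NewBearerAuthorizerCallback(c, cb)
}
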
diff --git a/vendor/github.com/Azure/go-autorest/autorest/azure/async.go b/vendor/github.com/Azure/go-autorest/autorest/azure/async.go
index 02d011961..1cb41cbeb 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/azure/async.go
+++ b/vendor/github.com/Azure/go-autorest/autorest/azure/async.go
@@ -417,6 +417,11 @@ func (pt *pollingTrackerBase) pollForStatus(ctx context.Context, sender autorest
}
req = req.WithContext(ctx)
+ preparer := autorest.CreatePreparer(autorest.GetPrepareDecorators(ctx)...)
+ req, err = preparer.Prepare(req)
+ if err != nil {
+ return autorest.NewErrorWithError(err, "pollingTrackerBase", "pollForStatus", nil, "failed preparing HTTP request")
+ }
pt.resp, err = sender.Do(req)
if err != nil {
return autorest.NewErrorWithError(err, "pollingTrackerBase", "pollForStatus", nil, "failed to send HTTP request")
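
The async.go hunk above makes pollForStatus re-apply any PrepareDecorators carried in the polling context to each polling request it builds. A small sketch of how a caller might attach decorators so they persist across the whole long-running operation; WithHeader is an existing autorest decorator, and the header name here is purely illustrative:

package sketch

import (
	"context"

	"github.com/Azure/go-autorest/autorest"
)

// withPollingHeaders stores PrepareDecorators on the context; with the
// change above they are applied to every subsequent polling request,
// not just the initial call.
func withPollingHeaders(ctx context.Context) context.Context {
	return autorest.WithPrepareDecorators(ctx, []autorest.PrepareDecorator{
		autorest.WithHeader("x-ms-example-correlation", "sketch"), // illustrative header name
	})
}
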
diff --git a/vendor/github.com/Azure/go-autorest/autorest/azure/cli/go.mod b/vendor/github.com/Azure/go-autorest/autorest/azure/cli/go.mod
index a147222b4..cef48f1ea 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/azure/cli/go.mod
+++ b/vendor/github.com/Azure/go-autorest/autorest/azure/cli/go.mod
@@ -3,7 +3,7 @@ module github.com/Azure/go-autorest/autorest/azure/cli
go 1.12
require (
- github.com/Azure/go-autorest/autorest/adal v0.1.0
+ github.com/Azure/go-autorest/autorest/adal v0.5.0
github.com/Azure/go-autorest/autorest/date v0.1.0
github.com/dimchansky/utfbom v1.1.0
github.com/mitchellh/go-homedir v1.1.0
diff --git a/vendor/github.com/Azure/go-autorest/autorest/azure/cli/go.sum b/vendor/github.com/Azure/go-autorest/autorest/azure/cli/go.sum
index 1d098cff2..2d6636a33 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/azure/cli/go.sum
+++ b/vendor/github.com/Azure/go-autorest/autorest/azure/cli/go.sum
@@ -1,144 +1,17 @@
-cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12 h1:jGFvw3l57ViIVEPKKEUXPcLYIXJmQxLUh6ey1eJhwyc=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12/go.mod h1:450APlNTSR6FrvC3CTRqYosuDstRB9un7SOx2k/9ckA=
-github.com/Azure/go-autorest/autorest/adal v0.1.0 h1:RSw/7EAullliqwkZvgIGDYZWQm1PGKXI8c4aY/87yuU=
-github.com/Azure/go-autorest/autorest/adal v0.1.0/go.mod h1:MeS4XhScH55IST095THyTxElntu7WqB7pNbZo8Q5G3E=
+github.com/Azure/go-autorest/autorest/adal v0.5.0 h1:q2gDruN08/guU9vAjuPWff0+QIrpH6ediguzdAzXAUU=
+github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
github.com/Azure/go-autorest/autorest/date v0.1.0 h1:YGrhWfrgtFs84+h0o46rJrlmsZtyZRg470CqAXTZaGM=
github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
github.com/Azure/go-autorest/autorest/mocks v0.1.0 h1:Kx+AUU2Te+A3JIyYn6Dfs+cFgx5XorQKuIXrZGoq/SI=
github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
-github.com/Azure/go-autorest/tracing v0.1.0 h1:TRBxC5Pj/fIuh4Qob0ZpkggbfT8RC0SubHbpV3p4/Vc=
-github.com/Azure/go-autorest/tracing v0.1.0/go.mod h1:ROEEAFwXycQw7Sn3DXNtEedEvdeRAgDr0izn4z5Ij88=
-github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
-github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
-github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
-github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
-github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
-github.com/census-instrumentation/opencensus-proto v0.2.0 h1:LzQXZOgg4CQfE6bFvXGM30YZL1WW/M337pXml+GrcZ4=
-github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
+github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dimchansky/utfbom v1.1.0 h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4=
github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8=
-github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
-github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
-github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
-github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
-github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
-github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
-github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
-github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
-github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
-github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
-github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
-github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
-github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
-github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
-github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
-github.com/grpc-ecosystem/grpc-gateway v1.8.5 h1:2+KSC78XiO6Qy0hIjfc1OD9H+hsaJdJlb8Kqsd41CTE=
-github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
-github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCOH9wdo=
-github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
-github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
-github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
-github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
-github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
-github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
-github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
-github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
-github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
-github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
-github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
-github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
-github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
-github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
-github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
-github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
-go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
-go.opencensus.io v0.20.2 h1:NAfh7zF0/3/HqtMvJNZ/RFrSlCE6ZTlHmKfhL/Dm1Jk=
-go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
-golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
-golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 h1:bjcUS9ztw9kFmmIxJInhon/0Is3p+EHBKNgquIzo1OI=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
-golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-google.golang.org/api v0.3.1 h1:oJra/lMfmtm13/rgY/8i3MzjFWYXvQIAKjQ3HqofMk8=
-google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
-google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
-google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19 h1:Lj2SnHtxkRGJDqnGaSjo+CCdIieEnwVazbOXILwQemk=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
-google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-google.golang.org/grpc v1.19.1 h1:TrBcJ1yqAl1G++wO39nD/qtgpsW9/1+QGrluyMGEYgM=
-google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
-gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
-gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
-gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/vendor/github.com/Azure/go-autorest/autorest/client.go b/vendor/github.com/Azure/go-autorest/autorest/client.go
index 92da6adb2..1c6a0617a 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/client.go
+++ b/vendor/github.com/Azure/go-autorest/autorest/client.go
@@ -22,12 +22,10 @@ import (
"io/ioutil"
"log"
"net/http"
- "net/http/cookiejar"
"strings"
"time"
"github.com/Azure/go-autorest/logger"
- "github.com/Azure/go-autorest/tracing"
)
const (
@@ -264,30 +262,8 @@ func (c Client) Do(r *http.Request) (*http.Response, error) {
// sender returns the Sender to which to send requests.
func (c Client) sender(renengotiation tls.RenegotiationSupport) Sender {
if c.Sender == nil {
- // Use behaviour compatible with DefaultTransport, but require TLS minimum version.
- var defaultTransport = http.DefaultTransport.(*http.Transport)
- transport := tracing.Transport
- // for non-default values of TLS renegotiation create a new tracing transport.
- // updating tracing.Transport affects all clients which is not what we want.
- if renengotiation != tls.RenegotiateNever {
- transport = tracing.NewTransport()
- }
- transport.Base = &http.Transport{
- Proxy: defaultTransport.Proxy,
- DialContext: defaultTransport.DialContext,
- MaxIdleConns: defaultTransport.MaxIdleConns,
- IdleConnTimeout: defaultTransport.IdleConnTimeout,
- TLSHandshakeTimeout: defaultTransport.TLSHandshakeTimeout,
- ExpectContinueTimeout: defaultTransport.ExpectContinueTimeout,
- TLSClientConfig: &tls.Config{
- MinVersion: tls.VersionTLS12,
- Renegotiation: renengotiation,
- },
- }
- j, _ := cookiejar.New(nil)
- return &http.Client{Jar: j, Transport: transport}
+ return sender(renengotiation)
}
-
return c.Sender
}
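
After the client.go change above, a Client whose Sender field is nil no longer builds its own transport inline; it delegates to the shared package-level sender. A brief sketch of the two behaviours, assuming a plain struct literal for the Client:

package sketch

import (
	"net/http"

	"github.com/Azure/go-autorest/autorest"
)

// clientSenderChoices shows the fallback behaviour: an explicit Sender still
// wins, otherwise requests go through the shared sender() (TLS 1.2 minimum,
// cookie jar, tracing transport only when tracing is enabled).
func clientSenderChoices(custom *http.Client) autorest.Client {
	c := autorest.Client{}
	if custom != nil {
		// *http.Client satisfies autorest.Sender via its Do method.
		c.Sender = custom
	}
	return c
}
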
diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/go.mod b/vendor/github.com/Azure/go-autorest/autorest/date/go.mod
index 13a1e9803..3adc4804c 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/date/go.mod
+++ b/vendor/github.com/Azure/go-autorest/autorest/date/go.mod
@@ -1,3 +1,5 @@
module github.com/Azure/go-autorest/autorest/date
go 1.12
+
+require github.com/Azure/go-autorest/autorest v0.9.0
diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/go.sum b/vendor/github.com/Azure/go-autorest/autorest/date/go.sum
new file mode 100644
index 000000000..9e2ee7a94
--- /dev/null
+++ b/vendor/github.com/Azure/go-autorest/autorest/date/go.sum
@@ -0,0 +1,16 @@
+github.com/Azure/go-autorest/autorest v0.9.0 h1:MRvx8gncNaXJqOoLmhNjUAKh33JJF8LyxPhomEtOsjs=
+github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
+github.com/Azure/go-autorest/autorest/adal v0.5.0 h1:q2gDruN08/guU9vAjuPWff0+QIrpH6ediguzdAzXAUU=
+github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
+github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
+github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0 h1:Ww5g4zThfD/6cLb4z6xxgeyDa7QDkizMkJKe0ysZXp0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY=
+github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
+github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
+github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/go_mod_tidy_hack.go b/vendor/github.com/Azure/go-autorest/autorest/date/go_mod_tidy_hack.go
new file mode 100644
index 000000000..55adf930f
--- /dev/null
+++ b/vendor/github.com/Azure/go-autorest/autorest/date/go_mod_tidy_hack.go
@@ -0,0 +1,24 @@
+// +build modhack
+
+package date
+
+// Copyright 2017 Microsoft Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file, and the github.com/Azure/go-autorest/autorest import, won't actually become part of
+// the resultant binary.
+
+// Necessary for safely adding multi-module repo.
+// See: https://github.com/golang/go/wiki/Modules#is-it-possible-to-add-a-module-to-a-multi-module-repository
+import _ "github.com/Azure/go-autorest/autorest"
diff --git a/vendor/github.com/Azure/go-autorest/autorest/go.mod b/vendor/github.com/Azure/go-autorest/autorest/go.mod
index 6b7c7f276..ab2ae66ac 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/go.mod
+++ b/vendor/github.com/Azure/go-autorest/autorest/go.mod
@@ -3,10 +3,9 @@ module github.com/Azure/go-autorest/autorest
go 1.12
require (
- github.com/Azure/go-autorest/autorest/adal v0.2.0
- github.com/Azure/go-autorest/autorest/mocks v0.1.0
+ github.com/Azure/go-autorest/autorest/adal v0.5.0
+ github.com/Azure/go-autorest/autorest/mocks v0.2.0
github.com/Azure/go-autorest/logger v0.1.0
- github.com/Azure/go-autorest/tracing v0.1.0
- go.opencensus.io v0.20.2
+ github.com/Azure/go-autorest/tracing v0.5.0
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2
)
diff --git a/vendor/github.com/Azure/go-autorest/autorest/go.sum b/vendor/github.com/Azure/go-autorest/autorest/go.sum
index a6848303f..729b99cd0 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/go.sum
+++ b/vendor/github.com/Azure/go-autorest/autorest/go.sum
@@ -1,143 +1,18 @@
-cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12 h1:jGFvw3l57ViIVEPKKEUXPcLYIXJmQxLUh6ey1eJhwyc=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12/go.mod h1:450APlNTSR6FrvC3CTRqYosuDstRB9un7SOx2k/9ckA=
-github.com/Azure/go-autorest/autorest/adal v0.2.0 h1:7IBDu1jgh+ADHXnEYExkV9RE/ztOOlxdACkkPRthGKw=
-github.com/Azure/go-autorest/autorest/adal v0.2.0/go.mod h1:MeS4XhScH55IST095THyTxElntu7WqB7pNbZo8Q5G3E=
+github.com/Azure/go-autorest/autorest/adal v0.5.0 h1:q2gDruN08/guU9vAjuPWff0+QIrpH6ediguzdAzXAUU=
+github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
github.com/Azure/go-autorest/autorest/date v0.1.0 h1:YGrhWfrgtFs84+h0o46rJrlmsZtyZRg470CqAXTZaGM=
github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
github.com/Azure/go-autorest/autorest/mocks v0.1.0 h1:Kx+AUU2Te+A3JIyYn6Dfs+cFgx5XorQKuIXrZGoq/SI=
github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0 h1:Ww5g4zThfD/6cLb4z6xxgeyDa7QDkizMkJKe0ysZXp0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY=
github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
-github.com/Azure/go-autorest/tracing v0.1.0 h1:TRBxC5Pj/fIuh4Qob0ZpkggbfT8RC0SubHbpV3p4/Vc=
-github.com/Azure/go-autorest/tracing v0.1.0/go.mod h1:ROEEAFwXycQw7Sn3DXNtEedEvdeRAgDr0izn4z5Ij88=
-github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
-github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
-github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
-github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
-github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
-github.com/census-instrumentation/opencensus-proto v0.2.0 h1:LzQXZOgg4CQfE6bFvXGM30YZL1WW/M337pXml+GrcZ4=
-github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
+github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
-github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
-github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
-github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
-github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
-github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
-github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
-github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
-github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
-github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
-github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
-github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
-github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
-github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
-github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
-github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
-github.com/grpc-ecosystem/grpc-gateway v1.8.5 h1:2+KSC78XiO6Qy0hIjfc1OD9H+hsaJdJlb8Kqsd41CTE=
-github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
-github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCOH9wdo=
-github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
-github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
-github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
-github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
-github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
-github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
-github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
-github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
-github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
-github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
-github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
-github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
-github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
-github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
-github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
-github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
-go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
-go.opencensus.io v0.20.2 h1:NAfh7zF0/3/HqtMvJNZ/RFrSlCE6ZTlHmKfhL/Dm1Jk=
-go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
-golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
-golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 h1:bjcUS9ztw9kFmmIxJInhon/0Is3p+EHBKNgquIzo1OI=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
-golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-google.golang.org/api v0.3.1 h1:oJra/lMfmtm13/rgY/8i3MzjFWYXvQIAKjQ3HqofMk8=
-google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
-google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
-google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19 h1:Lj2SnHtxkRGJDqnGaSjo+CCdIieEnwVazbOXILwQemk=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
-google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-google.golang.org/grpc v1.19.1 h1:TrBcJ1yqAl1G++wO39nD/qtgpsW9/1+QGrluyMGEYgM=
-google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
-gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
-gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
-gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/vendor/github.com/Azure/go-autorest/autorest/preparer.go b/vendor/github.com/Azure/go-autorest/autorest/preparer.go
index 489140224..9f864ab1a 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/preparer.go
+++ b/vendor/github.com/Azure/go-autorest/autorest/preparer.go
@@ -16,6 +16,7 @@ package autorest
import (
"bytes"
+ "context"
"encoding/json"
"encoding/xml"
"fmt"
@@ -38,6 +39,27 @@ const (
headerUserAgent = "User-Agent"
)
+// used as a key type in context.WithValue()
+type ctxPrepareDecorators struct{}
+
+// WithPrepareDecorators adds the specified PrepareDecorators to the provided context.
+// If no PrepareDecorators are provided the context is unchanged.
+func WithPrepareDecorators(ctx context.Context, prepareDecorator []PrepareDecorator) context.Context {
+ if len(prepareDecorator) == 0 {
+ return ctx
+ }
+ return context.WithValue(ctx, ctxPrepareDecorators{}, prepareDecorator)
+}
+
+// GetPrepareDecorators returns the PrepareDecorators in the provided context or the provided default PrepareDecorators.
+func GetPrepareDecorators(ctx context.Context, defaultPrepareDecorators ...PrepareDecorator) []PrepareDecorator {
+ inCtx := ctx.Value(ctxPrepareDecorators{})
+ if pd, ok := inCtx.([]PrepareDecorator); ok {
+ return pd
+ }
+ return defaultPrepareDecorators
+}
+
// Preparer is the interface that wraps the Prepare method.
//
// Prepare accepts and possibly modifies an http.Request (e.g., adding Headers). Implementations
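
The preparer.go hunk above introduces WithPrepareDecorators and GetPrepareDecorators for carrying PrepareDecorators through a context. A short round-trip sketch; WithHeader is an existing autorest decorator and the header value is illustrative only:

package sketch

import (
	"context"

	"github.com/Azure/go-autorest/autorest"
)

// prepareDecoratorsRoundTrip stores decorators on the context and reads them
// back. The supplied defaults are returned only when nothing was attached;
// passing an empty slice leaves the context unchanged, per the hunk above.
func prepareDecoratorsRoundTrip(ctx context.Context, defaults ...autorest.PrepareDecorator) []autorest.PrepareDecorator {
	stored := []autorest.PrepareDecorator{
		autorest.WithHeader("x-ms-example", "sketch"), // illustrative header only
	}
	ctx = autorest.WithPrepareDecorators(ctx, stored)
	return autorest.GetPrepareDecorators(ctx, defaults...) // returns `stored`
}
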
diff --git a/vendor/github.com/Azure/go-autorest/autorest/sender.go b/vendor/github.com/Azure/go-autorest/autorest/sender.go
index b918833fa..e582489b3 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/sender.go
+++ b/vendor/github.com/Azure/go-autorest/autorest/sender.go
@@ -16,10 +16,12 @@ package autorest
import (
"context"
+ "crypto/tls"
"fmt"
"log"
"math"
"net/http"
+ "net/http/cookiejar"
"strconv"
"time"
@@ -30,7 +32,11 @@ import (
type ctxSendDecorators struct{}
// WithSendDecorators adds the specified SendDecorators to the provided context.
+// If no SendDecorators are provided the context is unchanged.
func WithSendDecorators(ctx context.Context, sendDecorator []SendDecorator) context.Context {
+ if len(sendDecorator) == 0 {
+ return ctx
+ }
return context.WithValue(ctx, ctxSendDecorators{}, sendDecorator)
}
@@ -65,7 +71,7 @@ type SendDecorator func(Sender) Sender
// CreateSender creates, decorates, and returns, as a Sender, the default http.Client.
func CreateSender(decorators ...SendDecorator) Sender {
- return DecorateSender(&http.Client{}, decorators...)
+ return DecorateSender(sender(tls.RenegotiateNever), decorators...)
}
// DecorateSender accepts a Sender and a, possibly empty, set of SendDecorators, which is applies to
@@ -88,7 +94,7 @@ func DecorateSender(s Sender, decorators ...SendDecorator) Sender {
//
// Send will not poll or retry requests.
func Send(r *http.Request, decorators ...SendDecorator) (*http.Response, error) {
- return SendWithSender(&http.Client{Transport: tracing.Transport}, r, decorators...)
+ return SendWithSender(sender(tls.RenegotiateNever), r, decorators...)
}
// SendWithSender sends the passed http.Request, through the provided Sender, returning the
@@ -100,6 +106,29 @@ func SendWithSender(s Sender, r *http.Request, decorators ...SendDecorator) (*ht
return DecorateSender(s, decorators...).Do(r)
}
+func sender(renengotiation tls.RenegotiationSupport) Sender {
+ // Use behaviour compatible with DefaultTransport, but require TLS minimum version.
+ defaultTransport := http.DefaultTransport.(*http.Transport)
+ transport := &http.Transport{
+ Proxy: defaultTransport.Proxy,
+ DialContext: defaultTransport.DialContext,
+ MaxIdleConns: defaultTransport.MaxIdleConns,
+ IdleConnTimeout: defaultTransport.IdleConnTimeout,
+ TLSHandshakeTimeout: defaultTransport.TLSHandshakeTimeout,
+ ExpectContinueTimeout: defaultTransport.ExpectContinueTimeout,
+ TLSClientConfig: &tls.Config{
+ MinVersion: tls.VersionTLS12,
+ Renegotiation: renengotiation,
+ },
+ }
+ var roundTripper http.RoundTripper = transport
+ if tracing.IsEnabled() {
+ roundTripper = tracing.NewTransport(transport)
+ }
+ j, _ := cookiejar.New(nil)
+ return &http.Client{Jar: j, Transport: roundTripper}
+}
+
// AfterDelay returns a SendDecorator that delays for the passed time.Duration before
// invoking the Sender. The delay may be terminated by closing the optional channel on the
// http.Request. If canceled, no further Senders are invoked.
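
The sender.go hunk above replaces the bare &http.Client{} in CreateSender and Send with the new package-level sender(), so every default code path now gets the TLS 1.2 minimum, cookie jar, and tracing-aware transport; SendDecorators still compose on top. A minimal caller-side sketch using AfterDelay, whose doc comment appears just above:

package sketch

import (
	"net/http"
	"time"

	"github.com/Azure/go-autorest/autorest"
)

// sendWithDefaults builds a Sender from the hardened default client and
// composes a single SendDecorator onto it before sending the request.
func sendWithDefaults(req *http.Request) (*http.Response, error) {
	s := autorest.CreateSender(
		autorest.AfterDelay(100 * time.Millisecond), // decorator documented in the hunk above
	)
	return s.Do(req)
}
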
diff --git a/vendor/github.com/Azure/go-autorest/autorest/to/go.mod b/vendor/github.com/Azure/go-autorest/autorest/to/go.mod
index a2054be36..48fd8c6e5 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/to/go.mod
+++ b/vendor/github.com/Azure/go-autorest/autorest/to/go.mod
@@ -1,3 +1,5 @@
module github.com/Azure/go-autorest/autorest/to
go 1.12
+
+require github.com/Azure/go-autorest/autorest v0.9.0
diff --git a/vendor/github.com/Azure/go-autorest/autorest/to/go.sum b/vendor/github.com/Azure/go-autorest/autorest/to/go.sum
new file mode 100644
index 000000000..d7ee6b462
--- /dev/null
+++ b/vendor/github.com/Azure/go-autorest/autorest/to/go.sum
@@ -0,0 +1,17 @@
+github.com/Azure/go-autorest/autorest v0.9.0 h1:MRvx8gncNaXJqOoLmhNjUAKh33JJF8LyxPhomEtOsjs=
+github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
+github.com/Azure/go-autorest/autorest/adal v0.5.0 h1:q2gDruN08/guU9vAjuPWff0+QIrpH6ediguzdAzXAUU=
+github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
+github.com/Azure/go-autorest/autorest/date v0.1.0 h1:YGrhWfrgtFs84+h0o46rJrlmsZtyZRg470CqAXTZaGM=
+github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
+github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0 h1:Ww5g4zThfD/6cLb4z6xxgeyDa7QDkizMkJKe0ysZXp0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY=
+github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
+github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
+github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
diff --git a/vendor/github.com/Azure/go-autorest/autorest/to/go_mod_tidy_hack.go b/vendor/github.com/Azure/go-autorest/autorest/to/go_mod_tidy_hack.go
new file mode 100644
index 000000000..8e8292107
--- /dev/null
+++ b/vendor/github.com/Azure/go-autorest/autorest/to/go_mod_tidy_hack.go
@@ -0,0 +1,24 @@
+// +build modhack
+
+package to
+
+// Copyright 2017 Microsoft Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file, and the github.com/Azure/go-autorest/autorest import, won't actually become part of
+// the resultant binary.
+
+// Necessary for safely adding multi-module repo.
+// See: https://github.com/golang/go/wiki/Modules#is-it-possible-to-add-a-module-to-a-multi-module-repository
+import _ "github.com/Azure/go-autorest/autorest"
diff --git a/vendor/github.com/Azure/go-autorest/autorest/validation/go.mod b/vendor/github.com/Azure/go-autorest/autorest/validation/go.mod
index c44c51ff6..b3f9b6a09 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/validation/go.mod
+++ b/vendor/github.com/Azure/go-autorest/autorest/validation/go.mod
@@ -2,4 +2,7 @@ module github.com/Azure/go-autorest/autorest/validation
go 1.12
-require github.com/stretchr/testify v1.3.0
+require (
+ github.com/Azure/go-autorest/autorest v0.9.0
+ github.com/stretchr/testify v1.3.0
+)
diff --git a/vendor/github.com/Azure/go-autorest/autorest/validation/go.sum b/vendor/github.com/Azure/go-autorest/autorest/validation/go.sum
index 4347755af..6b9010a73 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/validation/go.sum
+++ b/vendor/github.com/Azure/go-autorest/autorest/validation/go.sum
@@ -1,7 +1,24 @@
+github.com/Azure/go-autorest/autorest v0.9.0 h1:MRvx8gncNaXJqOoLmhNjUAKh33JJF8LyxPhomEtOsjs=
+github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
+github.com/Azure/go-autorest/autorest/adal v0.5.0 h1:q2gDruN08/guU9vAjuPWff0+QIrpH6ediguzdAzXAUU=
+github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
+github.com/Azure/go-autorest/autorest/date v0.1.0 h1:YGrhWfrgtFs84+h0o46rJrlmsZtyZRg470CqAXTZaGM=
+github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
+github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0 h1:Ww5g4zThfD/6cLb4z6xxgeyDa7QDkizMkJKe0ysZXp0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY=
+github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
+github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
+github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
diff --git a/vendor/github.com/Azure/go-autorest/autorest/validation/go_mod_tidy_hack.go b/vendor/github.com/Azure/go-autorest/autorest/validation/go_mod_tidy_hack.go
new file mode 100644
index 000000000..2b2668581
--- /dev/null
+++ b/vendor/github.com/Azure/go-autorest/autorest/validation/go_mod_tidy_hack.go
@@ -0,0 +1,24 @@
+// +build modhack
+
+package validation
+
+// Copyright 2017 Microsoft Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file, and the github.com/Azure/go-autorest/autorest import, won't actually become part of
+// the resultant binary.
+
+// Necessary for safely adding multi-module repo.
+// See: https://github.com/golang/go/wiki/Modules#is-it-possible-to-add-a-module-to-a-multi-module-repository
+import _ "github.com/Azure/go-autorest/autorest"
diff --git a/vendor/github.com/Azure/go-autorest/autorest/version.go b/vendor/github.com/Azure/go-autorest/autorest/version.go
index ec7e84817..cb851937a 100644
--- a/vendor/github.com/Azure/go-autorest/autorest/version.go
+++ b/vendor/github.com/Azure/go-autorest/autorest/version.go
@@ -19,7 +19,7 @@ import (
"runtime"
)
-const number = "v12.3.0"
+const number = "v13.0.0"
var (
userAgent = fmt.Sprintf("Go/%s (%s-%s) go-autorest/%s",
diff --git a/vendor/github.com/Azure/go-autorest/tracing/go.mod b/vendor/github.com/Azure/go-autorest/tracing/go.mod
index b066b3e6e..25c34c108 100644
--- a/vendor/github.com/Azure/go-autorest/tracing/go.mod
+++ b/vendor/github.com/Azure/go-autorest/tracing/go.mod
@@ -1,8 +1,3 @@
module github.com/Azure/go-autorest/tracing
go 1.12
-
-require (
- contrib.go.opencensus.io/exporter/ocagent v0.4.12
- go.opencensus.io v0.20.2
-)
diff --git a/vendor/github.com/Azure/go-autorest/tracing/go.sum b/vendor/github.com/Azure/go-autorest/tracing/go.sum
deleted file mode 100644
index 4e5548c91..000000000
--- a/vendor/github.com/Azure/go-autorest/tracing/go.sum
+++ /dev/null
@@ -1,130 +0,0 @@
-cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12 h1:jGFvw3l57ViIVEPKKEUXPcLYIXJmQxLUh6ey1eJhwyc=
-contrib.go.opencensus.io/exporter/ocagent v0.4.12/go.mod h1:450APlNTSR6FrvC3CTRqYosuDstRB9un7SOx2k/9ckA=
-github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
-github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
-github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
-github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
-github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
-github.com/census-instrumentation/opencensus-proto v0.2.0 h1:LzQXZOgg4CQfE6bFvXGM30YZL1WW/M337pXml+GrcZ4=
-github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
-github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
-github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
-github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
-github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
-github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
-github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
-github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
-github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
-github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
-github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
-github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
-github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
-github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
-github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
-github.com/grpc-ecosystem/grpc-gateway v1.8.5 h1:2+KSC78XiO6Qy0hIjfc1OD9H+hsaJdJlb8Kqsd41CTE=
-github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
-github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCOH9wdo=
-github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
-github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
-github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
-github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
-github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
-github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
-github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
-github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
-github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
-github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
-github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
-github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
-github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
-github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
-github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
-github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
-go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
-go.opencensus.io v0.20.2 h1:NAfh7zF0/3/HqtMvJNZ/RFrSlCE6ZTlHmKfhL/Dm1Jk=
-go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
-golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
-golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 h1:bjcUS9ztw9kFmmIxJInhon/0Is3p+EHBKNgquIzo1OI=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
-golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-google.golang.org/api v0.3.1 h1:oJra/lMfmtm13/rgY/8i3MzjFWYXvQIAKjQ3HqofMk8=
-google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
-google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
-google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19 h1:Lj2SnHtxkRGJDqnGaSjo+CCdIieEnwVazbOXILwQemk=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
-google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-google.golang.org/grpc v1.19.1 h1:TrBcJ1yqAl1G++wO39nD/qtgpsW9/1+QGrluyMGEYgM=
-google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
-gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
-gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
-gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/vendor/github.com/Azure/go-autorest/tracing/tracing.go b/vendor/github.com/Azure/go-autorest/tracing/tracing.go
index 28951c284..0e7a6e962 100644
--- a/vendor/github.com/Azure/go-autorest/tracing/tracing.go
+++ b/vendor/github.com/Azure/go-autorest/tracing/tracing.go
@@ -16,180 +16,52 @@ package tracing
import (
"context"
- "fmt"
"net/http"
- "os"
-
- "contrib.go.opencensus.io/exporter/ocagent"
- "go.opencensus.io/plugin/ochttp"
- "go.opencensus.io/plugin/ochttp/propagation/tracecontext"
- "go.opencensus.io/stats/view"
- "go.opencensus.io/trace"
)
+// Tracer represents an HTTP tracing facility.
+type Tracer interface {
+ NewTransport(base *http.Transport) http.RoundTripper
+ StartSpan(ctx context.Context, name string) context.Context
+ EndSpan(ctx context.Context, httpStatusCode int, err error)
+}
+
var (
- // Transport is the default tracing RoundTripper. The custom options setter will control
- // if traces are being emitted or not.
- Transport = NewTransport()
-
- // enabled is the flag for marking if tracing is enabled.
- enabled = false
-
- // Sampler is the tracing sampler. If tracing is disabled it will never sample. Otherwise
- // it will be using the parent sampler or the default.
- sampler = trace.NeverSample()
-
- // Views for metric instrumentation.
- views = map[string]*view.View{}
-
- // the trace exporter
- traceExporter trace.Exporter
+ tracer Tracer
)
-func init() {
- enableFromEnv()
+// Register will register the provided Tracer. Pass nil to unregister a Tracer.
+func Register(t Tracer) {
+ tracer = t
}
-func enableFromEnv() {
- _, ok := os.LookupEnv("AZURE_SDK_TRACING_ENABLED")
- _, legacyOk := os.LookupEnv("AZURE_SDK_TRACING_ENABELD")
- if ok || legacyOk {
- agentEndpoint, ok := os.LookupEnv("OCAGENT_TRACE_EXPORTER_ENDPOINT")
-
- if ok {
- EnableWithAIForwarding(agentEndpoint)
- } else {
- Enable()
- }
- }
-}
-
-// NewTransport returns a new instance of a tracing-aware RoundTripper.
-func NewTransport() *ochttp.Transport {
- return &ochttp.Transport{
- Propagation: &tracecontext.HTTPFormat{},
- GetStartOptions: getStartOptions,
- }
-}
-
-// IsEnabled returns true if monitoring is enabled for the sdk.
+// IsEnabled returns true if a Tracer has been registered.
func IsEnabled() bool {
- return enabled
+ return tracer != nil
}
-// Enable will start instrumentation for metrics and traces.
-func Enable() error {
- enabled = true
- sampler = nil
-
- err := initStats()
- return err
+// NewTransport creates a new instrumenting http.RoundTripper for the
+// registered Tracer. If no Tracer has been registered it returns nil.
+func NewTransport(base *http.Transport) http.RoundTripper {
+ if tracer != nil {
+ return tracer.NewTransport(base)
+ }
+ return nil
}
-// Disable will disable instrumentation for metrics and traces.
-func Disable() {
- disableStats()
- sampler = trace.NeverSample()
- if traceExporter != nil {
- trace.UnregisterExporter(traceExporter)
- }
- enabled = false
-}
-
-// EnableWithAIForwarding will start instrumentation and will connect to app insights forwarder
-// exporter making the metrics and traces available in app insights.
-func EnableWithAIForwarding(agentEndpoint string) (err error) {
- err = Enable()
- if err != nil {
- return err
- }
-
- traceExporter, err := ocagent.NewExporter(ocagent.WithInsecure(), ocagent.WithAddress(agentEndpoint))
- if err != nil {
- return err
- }
- trace.RegisterExporter(traceExporter)
- return
-}
-
-// getStartOptions is the custom options setter for the ochttp package.
-func getStartOptions(*http.Request) trace.StartOptions {
- return trace.StartOptions{
- Sampler: sampler,
- }
-}
-
-// initStats registers the views for the http metrics
-func initStats() (err error) {
- clientViews := []*view.View{
- ochttp.ClientCompletedCount,
- ochttp.ClientRoundtripLatencyDistribution,
- ochttp.ClientReceivedBytesDistribution,
- ochttp.ClientSentBytesDistribution,
- }
- for _, cv := range clientViews {
- vn := fmt.Sprintf("Azure/go-autorest/tracing-%s", cv.Name)
- views[vn] = cv.WithName(vn)
- err = view.Register(views[vn])
- if err != nil {
- return err
- }
- }
- return
-}
-
-// disableStats will unregister the previously registered metrics
-func disableStats() {
- for _, v := range views {
- view.Unregister(v)
- }
-}
-
-// StartSpan starts a trace span
+// StartSpan starts a trace span with the specified name, associating it with the
+// provided context. Has no effect if a Tracer has not been registered.
func StartSpan(ctx context.Context, name string) context.Context {
- ctx, _ = trace.StartSpan(ctx, name, trace.WithSampler(sampler))
+ if tracer != nil {
+ return tracer.StartSpan(ctx, name)
+ }
return ctx
}
-// EndSpan ends a previously started span stored in the context
+// EndSpan ends a previously started span stored in the context.
+// Has no effect if a Tracer has not been registered.
func EndSpan(ctx context.Context, httpStatusCode int, err error) {
- span := trace.FromContext(ctx)
-
- if span == nil {
- return
- }
-
- if err != nil {
- span.SetStatus(trace.Status{Message: err.Error(), Code: toTraceStatusCode(httpStatusCode)})
- }
- span.End()
-}
-
-// toTraceStatusCode converts HTTP Codes to OpenCensus codes as defined
-// at https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/HTTP.md#status
-func toTraceStatusCode(httpStatusCode int) int32 {
- switch {
- case http.StatusOK <= httpStatusCode && httpStatusCode < http.StatusBadRequest:
- return trace.StatusCodeOK
- case httpStatusCode == http.StatusBadRequest:
- return trace.StatusCodeInvalidArgument
- case httpStatusCode == http.StatusUnauthorized: // 401 is actually unauthenticated.
- return trace.StatusCodeUnauthenticated
- case httpStatusCode == http.StatusForbidden:
- return trace.StatusCodePermissionDenied
- case httpStatusCode == http.StatusNotFound:
- return trace.StatusCodeNotFound
- case httpStatusCode == http.StatusTooManyRequests:
- return trace.StatusCodeResourceExhausted
- case httpStatusCode == 499:
- return trace.StatusCodeCancelled
- case httpStatusCode == http.StatusNotImplemented:
- return trace.StatusCodeUnimplemented
- case httpStatusCode == http.StatusServiceUnavailable:
- return trace.StatusCodeUnavailable
- case httpStatusCode == http.StatusGatewayTimeout:
- return trace.StatusCodeDeadlineExceeded
- default:
- return trace.StatusCodeUnknown
+ if tracer != nil {
+ tracer.EndSpan(ctx, httpStatusCode, err)
}
}
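
The hunk above replaces go-autorest's built-in OpenCensus wiring with a pluggable Tracer interface, which is why the opencensus-proto vendor files below can be dropped entirely. As a minimal sketch (not part of this diff; the logTracer type and its log output are illustrative assumptions), downstream code could satisfy and register the new interface like this:

```go
package main

import (
	"context"
	"log"
	"net/http"

	"github.com/Azure/go-autorest/tracing"
)

// logTracer is a hypothetical Tracer that only logs span boundaries; a real
// OpenCensus-backed implementation would be wired in the same way, via Register.
type logTracer struct{}

func (logTracer) NewTransport(base *http.Transport) http.RoundTripper {
	// No instrumentation in this sketch; pass requests straight to the base transport.
	return base
}

func (logTracer) StartSpan(ctx context.Context, name string) context.Context {
	log.Printf("start span: %s", name)
	return ctx
}

func (logTracer) EndSpan(ctx context.Context, httpStatusCode int, err error) {
	log.Printf("end span: status=%d err=%v", httpStatusCode, err)
}

func main() {
	// Registering a Tracer makes tracing.IsEnabled() return true, so the SDK's
	// StartSpan/EndSpan/NewTransport helpers delegate to it; Register(nil) disables it again.
	tracing.Register(logTracer{})
	_ = tracing.IsEnabled() // true after Register
}
```

With no Tracer registered, StartSpan and EndSpan become no-ops and NewTransport returns nil, so the SDK no longer pulls in OpenCensus unless a consumer opts in.
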
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/AUTHORS b/vendor/github.com/census-instrumentation/opencensus-proto/AUTHORS
deleted file mode 100644
index e068e731e..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/AUTHORS
+++ /dev/null
@@ -1 +0,0 @@
-Google Inc.
\ No newline at end of file
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/LICENSE b/vendor/github.com/census-instrumentation/opencensus-proto/LICENSE
deleted file mode 100644
index d64569567..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/common/v1/common.pb.go b/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/common/v1/common.pb.go
deleted file mode 100644
index 12b578d06..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/common/v1/common.pb.go
+++ /dev/null
@@ -1,356 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: opencensus/proto/agent/common/v1/common.proto
-
-package v1
-
-import (
- fmt "fmt"
- proto "github.com/golang/protobuf/proto"
- timestamp "github.com/golang/protobuf/ptypes/timestamp"
- math "math"
-)
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
-
-type LibraryInfo_Language int32
-
-const (
- LibraryInfo_LANGUAGE_UNSPECIFIED LibraryInfo_Language = 0
- LibraryInfo_CPP LibraryInfo_Language = 1
- LibraryInfo_C_SHARP LibraryInfo_Language = 2
- LibraryInfo_ERLANG LibraryInfo_Language = 3
- LibraryInfo_GO_LANG LibraryInfo_Language = 4
- LibraryInfo_JAVA LibraryInfo_Language = 5
- LibraryInfo_NODE_JS LibraryInfo_Language = 6
- LibraryInfo_PHP LibraryInfo_Language = 7
- LibraryInfo_PYTHON LibraryInfo_Language = 8
- LibraryInfo_RUBY LibraryInfo_Language = 9
-)
-
-var LibraryInfo_Language_name = map[int32]string{
- 0: "LANGUAGE_UNSPECIFIED",
- 1: "CPP",
- 2: "C_SHARP",
- 3: "ERLANG",
- 4: "GO_LANG",
- 5: "JAVA",
- 6: "NODE_JS",
- 7: "PHP",
- 8: "PYTHON",
- 9: "RUBY",
-}
-
-var LibraryInfo_Language_value = map[string]int32{
- "LANGUAGE_UNSPECIFIED": 0,
- "CPP": 1,
- "C_SHARP": 2,
- "ERLANG": 3,
- "GO_LANG": 4,
- "JAVA": 5,
- "NODE_JS": 6,
- "PHP": 7,
- "PYTHON": 8,
- "RUBY": 9,
-}
-
-func (x LibraryInfo_Language) String() string {
- return proto.EnumName(LibraryInfo_Language_name, int32(x))
-}
-
-func (LibraryInfo_Language) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_126c72ed8a252c84, []int{2, 0}
-}
-
-// Identifier metadata of the Node that produces the span or tracing data.
-// Note, this is not the metadata about the Node or service that is described by associated spans.
-// In the future we plan to extend the identifier proto definition to support
-// additional information (e.g cloud id, etc.)
-type Node struct {
- // Identifier that uniquely identifies a process within a VM/container.
- Identifier *ProcessIdentifier `protobuf:"bytes,1,opt,name=identifier,proto3" json:"identifier,omitempty"`
- // Information on the OpenCensus Library that initiates the stream.
- LibraryInfo *LibraryInfo `protobuf:"bytes,2,opt,name=library_info,json=libraryInfo,proto3" json:"library_info,omitempty"`
- // Additional information on service.
- ServiceInfo *ServiceInfo `protobuf:"bytes,3,opt,name=service_info,json=serviceInfo,proto3" json:"service_info,omitempty"`
- // Additional attributes.
- Attributes map[string]string `protobuf:"bytes,4,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Node) Reset() { *m = Node{} }
-func (m *Node) String() string { return proto.CompactTextString(m) }
-func (*Node) ProtoMessage() {}
-func (*Node) Descriptor() ([]byte, []int) {
- return fileDescriptor_126c72ed8a252c84, []int{0}
-}
-
-func (m *Node) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Node.Unmarshal(m, b)
-}
-func (m *Node) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Node.Marshal(b, m, deterministic)
-}
-func (m *Node) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Node.Merge(m, src)
-}
-func (m *Node) XXX_Size() int {
- return xxx_messageInfo_Node.Size(m)
-}
-func (m *Node) XXX_DiscardUnknown() {
- xxx_messageInfo_Node.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Node proto.InternalMessageInfo
-
-func (m *Node) GetIdentifier() *ProcessIdentifier {
- if m != nil {
- return m.Identifier
- }
- return nil
-}
-
-func (m *Node) GetLibraryInfo() *LibraryInfo {
- if m != nil {
- return m.LibraryInfo
- }
- return nil
-}
-
-func (m *Node) GetServiceInfo() *ServiceInfo {
- if m != nil {
- return m.ServiceInfo
- }
- return nil
-}
-
-func (m *Node) GetAttributes() map[string]string {
- if m != nil {
- return m.Attributes
- }
- return nil
-}
-
-// Identifier that uniquely identifies a process within a VM/container.
-type ProcessIdentifier struct {
- // The host name. Usually refers to the machine/container name.
- // For example: os.Hostname() in Go, socket.gethostname() in Python.
- HostName string `protobuf:"bytes,1,opt,name=host_name,json=hostName,proto3" json:"host_name,omitempty"`
- // Process id.
- Pid uint32 `protobuf:"varint,2,opt,name=pid,proto3" json:"pid,omitempty"`
- // Start time of this ProcessIdentifier. Represented in epoch time.
- StartTimestamp *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_timestamp,json=startTimestamp,proto3" json:"start_timestamp,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *ProcessIdentifier) Reset() { *m = ProcessIdentifier{} }
-func (m *ProcessIdentifier) String() string { return proto.CompactTextString(m) }
-func (*ProcessIdentifier) ProtoMessage() {}
-func (*ProcessIdentifier) Descriptor() ([]byte, []int) {
- return fileDescriptor_126c72ed8a252c84, []int{1}
-}
-
-func (m *ProcessIdentifier) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ProcessIdentifier.Unmarshal(m, b)
-}
-func (m *ProcessIdentifier) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ProcessIdentifier.Marshal(b, m, deterministic)
-}
-func (m *ProcessIdentifier) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ProcessIdentifier.Merge(m, src)
-}
-func (m *ProcessIdentifier) XXX_Size() int {
- return xxx_messageInfo_ProcessIdentifier.Size(m)
-}
-func (m *ProcessIdentifier) XXX_DiscardUnknown() {
- xxx_messageInfo_ProcessIdentifier.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_ProcessIdentifier proto.InternalMessageInfo
-
-func (m *ProcessIdentifier) GetHostName() string {
- if m != nil {
- return m.HostName
- }
- return ""
-}
-
-func (m *ProcessIdentifier) GetPid() uint32 {
- if m != nil {
- return m.Pid
- }
- return 0
-}
-
-func (m *ProcessIdentifier) GetStartTimestamp() *timestamp.Timestamp {
- if m != nil {
- return m.StartTimestamp
- }
- return nil
-}
-
-// Information on OpenCensus Library.
-type LibraryInfo struct {
- // Language of OpenCensus Library.
- Language LibraryInfo_Language `protobuf:"varint,1,opt,name=language,proto3,enum=opencensus.proto.agent.common.v1.LibraryInfo_Language" json:"language,omitempty"`
- // Version of Agent exporter of Library.
- ExporterVersion string `protobuf:"bytes,2,opt,name=exporter_version,json=exporterVersion,proto3" json:"exporter_version,omitempty"`
- // Version of OpenCensus Library.
- CoreLibraryVersion string `protobuf:"bytes,3,opt,name=core_library_version,json=coreLibraryVersion,proto3" json:"core_library_version,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *LibraryInfo) Reset() { *m = LibraryInfo{} }
-func (m *LibraryInfo) String() string { return proto.CompactTextString(m) }
-func (*LibraryInfo) ProtoMessage() {}
-func (*LibraryInfo) Descriptor() ([]byte, []int) {
- return fileDescriptor_126c72ed8a252c84, []int{2}
-}
-
-func (m *LibraryInfo) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_LibraryInfo.Unmarshal(m, b)
-}
-func (m *LibraryInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_LibraryInfo.Marshal(b, m, deterministic)
-}
-func (m *LibraryInfo) XXX_Merge(src proto.Message) {
- xxx_messageInfo_LibraryInfo.Merge(m, src)
-}
-func (m *LibraryInfo) XXX_Size() int {
- return xxx_messageInfo_LibraryInfo.Size(m)
-}
-func (m *LibraryInfo) XXX_DiscardUnknown() {
- xxx_messageInfo_LibraryInfo.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_LibraryInfo proto.InternalMessageInfo
-
-func (m *LibraryInfo) GetLanguage() LibraryInfo_Language {
- if m != nil {
- return m.Language
- }
- return LibraryInfo_LANGUAGE_UNSPECIFIED
-}
-
-func (m *LibraryInfo) GetExporterVersion() string {
- if m != nil {
- return m.ExporterVersion
- }
- return ""
-}
-
-func (m *LibraryInfo) GetCoreLibraryVersion() string {
- if m != nil {
- return m.CoreLibraryVersion
- }
- return ""
-}
-
-// Additional service information.
-type ServiceInfo struct {
- // Name of the service.
- Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *ServiceInfo) Reset() { *m = ServiceInfo{} }
-func (m *ServiceInfo) String() string { return proto.CompactTextString(m) }
-func (*ServiceInfo) ProtoMessage() {}
-func (*ServiceInfo) Descriptor() ([]byte, []int) {
- return fileDescriptor_126c72ed8a252c84, []int{3}
-}
-
-func (m *ServiceInfo) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ServiceInfo.Unmarshal(m, b)
-}
-func (m *ServiceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ServiceInfo.Marshal(b, m, deterministic)
-}
-func (m *ServiceInfo) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ServiceInfo.Merge(m, src)
-}
-func (m *ServiceInfo) XXX_Size() int {
- return xxx_messageInfo_ServiceInfo.Size(m)
-}
-func (m *ServiceInfo) XXX_DiscardUnknown() {
- xxx_messageInfo_ServiceInfo.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_ServiceInfo proto.InternalMessageInfo
-
-func (m *ServiceInfo) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
-
-func init() {
- proto.RegisterEnum("opencensus.proto.agent.common.v1.LibraryInfo_Language", LibraryInfo_Language_name, LibraryInfo_Language_value)
- proto.RegisterType((*Node)(nil), "opencensus.proto.agent.common.v1.Node")
- proto.RegisterMapType((map[string]string)(nil), "opencensus.proto.agent.common.v1.Node.AttributesEntry")
- proto.RegisterType((*ProcessIdentifier)(nil), "opencensus.proto.agent.common.v1.ProcessIdentifier")
- proto.RegisterType((*LibraryInfo)(nil), "opencensus.proto.agent.common.v1.LibraryInfo")
- proto.RegisterType((*ServiceInfo)(nil), "opencensus.proto.agent.common.v1.ServiceInfo")
-}
-
-func init() {
- proto.RegisterFile("opencensus/proto/agent/common/v1/common.proto", fileDescriptor_126c72ed8a252c84)
-}
-
-var fileDescriptor_126c72ed8a252c84 = []byte{
- // 590 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0x4f, 0x4f, 0xdb, 0x3e,
- 0x1c, 0xc6, 0x7f, 0x69, 0x0a, 0xb4, 0xdf, 0xfc, 0x06, 0x99, 0xc5, 0xa1, 0x62, 0x87, 0xb1, 0xee,
- 0xc2, 0x0e, 0x4d, 0x06, 0x48, 0xd3, 0x34, 0x69, 0x87, 0x52, 0x3a, 0x28, 0x42, 0x25, 0x72, 0x01,
- 0x89, 0x5d, 0xa2, 0xb4, 0xb8, 0xc1, 0x5a, 0x63, 0x57, 0xb6, 0x53, 0x8d, 0xd3, 0x8e, 0xd3, 0xde,
- 0xc0, 0x5e, 0xd4, 0x5e, 0xd5, 0x64, 0x3b, 0x69, 0xa3, 0x71, 0x28, 0xb7, 0xef, 0x9f, 0xe7, 0xf9,
- 0x38, 0x7a, 0x6c, 0x05, 0x3a, 0x7c, 0x4e, 0xd8, 0x84, 0x30, 0x99, 0xcb, 0x70, 0x2e, 0xb8, 0xe2,
- 0x61, 0x92, 0x12, 0xa6, 0xc2, 0x09, 0xcf, 0x32, 0xce, 0xc2, 0xc5, 0x61, 0x51, 0x05, 0x66, 0x89,
- 0xf6, 0x57, 0x72, 0x3b, 0x09, 0x8c, 0x3c, 0x28, 0x44, 0x8b, 0xc3, 0xbd, 0xd7, 0x29, 0xe7, 0xe9,
- 0x8c, 0x58, 0xd8, 0x38, 0x9f, 0x86, 0x8a, 0x66, 0x44, 0xaa, 0x24, 0x9b, 0x5b, 0x43, 0xfb, 0xb7,
- 0x0b, 0xf5, 0x21, 0xbf, 0x27, 0x68, 0x04, 0x40, 0xef, 0x09, 0x53, 0x74, 0x4a, 0x89, 0x68, 0x39,
- 0xfb, 0xce, 0x81, 0x77, 0x74, 0x1c, 0xac, 0x3b, 0x20, 0x88, 0x04, 0x9f, 0x10, 0x29, 0x07, 0x4b,
- 0x2b, 0xae, 0x60, 0x50, 0x04, 0xff, 0xcf, 0xe8, 0x58, 0x24, 0xe2, 0x31, 0xa6, 0x6c, 0xca, 0x5b,
- 0x35, 0x83, 0xed, 0xac, 0xc7, 0x5e, 0x5a, 0xd7, 0x80, 0x4d, 0x39, 0xf6, 0x66, 0xab, 0x46, 0x13,
- 0x25, 0x11, 0x0b, 0x3a, 0x21, 0x96, 0xe8, 0x3e, 0x97, 0x38, 0xb2, 0x2e, 0x4b, 0x94, 0xab, 0x06,
- 0xdd, 0x02, 0x24, 0x4a, 0x09, 0x3a, 0xce, 0x15, 0x91, 0xad, 0xfa, 0xbe, 0x7b, 0xe0, 0x1d, 0x7d,
- 0x58, 0xcf, 0xd3, 0xa1, 0x05, 0xdd, 0xa5, 0xb1, 0xcf, 0x94, 0x78, 0xc4, 0x15, 0xd2, 0xde, 0x67,
- 0xd8, 0xf9, 0x67, 0x8d, 0x7c, 0x70, 0xbf, 0x91, 0x47, 0x13, 0x6e, 0x13, 0xeb, 0x12, 0xed, 0xc2,
- 0xc6, 0x22, 0x99, 0xe5, 0xc4, 0x24, 0xd3, 0xc4, 0xb6, 0xf9, 0x54, 0xfb, 0xe8, 0xb4, 0x7f, 0x3a,
- 0xf0, 0xf2, 0x49, 0xb8, 0xe8, 0x15, 0x34, 0x1f, 0xb8, 0x54, 0x31, 0x4b, 0x32, 0x52, 0x70, 0x1a,
- 0x7a, 0x30, 0x4c, 0x32, 0xa2, 0xf1, 0x73, 0x7a, 0x6f, 0x50, 0x2f, 0xb0, 0x2e, 0x51, 0x0f, 0x76,
- 0xa4, 0x4a, 0x84, 0x8a, 0x97, 0xd7, 0x5e, 0x04, 0xb6, 0x17, 0xd8, 0x87, 0x11, 0x94, 0x0f, 0x23,
- 0xb8, 0x2e, 0x15, 0x78, 0xdb, 0x58, 0x96, 0x7d, 0xfb, 0x4f, 0x0d, 0xbc, 0xca, 0x7d, 0x20, 0x0c,
- 0x8d, 0x59, 0xc2, 0xd2, 0x3c, 0x49, 0xed, 0x27, 0x6c, 0x3f, 0x27, 0xae, 0x0a, 0x20, 0xb8, 0x2c,
- 0xdc, 0x78, 0xc9, 0x41, 0xef, 0xc0, 0x27, 0xdf, 0xe7, 0x5c, 0x28, 0x22, 0xe2, 0x05, 0x11, 0x92,
- 0x72, 0x56, 0x44, 0xb2, 0x53, 0xce, 0x6f, 0xed, 0x18, 0xbd, 0x87, 0xdd, 0x09, 0x17, 0x24, 0x2e,
- 0x1f, 0x56, 0x29, 0x77, 0x8d, 0x1c, 0xe9, 0x5d, 0x71, 0x58, 0xe1, 0x68, 0xff, 0x72, 0xa0, 0x51,
- 0x9e, 0x89, 0x5a, 0xb0, 0x7b, 0xd9, 0x1d, 0x9e, 0xdd, 0x74, 0xcf, 0xfa, 0xf1, 0xcd, 0x70, 0x14,
- 0xf5, 0x7b, 0x83, 0x2f, 0x83, 0xfe, 0xa9, 0xff, 0x1f, 0xda, 0x02, 0xb7, 0x17, 0x45, 0xbe, 0x83,
- 0x3c, 0xd8, 0xea, 0xc5, 0xa3, 0xf3, 0x2e, 0x8e, 0xfc, 0x1a, 0x02, 0xd8, 0xec, 0x63, 0xed, 0xf0,
- 0x5d, 0xbd, 0x38, 0xbb, 0x8a, 0x4d, 0x53, 0x47, 0x0d, 0xa8, 0x5f, 0x74, 0x6f, 0xbb, 0xfe, 0x86,
- 0x1e, 0x0f, 0xaf, 0x4e, 0xfb, 0xf1, 0xc5, 0xc8, 0xdf, 0xd4, 0x94, 0xe8, 0x3c, 0xf2, 0xb7, 0xb4,
- 0x31, 0xba, 0xbb, 0x3e, 0xbf, 0x1a, 0xfa, 0x0d, 0xad, 0xc5, 0x37, 0x27, 0x77, 0x7e, 0xb3, 0xfd,
- 0x06, 0xbc, 0xca, 0x4b, 0x44, 0x08, 0xea, 0x95, 0xab, 0x34, 0xf5, 0xc9, 0x0f, 0x78, 0x4b, 0xf9,
- 0xda, 0x44, 0x4f, 0xbc, 0x9e, 0x29, 0x23, 0xbd, 0x8c, 0x9c, 0xaf, 0x83, 0x94, 0xaa, 0x87, 0x7c,
- 0xac, 0x05, 0xa1, 0xf5, 0x75, 0x28, 0x93, 0x4a, 0xe4, 0x19, 0x61, 0x2a, 0x51, 0x94, 0xb3, 0x70,
- 0x85, 0xec, 0xd8, 0x9f, 0x4b, 0x4a, 0x58, 0x27, 0x7d, 0xf2, 0x8f, 0x19, 0x6f, 0x9a, 0xed, 0xf1,
- 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x94, 0xe5, 0x77, 0x76, 0x8e, 0x04, 0x00, 0x00,
-}
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/metrics/v1/metrics_service.pb.go b/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/metrics/v1/metrics_service.pb.go
deleted file mode 100644
index 801212d92..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/metrics/v1/metrics_service.pb.go
+++ /dev/null
@@ -1,264 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: opencensus/proto/agent/metrics/v1/metrics_service.proto
-
-package v1
-
-import (
- context "context"
- fmt "fmt"
- v1 "github.com/census-instrumentation/opencensus-proto/gen-go/agent/common/v1"
- v11 "github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1"
- v12 "github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1"
- proto "github.com/golang/protobuf/proto"
- grpc "google.golang.org/grpc"
- math "math"
-)
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
-
-type ExportMetricsServiceRequest struct {
- // This is required only in the first message on the stream or if the
- // previous sent ExportMetricsServiceRequest message has a different Node (e.g.
- // when the same RPC is used to send Metrics from multiple Applications).
- Node *v1.Node `protobuf:"bytes,1,opt,name=node,proto3" json:"node,omitempty"`
- // A list of metrics that belong to the last received Node.
- Metrics []*v11.Metric `protobuf:"bytes,2,rep,name=metrics,proto3" json:"metrics,omitempty"`
- // The resource for the metrics in this message that do not have an explicit
- // resource set.
- // If unset, the most recently set resource in the RPC stream applies. It is
- // valid to never be set within a stream, e.g. when no resource info is known
- // at all or when all sent metrics have an explicit resource set.
- Resource *v12.Resource `protobuf:"bytes,3,opt,name=resource,proto3" json:"resource,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *ExportMetricsServiceRequest) Reset() { *m = ExportMetricsServiceRequest{} }
-func (m *ExportMetricsServiceRequest) String() string { return proto.CompactTextString(m) }
-func (*ExportMetricsServiceRequest) ProtoMessage() {}
-func (*ExportMetricsServiceRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_47e253a956287d04, []int{0}
-}
-
-func (m *ExportMetricsServiceRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ExportMetricsServiceRequest.Unmarshal(m, b)
-}
-func (m *ExportMetricsServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ExportMetricsServiceRequest.Marshal(b, m, deterministic)
-}
-func (m *ExportMetricsServiceRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ExportMetricsServiceRequest.Merge(m, src)
-}
-func (m *ExportMetricsServiceRequest) XXX_Size() int {
- return xxx_messageInfo_ExportMetricsServiceRequest.Size(m)
-}
-func (m *ExportMetricsServiceRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_ExportMetricsServiceRequest.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_ExportMetricsServiceRequest proto.InternalMessageInfo
-
-func (m *ExportMetricsServiceRequest) GetNode() *v1.Node {
- if m != nil {
- return m.Node
- }
- return nil
-}
-
-func (m *ExportMetricsServiceRequest) GetMetrics() []*v11.Metric {
- if m != nil {
- return m.Metrics
- }
- return nil
-}
-
-func (m *ExportMetricsServiceRequest) GetResource() *v12.Resource {
- if m != nil {
- return m.Resource
- }
- return nil
-}
-
-type ExportMetricsServiceResponse struct {
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *ExportMetricsServiceResponse) Reset() { *m = ExportMetricsServiceResponse{} }
-func (m *ExportMetricsServiceResponse) String() string { return proto.CompactTextString(m) }
-func (*ExportMetricsServiceResponse) ProtoMessage() {}
-func (*ExportMetricsServiceResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_47e253a956287d04, []int{1}
-}
-
-func (m *ExportMetricsServiceResponse) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ExportMetricsServiceResponse.Unmarshal(m, b)
-}
-func (m *ExportMetricsServiceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ExportMetricsServiceResponse.Marshal(b, m, deterministic)
-}
-func (m *ExportMetricsServiceResponse) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ExportMetricsServiceResponse.Merge(m, src)
-}
-func (m *ExportMetricsServiceResponse) XXX_Size() int {
- return xxx_messageInfo_ExportMetricsServiceResponse.Size(m)
-}
-func (m *ExportMetricsServiceResponse) XXX_DiscardUnknown() {
- xxx_messageInfo_ExportMetricsServiceResponse.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_ExportMetricsServiceResponse proto.InternalMessageInfo
-
-func init() {
- proto.RegisterType((*ExportMetricsServiceRequest)(nil), "opencensus.proto.agent.metrics.v1.ExportMetricsServiceRequest")
- proto.RegisterType((*ExportMetricsServiceResponse)(nil), "opencensus.proto.agent.metrics.v1.ExportMetricsServiceResponse")
-}
-
-func init() {
- proto.RegisterFile("opencensus/proto/agent/metrics/v1/metrics_service.proto", fileDescriptor_47e253a956287d04)
-}
-
-var fileDescriptor_47e253a956287d04 = []byte{
- // 340 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x92, 0xc1, 0x4a, 0xf3, 0x40,
- 0x14, 0x85, 0xff, 0xf9, 0x2b, 0x55, 0xa6, 0xe0, 0x62, 0xdc, 0x94, 0x2a, 0x52, 0xab, 0x48, 0x45,
- 0x32, 0x63, 0xea, 0x42, 0x10, 0x54, 0x28, 0xb8, 0x11, 0x94, 0x12, 0x77, 0x6e, 0xa4, 0x4d, 0x2f,
- 0x71, 0x16, 0x99, 0x1b, 0x67, 0x26, 0xc1, 0x57, 0x70, 0xe5, 0x3b, 0xf8, 0x5c, 0x3e, 0x8c, 0x24,
- 0x93, 0xb4, 0x94, 0x18, 0x0b, 0xee, 0x2e, 0x99, 0xf3, 0x9d, 0x9c, 0x33, 0x73, 0xe9, 0x05, 0x26,
- 0xa0, 0x42, 0x50, 0x26, 0x35, 0x22, 0xd1, 0x68, 0x51, 0x4c, 0x23, 0x50, 0x56, 0xc4, 0x60, 0xb5,
- 0x0c, 0x8d, 0xc8, 0xfc, 0x6a, 0x7c, 0x36, 0xa0, 0x33, 0x19, 0x02, 0x2f, 0x64, 0xec, 0x60, 0x09,
- 0xba, 0x2f, 0xbc, 0x00, 0x79, 0xa9, 0xe6, 0x99, 0xdf, 0xf3, 0x1a, 0xbc, 0x43, 0x8c, 0x63, 0x54,
- 0xb9, 0xb5, 0x9b, 0x1c, 0xdf, 0x3b, 0xa9, 0xc9, 0xeb, 0x21, 0x4a, 0xe9, 0x69, 0x4d, 0xaa, 0xc1,
- 0x60, 0xaa, 0x43, 0xc8, 0xb5, 0xd5, 0xec, 0xc4, 0x83, 0x2f, 0x42, 0x77, 0x6f, 0xdf, 0x12, 0xd4,
- 0xf6, 0xde, 0x99, 0x3c, 0xba, 0x22, 0x01, 0xbc, 0xa6, 0x60, 0x2c, 0xbb, 0xa4, 0x1b, 0x0a, 0xe7,
- 0xd0, 0x25, 0x7d, 0x32, 0xec, 0x8c, 0x8e, 0x79, 0x43, 0xb1, 0x32, 0x6b, 0xe6, 0xf3, 0x07, 0x9c,
- 0x43, 0x50, 0x30, 0xec, 0x8a, 0x6e, 0x96, 0xc9, 0xba, 0xff, 0xfb, 0xad, 0x61, 0x67, 0x74, 0x58,
- 0xc7, 0x97, 0x37, 0xc2, 0x5d, 0x80, 0xa0, 0x62, 0xd8, 0x98, 0x6e, 0x55, 0x61, 0xbb, 0xad, 0xa6,
- 0xdf, 0x2f, 0xea, 0x64, 0x3e, 0x0f, 0xca, 0x39, 0x58, 0x70, 0x83, 0x7d, 0xba, 0xf7, 0x73, 0x3b,
- 0x93, 0xa0, 0x32, 0x30, 0xfa, 0x24, 0x74, 0x7b, 0xf5, 0x88, 0x7d, 0x10, 0xda, 0x76, 0x0c, 0xbb,
- 0xe6, 0x6b, 0xdf, 0x91, 0xff, 0x72, 0x79, 0xbd, 0x9b, 0x3f, 0xf3, 0x2e, 0xde, 0xe0, 0xdf, 0x90,
- 0x9c, 0x91, 0xf1, 0x3b, 0xa1, 0x47, 0x12, 0xd7, 0x7b, 0x8d, 0x77, 0x56, 0x6d, 0x26, 0xb9, 0x6a,
- 0x42, 0x9e, 0xee, 0x22, 0x69, 0x5f, 0xd2, 0x59, 0xfe, 0x48, 0xc2, 0x19, 0x78, 0x52, 0x19, 0xab,
- 0xd3, 0x18, 0x94, 0x9d, 0x5a, 0x89, 0x4a, 0x2c, 0xbd, 0x3d, 0xb7, 0x32, 0x11, 0x28, 0x2f, 0xaa,
- 0xef, 0xfb, 0xac, 0x5d, 0x1c, 0x9f, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x16, 0x61, 0x3b, 0xc3,
- 0x1b, 0x03, 0x00, 0x00,
-}
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ context.Context
-var _ grpc.ClientConn
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the grpc package it is being compiled against.
-const _ = grpc.SupportPackageIsVersion4
-
-// MetricsServiceClient is the client API for MetricsService service.
-//
-// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
-type MetricsServiceClient interface {
- // For performance reasons, it is recommended to keep this RPC
- // alive for the entire life of the application.
- Export(ctx context.Context, opts ...grpc.CallOption) (MetricsService_ExportClient, error)
-}
-
-type metricsServiceClient struct {
- cc *grpc.ClientConn
-}
-
-func NewMetricsServiceClient(cc *grpc.ClientConn) MetricsServiceClient {
- return &metricsServiceClient{cc}
-}
-
-func (c *metricsServiceClient) Export(ctx context.Context, opts ...grpc.CallOption) (MetricsService_ExportClient, error) {
- stream, err := c.cc.NewStream(ctx, &_MetricsService_serviceDesc.Streams[0], "/opencensus.proto.agent.metrics.v1.MetricsService/Export", opts...)
- if err != nil {
- return nil, err
- }
- x := &metricsServiceExportClient{stream}
- return x, nil
-}
-
-type MetricsService_ExportClient interface {
- Send(*ExportMetricsServiceRequest) error
- Recv() (*ExportMetricsServiceResponse, error)
- grpc.ClientStream
-}
-
-type metricsServiceExportClient struct {
- grpc.ClientStream
-}
-
-func (x *metricsServiceExportClient) Send(m *ExportMetricsServiceRequest) error {
- return x.ClientStream.SendMsg(m)
-}
-
-func (x *metricsServiceExportClient) Recv() (*ExportMetricsServiceResponse, error) {
- m := new(ExportMetricsServiceResponse)
- if err := x.ClientStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
-// MetricsServiceServer is the server API for MetricsService service.
-type MetricsServiceServer interface {
- // For performance reasons, it is recommended to keep this RPC
- // alive for the entire life of the application.
- Export(MetricsService_ExportServer) error
-}
-
-func RegisterMetricsServiceServer(s *grpc.Server, srv MetricsServiceServer) {
- s.RegisterService(&_MetricsService_serviceDesc, srv)
-}
-
-func _MetricsService_Export_Handler(srv interface{}, stream grpc.ServerStream) error {
- return srv.(MetricsServiceServer).Export(&metricsServiceExportServer{stream})
-}
-
-type MetricsService_ExportServer interface {
- Send(*ExportMetricsServiceResponse) error
- Recv() (*ExportMetricsServiceRequest, error)
- grpc.ServerStream
-}
-
-type metricsServiceExportServer struct {
- grpc.ServerStream
-}
-
-func (x *metricsServiceExportServer) Send(m *ExportMetricsServiceResponse) error {
- return x.ServerStream.SendMsg(m)
-}
-
-func (x *metricsServiceExportServer) Recv() (*ExportMetricsServiceRequest, error) {
- m := new(ExportMetricsServiceRequest)
- if err := x.ServerStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
-var _MetricsService_serviceDesc = grpc.ServiceDesc{
- ServiceName: "opencensus.proto.agent.metrics.v1.MetricsService",
- HandlerType: (*MetricsServiceServer)(nil),
- Methods: []grpc.MethodDesc{},
- Streams: []grpc.StreamDesc{
- {
- StreamName: "Export",
- Handler: _MetricsService_Export_Handler,
- ServerStreams: true,
- ClientStreams: true,
- },
- },
- Metadata: "opencensus/proto/agent/metrics/v1/metrics_service.proto",
-}
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/trace/v1/trace_service.pb.go b/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/trace/v1/trace_service.pb.go
deleted file mode 100644
index e7c49a387..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/trace/v1/trace_service.pb.go
+++ /dev/null
@@ -1,443 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: opencensus/proto/agent/trace/v1/trace_service.proto
-
-package v1
-
-import (
- context "context"
- fmt "fmt"
- v1 "github.com/census-instrumentation/opencensus-proto/gen-go/agent/common/v1"
- v12 "github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1"
- v11 "github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1"
- proto "github.com/golang/protobuf/proto"
- grpc "google.golang.org/grpc"
- math "math"
-)
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
-
-type CurrentLibraryConfig struct {
- // This is required only in the first message on the stream or if the
- // previous sent CurrentLibraryConfig message has a different Node (e.g.
- // when the same RPC is used to configure multiple Applications).
- Node *v1.Node `protobuf:"bytes,1,opt,name=node,proto3" json:"node,omitempty"`
- // Current configuration.
- Config *v11.TraceConfig `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *CurrentLibraryConfig) Reset() { *m = CurrentLibraryConfig{} }
-func (m *CurrentLibraryConfig) String() string { return proto.CompactTextString(m) }
-func (*CurrentLibraryConfig) ProtoMessage() {}
-func (*CurrentLibraryConfig) Descriptor() ([]byte, []int) {
- return fileDescriptor_7027f99caf7ac6a5, []int{0}
-}
-
-func (m *CurrentLibraryConfig) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_CurrentLibraryConfig.Unmarshal(m, b)
-}
-func (m *CurrentLibraryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_CurrentLibraryConfig.Marshal(b, m, deterministic)
-}
-func (m *CurrentLibraryConfig) XXX_Merge(src proto.Message) {
- xxx_messageInfo_CurrentLibraryConfig.Merge(m, src)
-}
-func (m *CurrentLibraryConfig) XXX_Size() int {
- return xxx_messageInfo_CurrentLibraryConfig.Size(m)
-}
-func (m *CurrentLibraryConfig) XXX_DiscardUnknown() {
- xxx_messageInfo_CurrentLibraryConfig.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_CurrentLibraryConfig proto.InternalMessageInfo
-
-func (m *CurrentLibraryConfig) GetNode() *v1.Node {
- if m != nil {
- return m.Node
- }
- return nil
-}
-
-func (m *CurrentLibraryConfig) GetConfig() *v11.TraceConfig {
- if m != nil {
- return m.Config
- }
- return nil
-}
-
-type UpdatedLibraryConfig struct {
- // This field is ignored when the RPC is used to configure only one Application.
- // This is required only in the first message on the stream or if the
- // previous sent UpdatedLibraryConfig message has a different Node.
- Node *v1.Node `protobuf:"bytes,1,opt,name=node,proto3" json:"node,omitempty"`
- // Requested updated configuration.
- Config *v11.TraceConfig `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *UpdatedLibraryConfig) Reset() { *m = UpdatedLibraryConfig{} }
-func (m *UpdatedLibraryConfig) String() string { return proto.CompactTextString(m) }
-func (*UpdatedLibraryConfig) ProtoMessage() {}
-func (*UpdatedLibraryConfig) Descriptor() ([]byte, []int) {
- return fileDescriptor_7027f99caf7ac6a5, []int{1}
-}
-
-func (m *UpdatedLibraryConfig) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_UpdatedLibraryConfig.Unmarshal(m, b)
-}
-func (m *UpdatedLibraryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_UpdatedLibraryConfig.Marshal(b, m, deterministic)
-}
-func (m *UpdatedLibraryConfig) XXX_Merge(src proto.Message) {
- xxx_messageInfo_UpdatedLibraryConfig.Merge(m, src)
-}
-func (m *UpdatedLibraryConfig) XXX_Size() int {
- return xxx_messageInfo_UpdatedLibraryConfig.Size(m)
-}
-func (m *UpdatedLibraryConfig) XXX_DiscardUnknown() {
- xxx_messageInfo_UpdatedLibraryConfig.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_UpdatedLibraryConfig proto.InternalMessageInfo
-
-func (m *UpdatedLibraryConfig) GetNode() *v1.Node {
- if m != nil {
- return m.Node
- }
- return nil
-}
-
-func (m *UpdatedLibraryConfig) GetConfig() *v11.TraceConfig {
- if m != nil {
- return m.Config
- }
- return nil
-}
-
-type ExportTraceServiceRequest struct {
- // This is required only in the first message on the stream or if the
- // previous sent ExportTraceServiceRequest message has a different Node (e.g.
- // when the same RPC is used to send Spans from multiple Applications).
- Node *v1.Node `protobuf:"bytes,1,opt,name=node,proto3" json:"node,omitempty"`
- // A list of Spans that belong to the last received Node.
- Spans []*v11.Span `protobuf:"bytes,2,rep,name=spans,proto3" json:"spans,omitempty"`
- // The resource for the spans in this message that do not have an explicit
- // resource set.
- // If unset, the most recently set resource in the RPC stream applies. It is
- // valid to never be set within a stream, e.g. when no resource info is known.
- Resource *v12.Resource `protobuf:"bytes,3,opt,name=resource,proto3" json:"resource,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *ExportTraceServiceRequest) Reset() { *m = ExportTraceServiceRequest{} }
-func (m *ExportTraceServiceRequest) String() string { return proto.CompactTextString(m) }
-func (*ExportTraceServiceRequest) ProtoMessage() {}
-func (*ExportTraceServiceRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_7027f99caf7ac6a5, []int{2}
-}
-
-func (m *ExportTraceServiceRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ExportTraceServiceRequest.Unmarshal(m, b)
-}
-func (m *ExportTraceServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ExportTraceServiceRequest.Marshal(b, m, deterministic)
-}
-func (m *ExportTraceServiceRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ExportTraceServiceRequest.Merge(m, src)
-}
-func (m *ExportTraceServiceRequest) XXX_Size() int {
- return xxx_messageInfo_ExportTraceServiceRequest.Size(m)
-}
-func (m *ExportTraceServiceRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_ExportTraceServiceRequest.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_ExportTraceServiceRequest proto.InternalMessageInfo
-
-func (m *ExportTraceServiceRequest) GetNode() *v1.Node {
- if m != nil {
- return m.Node
- }
- return nil
-}
-
-func (m *ExportTraceServiceRequest) GetSpans() []*v11.Span {
- if m != nil {
- return m.Spans
- }
- return nil
-}
-
-func (m *ExportTraceServiceRequest) GetResource() *v12.Resource {
- if m != nil {
- return m.Resource
- }
- return nil
-}
-
-type ExportTraceServiceResponse struct {
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *ExportTraceServiceResponse) Reset() { *m = ExportTraceServiceResponse{} }
-func (m *ExportTraceServiceResponse) String() string { return proto.CompactTextString(m) }
-func (*ExportTraceServiceResponse) ProtoMessage() {}
-func (*ExportTraceServiceResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_7027f99caf7ac6a5, []int{3}
-}
-
-func (m *ExportTraceServiceResponse) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ExportTraceServiceResponse.Unmarshal(m, b)
-}
-func (m *ExportTraceServiceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ExportTraceServiceResponse.Marshal(b, m, deterministic)
-}
-func (m *ExportTraceServiceResponse) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ExportTraceServiceResponse.Merge(m, src)
-}
-func (m *ExportTraceServiceResponse) XXX_Size() int {
- return xxx_messageInfo_ExportTraceServiceResponse.Size(m)
-}
-func (m *ExportTraceServiceResponse) XXX_DiscardUnknown() {
- xxx_messageInfo_ExportTraceServiceResponse.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_ExportTraceServiceResponse proto.InternalMessageInfo
-
-func init() {
- proto.RegisterType((*CurrentLibraryConfig)(nil), "opencensus.proto.agent.trace.v1.CurrentLibraryConfig")
- proto.RegisterType((*UpdatedLibraryConfig)(nil), "opencensus.proto.agent.trace.v1.UpdatedLibraryConfig")
- proto.RegisterType((*ExportTraceServiceRequest)(nil), "opencensus.proto.agent.trace.v1.ExportTraceServiceRequest")
- proto.RegisterType((*ExportTraceServiceResponse)(nil), "opencensus.proto.agent.trace.v1.ExportTraceServiceResponse")
-}
-
-func init() {
- proto.RegisterFile("opencensus/proto/agent/trace/v1/trace_service.proto", fileDescriptor_7027f99caf7ac6a5)
-}
-
-var fileDescriptor_7027f99caf7ac6a5 = []byte{
- // 423 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x54, 0xbf, 0x6b, 0xdb, 0x40,
- 0x14, 0xee, 0xd9, 0xad, 0x28, 0xe7, 0x2e, 0x15, 0x1d, 0x54, 0x51, 0xb0, 0x11, 0xb4, 0x18, 0x5a,
- 0x9d, 0x2a, 0x1b, 0x2f, 0x2e, 0x74, 0xb0, 0x29, 0x74, 0x28, 0xc5, 0xc8, 0xed, 0x92, 0xc5, 0xc8,
- 0xd2, 0x8b, 0xa2, 0xc1, 0x77, 0xca, 0xdd, 0x49, 0x24, 0x90, 0x2d, 0x43, 0xf6, 0x0c, 0xf9, 0xc3,
- 0xf2, 0x17, 0x05, 0xdd, 0xc9, 0x3f, 0x12, 0x5b, 0x11, 0x24, 0x4b, 0xb6, 0x87, 0xde, 0xf7, 0x7d,
- 0xf7, 0xbd, 0x7b, 0xdf, 0x09, 0x0f, 0x59, 0x06, 0x34, 0x02, 0x2a, 0x72, 0xe1, 0x65, 0x9c, 0x49,
- 0xe6, 0x85, 0x09, 0x50, 0xe9, 0x49, 0x1e, 0x46, 0xe0, 0x15, 0xbe, 0x2e, 0x16, 0x02, 0x78, 0x91,
- 0x46, 0x40, 0x14, 0xc4, 0xec, 0x6e, 0x49, 0xfa, 0x0b, 0x51, 0x24, 0xa2, 0xb0, 0xa4, 0xf0, 0x6d,
- 0xb7, 0x46, 0x35, 0x62, 0xab, 0x15, 0xa3, 0xa5, 0xac, 0xae, 0x34, 0xdb, 0xfe, 0xba, 0x07, 0xe7,
- 0x20, 0x58, 0xce, 0xb5, 0x83, 0x75, 0x5d, 0x81, 0x3f, 0xef, 0x81, 0xef, 0x7b, 0xad, 0x60, 0xdf,
- 0x1a, 0x60, 0x8b, 0x88, 0xd1, 0xe3, 0x34, 0xd1, 0x68, 0xe7, 0x1a, 0xe1, 0x0f, 0xd3, 0x9c, 0x73,
- 0xa0, 0xf2, 0x4f, 0xba, 0xe4, 0x21, 0x3f, 0x9f, 0xaa, 0xb6, 0x39, 0xc6, 0xaf, 0x29, 0x8b, 0xc1,
- 0x42, 0x3d, 0xd4, 0xef, 0x0c, 0xbe, 0x90, 0x9a, 0xc9, 0xab, 0x71, 0x0a, 0x9f, 0xfc, 0x65, 0x31,
- 0x04, 0x8a, 0x63, 0xfe, 0xc4, 0x86, 0x3e, 0xc4, 0x6a, 0xd5, 0xb1, 0xd7, 0x37, 0x46, 0xfe, 0x95,
- 0x85, 0x3e, 0x33, 0xa8, 0x58, 0xca, 0xd4, 0xff, 0x2c, 0x0e, 0x25, 0xc4, 0x2f, 0xc7, 0xd4, 0x2d,
- 0xc2, 0x1f, 0x7f, 0x9d, 0x65, 0x8c, 0x4b, 0xd5, 0x9d, 0xeb, 0x60, 0x04, 0x70, 0x9a, 0x83, 0x90,
- 0xcf, 0x72, 0x36, 0xc2, 0x6f, 0x44, 0x16, 0x52, 0x61, 0xb5, 0x7a, 0xed, 0x7e, 0x67, 0xd0, 0x7d,
- 0xc4, 0xd8, 0x3c, 0x0b, 0x69, 0xa0, 0xd1, 0xe6, 0x04, 0xbf, 0x5d, 0x27, 0xc4, 0x6a, 0xd7, 0x1d,
- 0xbb, 0xc9, 0x50, 0xe1, 0x93, 0xa0, 0xaa, 0x83, 0x0d, 0xcf, 0xf9, 0x84, 0xed, 0x43, 0x33, 0x89,
- 0x8c, 0x51, 0x01, 0x83, 0x9b, 0x16, 0x7e, 0xb7, 0xdb, 0x30, 0x2f, 0xb0, 0x51, 0x6d, 0x62, 0x44,
- 0x1a, 0x9e, 0x02, 0x39, 0x94, 0x2a, 0xbb, 0x99, 0x76, 0x68, 0xef, 0xce, 0xab, 0x3e, 0xfa, 0x8e,
- 0xcc, 0x2b, 0x84, 0x0d, 0xed, 0xd6, 0x1c, 0x37, 0xea, 0xd4, 0xae, 0xca, 0xfe, 0xf1, 0x24, 0xae,
- 0xbe, 0x12, 0xed, 0x64, 0x72, 0x89, 0xb0, 0x93, 0xb2, 0x26, 0x9d, 0xc9, 0xfb, 0x5d, 0x89, 0x59,
- 0x89, 0x98, 0xa1, 0xa3, 0xdf, 0x49, 0x2a, 0x4f, 0xf2, 0x65, 0x19, 0x05, 0x4f, 0x93, 0xdd, 0x94,
- 0x0a, 0xc9, 0xf3, 0x15, 0x50, 0x19, 0xca, 0x94, 0x51, 0x6f, 0xab, 0xeb, 0xea, 0x17, 0x9c, 0x00,
- 0x75, 0x93, 0x87, 0x7f, 0xa8, 0xa5, 0xa1, 0x9a, 0xc3, 0xbb, 0x00, 0x00, 0x00, 0xff, 0xff, 0xcf,
- 0x9c, 0x9b, 0xf7, 0xcb, 0x04, 0x00, 0x00,
-}
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ context.Context
-var _ grpc.ClientConn
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the grpc package it is being compiled against.
-const _ = grpc.SupportPackageIsVersion4
-
-// TraceServiceClient is the client API for TraceService service.
-//
-// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
-type TraceServiceClient interface {
- // After initialization, this RPC must be kept alive for the entire life of
- // the application. The agent pushes configs down to applications via a
- // stream.
- Config(ctx context.Context, opts ...grpc.CallOption) (TraceService_ConfigClient, error)
- // For performance reasons, it is recommended to keep this RPC
- // alive for the entire life of the application.
- Export(ctx context.Context, opts ...grpc.CallOption) (TraceService_ExportClient, error)
-}
-
-type traceServiceClient struct {
- cc *grpc.ClientConn
-}
-
-func NewTraceServiceClient(cc *grpc.ClientConn) TraceServiceClient {
- return &traceServiceClient{cc}
-}
-
-func (c *traceServiceClient) Config(ctx context.Context, opts ...grpc.CallOption) (TraceService_ConfigClient, error) {
- stream, err := c.cc.NewStream(ctx, &_TraceService_serviceDesc.Streams[0], "/opencensus.proto.agent.trace.v1.TraceService/Config", opts...)
- if err != nil {
- return nil, err
- }
- x := &traceServiceConfigClient{stream}
- return x, nil
-}
-
-type TraceService_ConfigClient interface {
- Send(*CurrentLibraryConfig) error
- Recv() (*UpdatedLibraryConfig, error)
- grpc.ClientStream
-}
-
-type traceServiceConfigClient struct {
- grpc.ClientStream
-}
-
-func (x *traceServiceConfigClient) Send(m *CurrentLibraryConfig) error {
- return x.ClientStream.SendMsg(m)
-}
-
-func (x *traceServiceConfigClient) Recv() (*UpdatedLibraryConfig, error) {
- m := new(UpdatedLibraryConfig)
- if err := x.ClientStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
-func (c *traceServiceClient) Export(ctx context.Context, opts ...grpc.CallOption) (TraceService_ExportClient, error) {
- stream, err := c.cc.NewStream(ctx, &_TraceService_serviceDesc.Streams[1], "/opencensus.proto.agent.trace.v1.TraceService/Export", opts...)
- if err != nil {
- return nil, err
- }
- x := &traceServiceExportClient{stream}
- return x, nil
-}
-
-type TraceService_ExportClient interface {
- Send(*ExportTraceServiceRequest) error
- Recv() (*ExportTraceServiceResponse, error)
- grpc.ClientStream
-}
-
-type traceServiceExportClient struct {
- grpc.ClientStream
-}
-
-func (x *traceServiceExportClient) Send(m *ExportTraceServiceRequest) error {
- return x.ClientStream.SendMsg(m)
-}
-
-func (x *traceServiceExportClient) Recv() (*ExportTraceServiceResponse, error) {
- m := new(ExportTraceServiceResponse)
- if err := x.ClientStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
-// TraceServiceServer is the server API for TraceService service.
-type TraceServiceServer interface {
- // After initialization, this RPC must be kept alive for the entire life of
- // the application. The agent pushes configs down to applications via a
- // stream.
- Config(TraceService_ConfigServer) error
- // For performance reasons, it is recommended to keep this RPC
- // alive for the entire life of the application.
- Export(TraceService_ExportServer) error
-}
-
-func RegisterTraceServiceServer(s *grpc.Server, srv TraceServiceServer) {
- s.RegisterService(&_TraceService_serviceDesc, srv)
-}
-
-func _TraceService_Config_Handler(srv interface{}, stream grpc.ServerStream) error {
- return srv.(TraceServiceServer).Config(&traceServiceConfigServer{stream})
-}
-
-type TraceService_ConfigServer interface {
- Send(*UpdatedLibraryConfig) error
- Recv() (*CurrentLibraryConfig, error)
- grpc.ServerStream
-}
-
-type traceServiceConfigServer struct {
- grpc.ServerStream
-}
-
-func (x *traceServiceConfigServer) Send(m *UpdatedLibraryConfig) error {
- return x.ServerStream.SendMsg(m)
-}
-
-func (x *traceServiceConfigServer) Recv() (*CurrentLibraryConfig, error) {
- m := new(CurrentLibraryConfig)
- if err := x.ServerStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
-func _TraceService_Export_Handler(srv interface{}, stream grpc.ServerStream) error {
- return srv.(TraceServiceServer).Export(&traceServiceExportServer{stream})
-}
-
-type TraceService_ExportServer interface {
- Send(*ExportTraceServiceResponse) error
- Recv() (*ExportTraceServiceRequest, error)
- grpc.ServerStream
-}
-
-type traceServiceExportServer struct {
- grpc.ServerStream
-}
-
-func (x *traceServiceExportServer) Send(m *ExportTraceServiceResponse) error {
- return x.ServerStream.SendMsg(m)
-}
-
-func (x *traceServiceExportServer) Recv() (*ExportTraceServiceRequest, error) {
- m := new(ExportTraceServiceRequest)
- if err := x.ServerStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
-
-var _TraceService_serviceDesc = grpc.ServiceDesc{
- ServiceName: "opencensus.proto.agent.trace.v1.TraceService",
- HandlerType: (*TraceServiceServer)(nil),
- Methods: []grpc.MethodDesc{},
- Streams: []grpc.StreamDesc{
- {
- StreamName: "Config",
- Handler: _TraceService_Config_Handler,
- ServerStreams: true,
- ClientStreams: true,
- },
- {
- StreamName: "Export",
- Handler: _TraceService_Export_Handler,
- ServerStreams: true,
- ClientStreams: true,
- },
- },
- Metadata: "opencensus/proto/agent/trace/v1/trace_service.proto",
-}
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/trace/v1/trace_service.pb.gw.go b/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/trace/v1/trace_service.pb.gw.go
deleted file mode 100644
index bd4b8a827..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/agent/trace/v1/trace_service.pb.gw.go
+++ /dev/null
@@ -1,154 +0,0 @@
-// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
-// source: opencensus/proto/agent/trace/v1/trace_service.proto
-
-/*
-Package v1 is a reverse proxy.
-
-It translates gRPC into RESTful JSON APIs.
-*/
-package v1
-
-import (
- "io"
- "net/http"
-
- "github.com/golang/protobuf/proto"
- "github.com/grpc-ecosystem/grpc-gateway/runtime"
- "github.com/grpc-ecosystem/grpc-gateway/utilities"
- "golang.org/x/net/context"
- "google.golang.org/grpc"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/grpclog"
- "google.golang.org/grpc/status"
-)
-
-var _ codes.Code
-var _ io.Reader
-var _ status.Status
-var _ = runtime.String
-var _ = utilities.NewDoubleArray
-
-func request_TraceService_Export_0(ctx context.Context, marshaler runtime.Marshaler, client TraceServiceClient, req *http.Request, pathParams map[string]string) (TraceService_ExportClient, runtime.ServerMetadata, error) {
- var metadata runtime.ServerMetadata
- stream, err := client.Export(ctx)
- if err != nil {
- grpclog.Infof("Failed to start streaming: %v", err)
- return nil, metadata, err
- }
- newReader, berr := utilities.IOReaderFactory(req.Body)
- if berr != nil {
- return nil, metadata, berr
- }
- dec := marshaler.NewDecoder(newReader())
- handleSend := func() error {
- var protoReq ExportTraceServiceRequest
- err := dec.Decode(&protoReq)
- if err == io.EOF {
- return err
- }
- if err != nil {
- grpclog.Infof("Failed to decode request: %v", err)
- return err
- }
- if err := stream.Send(&protoReq); err != nil {
- grpclog.Infof("Failed to send request: %v", err)
- return err
- }
- return nil
- }
- if err := handleSend(); err != nil {
- if cerr := stream.CloseSend(); cerr != nil {
- grpclog.Infof("Failed to terminate client stream: %v", cerr)
- }
- if err == io.EOF {
- return stream, metadata, nil
- }
- return nil, metadata, err
- }
- go func() {
- for {
- if err := handleSend(); err != nil {
- break
- }
- }
- if err := stream.CloseSend(); err != nil {
- grpclog.Infof("Failed to terminate client stream: %v", err)
- }
- }()
- header, err := stream.Header()
- if err != nil {
- grpclog.Infof("Failed to get header from client: %v", err)
- return nil, metadata, err
- }
- metadata.HeaderMD = header
- return stream, metadata, nil
-}
-
-// RegisterTraceServiceHandlerFromEndpoint is same as RegisterTraceServiceHandler but
-// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
-func RegisterTraceServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
- conn, err := grpc.Dial(endpoint, opts...)
- if err != nil {
- return err
- }
- defer func() {
- if err != nil {
- if cerr := conn.Close(); cerr != nil {
- grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
- }
- return
- }
- go func() {
- <-ctx.Done()
- if cerr := conn.Close(); cerr != nil {
- grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
- }
- }()
- }()
-
- return RegisterTraceServiceHandler(ctx, mux, conn)
-}
-
-// RegisterTraceServiceHandler registers the http handlers for service TraceService to "mux".
-// The handlers forward requests to the grpc endpoint over "conn".
-func RegisterTraceServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
- return RegisterTraceServiceHandlerClient(ctx, mux, NewTraceServiceClient(conn))
-}
-
-// RegisterTraceServiceHandlerClient registers the http handlers for service TraceService
-// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "TraceServiceClient".
-// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "TraceServiceClient"
-// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
-// "TraceServiceClient" to call the correct interceptors.
-func RegisterTraceServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client TraceServiceClient) error {
-
- mux.Handle("POST", pattern_TraceService_Export_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
- ctx, cancel := context.WithCancel(req.Context())
- defer cancel()
- inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
- rctx, err := runtime.AnnotateContext(ctx, mux, req)
- if err != nil {
- runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
- return
- }
- resp, md, err := request_TraceService_Export_0(rctx, inboundMarshaler, client, req, pathParams)
- ctx = runtime.NewServerMetadataContext(ctx, md)
- if err != nil {
- runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
- return
- }
-
- forward_TraceService_Export_0(ctx, mux, outboundMarshaler, w, req, func() (proto.Message, error) { return resp.Recv() }, mux.GetForwardResponseOptions()...)
-
- })
-
- return nil
-}
-
-var (
- pattern_TraceService_Export_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "trace"}, ""))
-)
-
-var (
- forward_TraceService_Export_0 = runtime.ForwardResponseStream
-)
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1/metrics.pb.go b/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1/metrics.pb.go
deleted file mode 100644
index 53b8aa99e..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1/metrics.pb.go
+++ /dev/null
@@ -1,1126 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: opencensus/proto/metrics/v1/metrics.proto
-
-package v1
-
-import (
- fmt "fmt"
- v1 "github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1"
- proto "github.com/golang/protobuf/proto"
- timestamp "github.com/golang/protobuf/ptypes/timestamp"
- wrappers "github.com/golang/protobuf/ptypes/wrappers"
- math "math"
-)
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
-
-// The kind of metric. It describes how the data is reported.
-//
-// A gauge is an instantaneous measurement of a value.
-//
-// A cumulative measurement is a value accumulated over a time interval. In
-// a time series, cumulative measurements should have the same start time,
-// increasing values and increasing end times, until an event resets the
-// cumulative value to zero and sets a new start time for the following
-// points.
-type MetricDescriptor_Type int32
-
-const (
- // Do not use this default value.
- MetricDescriptor_UNSPECIFIED MetricDescriptor_Type = 0
- // Integer gauge. The value can go both up and down.
- MetricDescriptor_GAUGE_INT64 MetricDescriptor_Type = 1
- // Floating point gauge. The value can go both up and down.
- MetricDescriptor_GAUGE_DOUBLE MetricDescriptor_Type = 2
- // Distribution gauge measurement. The count and sum can go both up and
- // down. Recorded values are always >= 0.
- // Used in scenarios like a snapshot of time the current items in a queue
- // have spent there.
- MetricDescriptor_GAUGE_DISTRIBUTION MetricDescriptor_Type = 3
- // Integer cumulative measurement. The value cannot decrease, if resets
- // then the start_time should also be reset.
- MetricDescriptor_CUMULATIVE_INT64 MetricDescriptor_Type = 4
- // Floating point cumulative measurement. The value cannot decrease, if
- // resets then the start_time should also be reset. Recorded values are
- // always >= 0.
- MetricDescriptor_CUMULATIVE_DOUBLE MetricDescriptor_Type = 5
- // Distribution cumulative measurement. The count and sum cannot decrease,
- // if resets then the start_time should also be reset.
- MetricDescriptor_CUMULATIVE_DISTRIBUTION MetricDescriptor_Type = 6
- // Some frameworks implemented Histograms as a summary of observations
- // (usually things like request durations and response sizes). While it
- // also provides a total count of observations and a sum of all observed
- // values, it calculates configurable percentiles over a sliding time
- // window. This is not recommended, since it cannot be aggregated.
- MetricDescriptor_SUMMARY MetricDescriptor_Type = 7
-)
-
-var MetricDescriptor_Type_name = map[int32]string{
- 0: "UNSPECIFIED",
- 1: "GAUGE_INT64",
- 2: "GAUGE_DOUBLE",
- 3: "GAUGE_DISTRIBUTION",
- 4: "CUMULATIVE_INT64",
- 5: "CUMULATIVE_DOUBLE",
- 6: "CUMULATIVE_DISTRIBUTION",
- 7: "SUMMARY",
-}
-
-var MetricDescriptor_Type_value = map[string]int32{
- "UNSPECIFIED": 0,
- "GAUGE_INT64": 1,
- "GAUGE_DOUBLE": 2,
- "GAUGE_DISTRIBUTION": 3,
- "CUMULATIVE_INT64": 4,
- "CUMULATIVE_DOUBLE": 5,
- "CUMULATIVE_DISTRIBUTION": 6,
- "SUMMARY": 7,
-}
-
-func (x MetricDescriptor_Type) String() string {
- return proto.EnumName(MetricDescriptor_Type_name, int32(x))
-}
-
-func (MetricDescriptor_Type) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{1, 0}
-}
-
-// Defines a Metric which has one or more timeseries.
-type Metric struct {
- // The descriptor of the Metric.
- // TODO(issue #152): consider only sending the name of descriptor for
- // optimization.
- MetricDescriptor *MetricDescriptor `protobuf:"bytes,1,opt,name=metric_descriptor,json=metricDescriptor,proto3" json:"metric_descriptor,omitempty"`
- // One or more timeseries for a single metric, where each timeseries has
- // one or more points.
- Timeseries []*TimeSeries `protobuf:"bytes,2,rep,name=timeseries,proto3" json:"timeseries,omitempty"`
- // The resource for the metric. If unset, it may be set to a default value
- // provided for a sequence of messages in an RPC stream.
- Resource *v1.Resource `protobuf:"bytes,3,opt,name=resource,proto3" json:"resource,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Metric) Reset() { *m = Metric{} }
-func (m *Metric) String() string { return proto.CompactTextString(m) }
-func (*Metric) ProtoMessage() {}
-func (*Metric) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{0}
-}
-
-func (m *Metric) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Metric.Unmarshal(m, b)
-}
-func (m *Metric) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Metric.Marshal(b, m, deterministic)
-}
-func (m *Metric) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Metric.Merge(m, src)
-}
-func (m *Metric) XXX_Size() int {
- return xxx_messageInfo_Metric.Size(m)
-}
-func (m *Metric) XXX_DiscardUnknown() {
- xxx_messageInfo_Metric.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Metric proto.InternalMessageInfo
-
-func (m *Metric) GetMetricDescriptor() *MetricDescriptor {
- if m != nil {
- return m.MetricDescriptor
- }
- return nil
-}
-
-func (m *Metric) GetTimeseries() []*TimeSeries {
- if m != nil {
- return m.Timeseries
- }
- return nil
-}
-
-func (m *Metric) GetResource() *v1.Resource {
- if m != nil {
- return m.Resource
- }
- return nil
-}
-
-// Defines a metric type and its schema.
-type MetricDescriptor struct {
- // The metric type, including its DNS name prefix. It must be unique.
- Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
- // A detailed description of the metric, which can be used in documentation.
- Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"`
- // The unit in which the metric value is reported. Follows the format
- // described by http://unitsofmeasure.org/ucum.html.
- Unit string `protobuf:"bytes,3,opt,name=unit,proto3" json:"unit,omitempty"`
- Type MetricDescriptor_Type `protobuf:"varint,4,opt,name=type,proto3,enum=opencensus.proto.metrics.v1.MetricDescriptor_Type" json:"type,omitempty"`
- // The label keys associated with the metric descriptor.
- LabelKeys []*LabelKey `protobuf:"bytes,5,rep,name=label_keys,json=labelKeys,proto3" json:"label_keys,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *MetricDescriptor) Reset() { *m = MetricDescriptor{} }
-func (m *MetricDescriptor) String() string { return proto.CompactTextString(m) }
-func (*MetricDescriptor) ProtoMessage() {}
-func (*MetricDescriptor) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{1}
-}
-
-func (m *MetricDescriptor) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_MetricDescriptor.Unmarshal(m, b)
-}
-func (m *MetricDescriptor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_MetricDescriptor.Marshal(b, m, deterministic)
-}
-func (m *MetricDescriptor) XXX_Merge(src proto.Message) {
- xxx_messageInfo_MetricDescriptor.Merge(m, src)
-}
-func (m *MetricDescriptor) XXX_Size() int {
- return xxx_messageInfo_MetricDescriptor.Size(m)
-}
-func (m *MetricDescriptor) XXX_DiscardUnknown() {
- xxx_messageInfo_MetricDescriptor.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_MetricDescriptor proto.InternalMessageInfo
-
-func (m *MetricDescriptor) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
-
-func (m *MetricDescriptor) GetDescription() string {
- if m != nil {
- return m.Description
- }
- return ""
-}
-
-func (m *MetricDescriptor) GetUnit() string {
- if m != nil {
- return m.Unit
- }
- return ""
-}
-
-func (m *MetricDescriptor) GetType() MetricDescriptor_Type {
- if m != nil {
- return m.Type
- }
- return MetricDescriptor_UNSPECIFIED
-}
-
-func (m *MetricDescriptor) GetLabelKeys() []*LabelKey {
- if m != nil {
- return m.LabelKeys
- }
- return nil
-}
-
-// Defines a label key associated with a metric descriptor.
-type LabelKey struct {
- // The key for the label.
- Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
- // A human-readable description of what this label key represents.
- Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *LabelKey) Reset() { *m = LabelKey{} }
-func (m *LabelKey) String() string { return proto.CompactTextString(m) }
-func (*LabelKey) ProtoMessage() {}
-func (*LabelKey) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{2}
-}
-
-func (m *LabelKey) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_LabelKey.Unmarshal(m, b)
-}
-func (m *LabelKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_LabelKey.Marshal(b, m, deterministic)
-}
-func (m *LabelKey) XXX_Merge(src proto.Message) {
- xxx_messageInfo_LabelKey.Merge(m, src)
-}
-func (m *LabelKey) XXX_Size() int {
- return xxx_messageInfo_LabelKey.Size(m)
-}
-func (m *LabelKey) XXX_DiscardUnknown() {
- xxx_messageInfo_LabelKey.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_LabelKey proto.InternalMessageInfo
-
-func (m *LabelKey) GetKey() string {
- if m != nil {
- return m.Key
- }
- return ""
-}
-
-func (m *LabelKey) GetDescription() string {
- if m != nil {
- return m.Description
- }
- return ""
-}
-
-// A collection of data points that describes the time-varying values
-// of a metric.
-type TimeSeries struct {
- // Must be present for cumulative metrics. The time when the cumulative value
- // was reset to zero. Exclusive. The cumulative value is over the time interval
- // (start_timestamp, timestamp]. If not specified, the backend can use the
- // previous recorded value.
- StartTimestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_timestamp,json=startTimestamp,proto3" json:"start_timestamp,omitempty"`
- // The set of label values that uniquely identify this timeseries. Applies to
- // all points. The order of label values must match that of label keys in the
- // metric descriptor.
- LabelValues []*LabelValue `protobuf:"bytes,2,rep,name=label_values,json=labelValues,proto3" json:"label_values,omitempty"`
- // The data points of this timeseries. Point.value type MUST match the
- // MetricDescriptor.type.
- Points []*Point `protobuf:"bytes,3,rep,name=points,proto3" json:"points,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *TimeSeries) Reset() { *m = TimeSeries{} }
-func (m *TimeSeries) String() string { return proto.CompactTextString(m) }
-func (*TimeSeries) ProtoMessage() {}
-func (*TimeSeries) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{3}
-}
-
-func (m *TimeSeries) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_TimeSeries.Unmarshal(m, b)
-}
-func (m *TimeSeries) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_TimeSeries.Marshal(b, m, deterministic)
-}
-func (m *TimeSeries) XXX_Merge(src proto.Message) {
- xxx_messageInfo_TimeSeries.Merge(m, src)
-}
-func (m *TimeSeries) XXX_Size() int {
- return xxx_messageInfo_TimeSeries.Size(m)
-}
-func (m *TimeSeries) XXX_DiscardUnknown() {
- xxx_messageInfo_TimeSeries.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_TimeSeries proto.InternalMessageInfo
-
-func (m *TimeSeries) GetStartTimestamp() *timestamp.Timestamp {
- if m != nil {
- return m.StartTimestamp
- }
- return nil
-}
-
-func (m *TimeSeries) GetLabelValues() []*LabelValue {
- if m != nil {
- return m.LabelValues
- }
- return nil
-}
-
-func (m *TimeSeries) GetPoints() []*Point {
- if m != nil {
- return m.Points
- }
- return nil
-}
-
-type LabelValue struct {
- // The value for the label.
- Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
- // If false the value field is ignored and considered not set.
- // This is used to differentiate a missing label from an empty string.
- HasValue bool `protobuf:"varint,2,opt,name=has_value,json=hasValue,proto3" json:"has_value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *LabelValue) Reset() { *m = LabelValue{} }
-func (m *LabelValue) String() string { return proto.CompactTextString(m) }
-func (*LabelValue) ProtoMessage() {}
-func (*LabelValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{4}
-}
-
-func (m *LabelValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_LabelValue.Unmarshal(m, b)
-}
-func (m *LabelValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_LabelValue.Marshal(b, m, deterministic)
-}
-func (m *LabelValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_LabelValue.Merge(m, src)
-}
-func (m *LabelValue) XXX_Size() int {
- return xxx_messageInfo_LabelValue.Size(m)
-}
-func (m *LabelValue) XXX_DiscardUnknown() {
- xxx_messageInfo_LabelValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_LabelValue proto.InternalMessageInfo
-
-func (m *LabelValue) GetValue() string {
- if m != nil {
- return m.Value
- }
- return ""
-}
-
-func (m *LabelValue) GetHasValue() bool {
- if m != nil {
- return m.HasValue
- }
- return false
-}
-
-// A timestamped measurement.
-type Point struct {
- // The moment when this point was recorded. Inclusive.
- // If not specified, the timestamp will be decided by the backend.
- Timestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
- // The actual point value.
- //
- // Types that are valid to be assigned to Value:
- // *Point_Int64Value
- // *Point_DoubleValue
- // *Point_DistributionValue
- // *Point_SummaryValue
- Value isPoint_Value `protobuf_oneof:"value"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Point) Reset() { *m = Point{} }
-func (m *Point) String() string { return proto.CompactTextString(m) }
-func (*Point) ProtoMessage() {}
-func (*Point) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{5}
-}
-
-func (m *Point) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Point.Unmarshal(m, b)
-}
-func (m *Point) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Point.Marshal(b, m, deterministic)
-}
-func (m *Point) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Point.Merge(m, src)
-}
-func (m *Point) XXX_Size() int {
- return xxx_messageInfo_Point.Size(m)
-}
-func (m *Point) XXX_DiscardUnknown() {
- xxx_messageInfo_Point.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Point proto.InternalMessageInfo
-
-func (m *Point) GetTimestamp() *timestamp.Timestamp {
- if m != nil {
- return m.Timestamp
- }
- return nil
-}
-
-type isPoint_Value interface {
- isPoint_Value()
-}
-
-type Point_Int64Value struct {
- Int64Value int64 `protobuf:"varint,2,opt,name=int64_value,json=int64Value,proto3,oneof"`
-}
-
-type Point_DoubleValue struct {
- DoubleValue float64 `protobuf:"fixed64,3,opt,name=double_value,json=doubleValue,proto3,oneof"`
-}
-
-type Point_DistributionValue struct {
- DistributionValue *DistributionValue `protobuf:"bytes,4,opt,name=distribution_value,json=distributionValue,proto3,oneof"`
-}
-
-type Point_SummaryValue struct {
- SummaryValue *SummaryValue `protobuf:"bytes,5,opt,name=summary_value,json=summaryValue,proto3,oneof"`
-}
-
-func (*Point_Int64Value) isPoint_Value() {}
-
-func (*Point_DoubleValue) isPoint_Value() {}
-
-func (*Point_DistributionValue) isPoint_Value() {}
-
-func (*Point_SummaryValue) isPoint_Value() {}
-
-func (m *Point) GetValue() isPoint_Value {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-func (m *Point) GetInt64Value() int64 {
- if x, ok := m.GetValue().(*Point_Int64Value); ok {
- return x.Int64Value
- }
- return 0
-}
-
-func (m *Point) GetDoubleValue() float64 {
- if x, ok := m.GetValue().(*Point_DoubleValue); ok {
- return x.DoubleValue
- }
- return 0
-}
-
-func (m *Point) GetDistributionValue() *DistributionValue {
- if x, ok := m.GetValue().(*Point_DistributionValue); ok {
- return x.DistributionValue
- }
- return nil
-}
-
-func (m *Point) GetSummaryValue() *SummaryValue {
- if x, ok := m.GetValue().(*Point_SummaryValue); ok {
- return x.SummaryValue
- }
- return nil
-}
-
-// XXX_OneofWrappers is for the internal use of the proto package.
-func (*Point) XXX_OneofWrappers() []interface{} {
- return []interface{}{
- (*Point_Int64Value)(nil),
- (*Point_DoubleValue)(nil),
- (*Point_DistributionValue)(nil),
- (*Point_SummaryValue)(nil),
- }
-}
-
-// Distribution contains summary statistics for a population of values. It
-// optionally contains a histogram representing the distribution of those
-// values across a set of buckets.
-type DistributionValue struct {
- // The number of values in the population. Must be non-negative. This value
- // must equal the sum of the values in bucket_counts if a histogram is
- // provided.
- Count int64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"`
- // The sum of the values in the population. If count is zero then this field
- // must be zero.
- Sum float64 `protobuf:"fixed64,2,opt,name=sum,proto3" json:"sum,omitempty"`
- // The sum of squared deviations from the mean of the values in the
- // population. For values x_i this is:
- //
- // Sum[i=1..n]((x_i - mean)^2)
- //
- // Knuth, "The Art of Computer Programming", Vol. 2, page 323, 3rd edition
- // describes Welford's method for accumulating this sum in one pass.
- //
- // If count is zero then this field must be zero.
- SumOfSquaredDeviation float64 `protobuf:"fixed64,3,opt,name=sum_of_squared_deviation,json=sumOfSquaredDeviation,proto3" json:"sum_of_squared_deviation,omitempty"`
- // Don't change bucket boundaries within a TimeSeries if your backend doesn't
- // support this.
- // TODO(issue #152): consider not required to send bucket options for
- // optimization.
- BucketOptions *DistributionValue_BucketOptions `protobuf:"bytes,4,opt,name=bucket_options,json=bucketOptions,proto3" json:"bucket_options,omitempty"`
- // If the distribution does not have a histogram, then omit this field.
- // If there is a histogram, then the sum of the values in the Bucket counts
- // must equal the value in the count field of the distribution.
- Buckets []*DistributionValue_Bucket `protobuf:"bytes,5,rep,name=buckets,proto3" json:"buckets,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *DistributionValue) Reset() { *m = DistributionValue{} }
-func (m *DistributionValue) String() string { return proto.CompactTextString(m) }
-func (*DistributionValue) ProtoMessage() {}
-func (*DistributionValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{6}
-}
-
-func (m *DistributionValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_DistributionValue.Unmarshal(m, b)
-}
-func (m *DistributionValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_DistributionValue.Marshal(b, m, deterministic)
-}
-func (m *DistributionValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_DistributionValue.Merge(m, src)
-}
-func (m *DistributionValue) XXX_Size() int {
- return xxx_messageInfo_DistributionValue.Size(m)
-}
-func (m *DistributionValue) XXX_DiscardUnknown() {
- xxx_messageInfo_DistributionValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_DistributionValue proto.InternalMessageInfo
-
-func (m *DistributionValue) GetCount() int64 {
- if m != nil {
- return m.Count
- }
- return 0
-}
-
-func (m *DistributionValue) GetSum() float64 {
- if m != nil {
- return m.Sum
- }
- return 0
-}
-
-func (m *DistributionValue) GetSumOfSquaredDeviation() float64 {
- if m != nil {
- return m.SumOfSquaredDeviation
- }
- return 0
-}
-
-func (m *DistributionValue) GetBucketOptions() *DistributionValue_BucketOptions {
- if m != nil {
- return m.BucketOptions
- }
- return nil
-}
-
-func (m *DistributionValue) GetBuckets() []*DistributionValue_Bucket {
- if m != nil {
- return m.Buckets
- }
- return nil
-}
-
-// A Distribution may optionally contain a histogram of the values in the
-// population. The bucket boundaries for that histogram are described by
-// BucketOptions.
-//
-// If bucket_options has no type, then there is no histogram associated with
-// the Distribution.
-type DistributionValue_BucketOptions struct {
- // Types that are valid to be assigned to Type:
- // *DistributionValue_BucketOptions_Explicit_
- Type isDistributionValue_BucketOptions_Type `protobuf_oneof:"type"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *DistributionValue_BucketOptions) Reset() { *m = DistributionValue_BucketOptions{} }
-func (m *DistributionValue_BucketOptions) String() string { return proto.CompactTextString(m) }
-func (*DistributionValue_BucketOptions) ProtoMessage() {}
-func (*DistributionValue_BucketOptions) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{6, 0}
-}
-
-func (m *DistributionValue_BucketOptions) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_DistributionValue_BucketOptions.Unmarshal(m, b)
-}
-func (m *DistributionValue_BucketOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_DistributionValue_BucketOptions.Marshal(b, m, deterministic)
-}
-func (m *DistributionValue_BucketOptions) XXX_Merge(src proto.Message) {
- xxx_messageInfo_DistributionValue_BucketOptions.Merge(m, src)
-}
-func (m *DistributionValue_BucketOptions) XXX_Size() int {
- return xxx_messageInfo_DistributionValue_BucketOptions.Size(m)
-}
-func (m *DistributionValue_BucketOptions) XXX_DiscardUnknown() {
- xxx_messageInfo_DistributionValue_BucketOptions.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_DistributionValue_BucketOptions proto.InternalMessageInfo
-
-type isDistributionValue_BucketOptions_Type interface {
- isDistributionValue_BucketOptions_Type()
-}
-
-type DistributionValue_BucketOptions_Explicit_ struct {
- Explicit *DistributionValue_BucketOptions_Explicit `protobuf:"bytes,1,opt,name=explicit,proto3,oneof"`
-}
-
-func (*DistributionValue_BucketOptions_Explicit_) isDistributionValue_BucketOptions_Type() {}
-
-func (m *DistributionValue_BucketOptions) GetType() isDistributionValue_BucketOptions_Type {
- if m != nil {
- return m.Type
- }
- return nil
-}
-
-func (m *DistributionValue_BucketOptions) GetExplicit() *DistributionValue_BucketOptions_Explicit {
- if x, ok := m.GetType().(*DistributionValue_BucketOptions_Explicit_); ok {
- return x.Explicit
- }
- return nil
-}
-
-// XXX_OneofWrappers is for the internal use of the proto package.
-func (*DistributionValue_BucketOptions) XXX_OneofWrappers() []interface{} {
- return []interface{}{
- (*DistributionValue_BucketOptions_Explicit_)(nil),
- }
-}
-
-// Specifies a set of buckets with arbitrary upper-bounds.
-// This defines size(bounds) + 1 (= N) buckets. The boundaries for bucket
-// index i are:
-//
-// [0, bucket_bounds[i]) for i == 0
-// [bucket_bounds[i-1], bucket_bounds[i]) for 0 < i < N-1
-// [bucket_bounds[i], +infinity) for i == N-1
-type DistributionValue_BucketOptions_Explicit struct {
- // The values must be strictly increasing and > 0.
- Bounds []float64 `protobuf:"fixed64,1,rep,packed,name=bounds,proto3" json:"bounds,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *DistributionValue_BucketOptions_Explicit) Reset() {
- *m = DistributionValue_BucketOptions_Explicit{}
-}
-func (m *DistributionValue_BucketOptions_Explicit) String() string { return proto.CompactTextString(m) }
-func (*DistributionValue_BucketOptions_Explicit) ProtoMessage() {}
-func (*DistributionValue_BucketOptions_Explicit) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{6, 0, 0}
-}
-
-func (m *DistributionValue_BucketOptions_Explicit) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_DistributionValue_BucketOptions_Explicit.Unmarshal(m, b)
-}
-func (m *DistributionValue_BucketOptions_Explicit) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_DistributionValue_BucketOptions_Explicit.Marshal(b, m, deterministic)
-}
-func (m *DistributionValue_BucketOptions_Explicit) XXX_Merge(src proto.Message) {
- xxx_messageInfo_DistributionValue_BucketOptions_Explicit.Merge(m, src)
-}
-func (m *DistributionValue_BucketOptions_Explicit) XXX_Size() int {
- return xxx_messageInfo_DistributionValue_BucketOptions_Explicit.Size(m)
-}
-func (m *DistributionValue_BucketOptions_Explicit) XXX_DiscardUnknown() {
- xxx_messageInfo_DistributionValue_BucketOptions_Explicit.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_DistributionValue_BucketOptions_Explicit proto.InternalMessageInfo
-
-func (m *DistributionValue_BucketOptions_Explicit) GetBounds() []float64 {
- if m != nil {
- return m.Bounds
- }
- return nil
-}
-
-type DistributionValue_Bucket struct {
- // The number of values in each bucket of the histogram, as described in
- // bucket_bounds.
- Count int64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"`
- // If the distribution does not have a histogram, then omit this field.
- Exemplar *DistributionValue_Exemplar `protobuf:"bytes,2,opt,name=exemplar,proto3" json:"exemplar,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *DistributionValue_Bucket) Reset() { *m = DistributionValue_Bucket{} }
-func (m *DistributionValue_Bucket) String() string { return proto.CompactTextString(m) }
-func (*DistributionValue_Bucket) ProtoMessage() {}
-func (*DistributionValue_Bucket) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{6, 1}
-}
-
-func (m *DistributionValue_Bucket) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_DistributionValue_Bucket.Unmarshal(m, b)
-}
-func (m *DistributionValue_Bucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_DistributionValue_Bucket.Marshal(b, m, deterministic)
-}
-func (m *DistributionValue_Bucket) XXX_Merge(src proto.Message) {
- xxx_messageInfo_DistributionValue_Bucket.Merge(m, src)
-}
-func (m *DistributionValue_Bucket) XXX_Size() int {
- return xxx_messageInfo_DistributionValue_Bucket.Size(m)
-}
-func (m *DistributionValue_Bucket) XXX_DiscardUnknown() {
- xxx_messageInfo_DistributionValue_Bucket.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_DistributionValue_Bucket proto.InternalMessageInfo
-
-func (m *DistributionValue_Bucket) GetCount() int64 {
- if m != nil {
- return m.Count
- }
- return 0
-}
-
-func (m *DistributionValue_Bucket) GetExemplar() *DistributionValue_Exemplar {
- if m != nil {
- return m.Exemplar
- }
- return nil
-}
-
-// Exemplars are example points that may be used to annotate aggregated
-// Distribution values. They are metadata that gives information about a
-// particular value added to a Distribution bucket.
-type DistributionValue_Exemplar struct {
- // Value of the exemplar point. It determines which bucket the exemplar
- // belongs to.
- Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"`
- // The observation (sampling) time of the above value.
- Timestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
- // Contextual information about the example value.
- Attachments map[string]string `protobuf:"bytes,3,rep,name=attachments,proto3" json:"attachments,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *DistributionValue_Exemplar) Reset() { *m = DistributionValue_Exemplar{} }
-func (m *DistributionValue_Exemplar) String() string { return proto.CompactTextString(m) }
-func (*DistributionValue_Exemplar) ProtoMessage() {}
-func (*DistributionValue_Exemplar) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{6, 2}
-}
-
-func (m *DistributionValue_Exemplar) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_DistributionValue_Exemplar.Unmarshal(m, b)
-}
-func (m *DistributionValue_Exemplar) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_DistributionValue_Exemplar.Marshal(b, m, deterministic)
-}
-func (m *DistributionValue_Exemplar) XXX_Merge(src proto.Message) {
- xxx_messageInfo_DistributionValue_Exemplar.Merge(m, src)
-}
-func (m *DistributionValue_Exemplar) XXX_Size() int {
- return xxx_messageInfo_DistributionValue_Exemplar.Size(m)
-}
-func (m *DistributionValue_Exemplar) XXX_DiscardUnknown() {
- xxx_messageInfo_DistributionValue_Exemplar.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_DistributionValue_Exemplar proto.InternalMessageInfo
-
-func (m *DistributionValue_Exemplar) GetValue() float64 {
- if m != nil {
- return m.Value
- }
- return 0
-}
-
-func (m *DistributionValue_Exemplar) GetTimestamp() *timestamp.Timestamp {
- if m != nil {
- return m.Timestamp
- }
- return nil
-}
-
-func (m *DistributionValue_Exemplar) GetAttachments() map[string]string {
- if m != nil {
- return m.Attachments
- }
- return nil
-}
-
-// The start_timestamp only applies to the count and sum in the SummaryValue.
-type SummaryValue struct {
- // The total number of recorded values since start_time. Optional since
- // some systems don't expose this.
- Count *wrappers.Int64Value `protobuf:"bytes,1,opt,name=count,proto3" json:"count,omitempty"`
- // The total sum of recorded values since start_time. Optional since some
- // systems don't expose this. If count is zero then this field must be zero.
- // This field must be unset if the sum is not available.
- Sum *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=sum,proto3" json:"sum,omitempty"`
- // Values calculated over an arbitrary time window.
- Snapshot *SummaryValue_Snapshot `protobuf:"bytes,3,opt,name=snapshot,proto3" json:"snapshot,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *SummaryValue) Reset() { *m = SummaryValue{} }
-func (m *SummaryValue) String() string { return proto.CompactTextString(m) }
-func (*SummaryValue) ProtoMessage() {}
-func (*SummaryValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{7}
-}
-
-func (m *SummaryValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_SummaryValue.Unmarshal(m, b)
-}
-func (m *SummaryValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_SummaryValue.Marshal(b, m, deterministic)
-}
-func (m *SummaryValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_SummaryValue.Merge(m, src)
-}
-func (m *SummaryValue) XXX_Size() int {
- return xxx_messageInfo_SummaryValue.Size(m)
-}
-func (m *SummaryValue) XXX_DiscardUnknown() {
- xxx_messageInfo_SummaryValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_SummaryValue proto.InternalMessageInfo
-
-func (m *SummaryValue) GetCount() *wrappers.Int64Value {
- if m != nil {
- return m.Count
- }
- return nil
-}
-
-func (m *SummaryValue) GetSum() *wrappers.DoubleValue {
- if m != nil {
- return m.Sum
- }
- return nil
-}
-
-func (m *SummaryValue) GetSnapshot() *SummaryValue_Snapshot {
- if m != nil {
- return m.Snapshot
- }
- return nil
-}
-
-// The values in this message can be reset at arbitrary unknown times, with
-// the requirement that all of them are reset at the same time.
-type SummaryValue_Snapshot struct {
- // The number of values in the snapshot. Optional since some systems don't
- // expose this.
- Count *wrappers.Int64Value `protobuf:"bytes,1,opt,name=count,proto3" json:"count,omitempty"`
- // The sum of values in the snapshot. Optional since some systems don't
- // expose this. If count is zero then this field must be zero or not set
- // (if not supported).
- Sum *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=sum,proto3" json:"sum,omitempty"`
- // A list of values at different percentiles of the distribution calculated
- // from the current snapshot. The percentiles must be strictly increasing.
- PercentileValues []*SummaryValue_Snapshot_ValueAtPercentile `protobuf:"bytes,3,rep,name=percentile_values,json=percentileValues,proto3" json:"percentile_values,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *SummaryValue_Snapshot) Reset() { *m = SummaryValue_Snapshot{} }
-func (m *SummaryValue_Snapshot) String() string { return proto.CompactTextString(m) }
-func (*SummaryValue_Snapshot) ProtoMessage() {}
-func (*SummaryValue_Snapshot) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{7, 0}
-}
-
-func (m *SummaryValue_Snapshot) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_SummaryValue_Snapshot.Unmarshal(m, b)
-}
-func (m *SummaryValue_Snapshot) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_SummaryValue_Snapshot.Marshal(b, m, deterministic)
-}
-func (m *SummaryValue_Snapshot) XXX_Merge(src proto.Message) {
- xxx_messageInfo_SummaryValue_Snapshot.Merge(m, src)
-}
-func (m *SummaryValue_Snapshot) XXX_Size() int {
- return xxx_messageInfo_SummaryValue_Snapshot.Size(m)
-}
-func (m *SummaryValue_Snapshot) XXX_DiscardUnknown() {
- xxx_messageInfo_SummaryValue_Snapshot.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_SummaryValue_Snapshot proto.InternalMessageInfo
-
-func (m *SummaryValue_Snapshot) GetCount() *wrappers.Int64Value {
- if m != nil {
- return m.Count
- }
- return nil
-}
-
-func (m *SummaryValue_Snapshot) GetSum() *wrappers.DoubleValue {
- if m != nil {
- return m.Sum
- }
- return nil
-}
-
-func (m *SummaryValue_Snapshot) GetPercentileValues() []*SummaryValue_Snapshot_ValueAtPercentile {
- if m != nil {
- return m.PercentileValues
- }
- return nil
-}
-
-// Represents the value at a given percentile of a distribution.
-type SummaryValue_Snapshot_ValueAtPercentile struct {
- // The percentile of a distribution. Must be in the interval
- // (0.0, 100.0].
- Percentile float64 `protobuf:"fixed64,1,opt,name=percentile,proto3" json:"percentile,omitempty"`
- // The value at the given percentile of a distribution.
- Value float64 `protobuf:"fixed64,2,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *SummaryValue_Snapshot_ValueAtPercentile) Reset() {
- *m = SummaryValue_Snapshot_ValueAtPercentile{}
-}
-func (m *SummaryValue_Snapshot_ValueAtPercentile) String() string { return proto.CompactTextString(m) }
-func (*SummaryValue_Snapshot_ValueAtPercentile) ProtoMessage() {}
-func (*SummaryValue_Snapshot_ValueAtPercentile) Descriptor() ([]byte, []int) {
- return fileDescriptor_0ee3deb72053811a, []int{7, 0, 0}
-}
-
-func (m *SummaryValue_Snapshot_ValueAtPercentile) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_SummaryValue_Snapshot_ValueAtPercentile.Unmarshal(m, b)
-}
-func (m *SummaryValue_Snapshot_ValueAtPercentile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_SummaryValue_Snapshot_ValueAtPercentile.Marshal(b, m, deterministic)
-}
-func (m *SummaryValue_Snapshot_ValueAtPercentile) XXX_Merge(src proto.Message) {
- xxx_messageInfo_SummaryValue_Snapshot_ValueAtPercentile.Merge(m, src)
-}
-func (m *SummaryValue_Snapshot_ValueAtPercentile) XXX_Size() int {
- return xxx_messageInfo_SummaryValue_Snapshot_ValueAtPercentile.Size(m)
-}
-func (m *SummaryValue_Snapshot_ValueAtPercentile) XXX_DiscardUnknown() {
- xxx_messageInfo_SummaryValue_Snapshot_ValueAtPercentile.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_SummaryValue_Snapshot_ValueAtPercentile proto.InternalMessageInfo
-
-func (m *SummaryValue_Snapshot_ValueAtPercentile) GetPercentile() float64 {
- if m != nil {
- return m.Percentile
- }
- return 0
-}
-
-func (m *SummaryValue_Snapshot_ValueAtPercentile) GetValue() float64 {
- if m != nil {
- return m.Value
- }
- return 0
-}
-
-func init() {
- proto.RegisterEnum("opencensus.proto.metrics.v1.MetricDescriptor_Type", MetricDescriptor_Type_name, MetricDescriptor_Type_value)
- proto.RegisterType((*Metric)(nil), "opencensus.proto.metrics.v1.Metric")
- proto.RegisterType((*MetricDescriptor)(nil), "opencensus.proto.metrics.v1.MetricDescriptor")
- proto.RegisterType((*LabelKey)(nil), "opencensus.proto.metrics.v1.LabelKey")
- proto.RegisterType((*TimeSeries)(nil), "opencensus.proto.metrics.v1.TimeSeries")
- proto.RegisterType((*LabelValue)(nil), "opencensus.proto.metrics.v1.LabelValue")
- proto.RegisterType((*Point)(nil), "opencensus.proto.metrics.v1.Point")
- proto.RegisterType((*DistributionValue)(nil), "opencensus.proto.metrics.v1.DistributionValue")
- proto.RegisterType((*DistributionValue_BucketOptions)(nil), "opencensus.proto.metrics.v1.DistributionValue.BucketOptions")
- proto.RegisterType((*DistributionValue_BucketOptions_Explicit)(nil), "opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit")
- proto.RegisterType((*DistributionValue_Bucket)(nil), "opencensus.proto.metrics.v1.DistributionValue.Bucket")
- proto.RegisterType((*DistributionValue_Exemplar)(nil), "opencensus.proto.metrics.v1.DistributionValue.Exemplar")
- proto.RegisterMapType((map[string]string)(nil), "opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry")
- proto.RegisterType((*SummaryValue)(nil), "opencensus.proto.metrics.v1.SummaryValue")
- proto.RegisterType((*SummaryValue_Snapshot)(nil), "opencensus.proto.metrics.v1.SummaryValue.Snapshot")
- proto.RegisterType((*SummaryValue_Snapshot_ValueAtPercentile)(nil), "opencensus.proto.metrics.v1.SummaryValue.Snapshot.ValueAtPercentile")
-}
-
-func init() {
- proto.RegisterFile("opencensus/proto/metrics/v1/metrics.proto", fileDescriptor_0ee3deb72053811a)
-}
-
-var fileDescriptor_0ee3deb72053811a = []byte{
- // 1098 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xdd, 0x6e, 0x1b, 0xc5,
- 0x17, 0xcf, 0xda, 0x8e, 0xe3, 0x9c, 0x75, 0xdb, 0xf5, 0xa8, 0xed, 0xdf, 0xda, 0xfc, 0x15, 0xc2,
- 0x22, 0x20, 0x15, 0xca, 0x5a, 0x31, 0xa5, 0xad, 0x2a, 0x54, 0x14, 0xc7, 0x6e, 0x62, 0xc8, 0x87,
- 0x35, 0xb6, 0x2b, 0xd1, 0x1b, 0x6b, 0xbd, 0x9e, 0x24, 0x4b, 0xbc, 0x1f, 0xdd, 0x99, 0x35, 0xf8,
- 0x05, 0x78, 0x04, 0xae, 0xb9, 0x45, 0x3c, 0x07, 0x57, 0x3c, 0x01, 0x4f, 0x81, 0x78, 0x03, 0xb4,
- 0x33, 0xb3, 0x1f, 0x89, 0xc1, 0xd4, 0x45, 0xe2, 0xee, 0x9c, 0x33, 0xe7, 0xfc, 0xfc, 0x3b, 0x9f,
- 0x5e, 0x78, 0xe4, 0x07, 0xc4, 0xb3, 0x89, 0x47, 0x23, 0xda, 0x08, 0x42, 0x9f, 0xf9, 0x0d, 0x97,
- 0xb0, 0xd0, 0xb1, 0x69, 0x63, 0xb6, 0x9f, 0x88, 0x26, 0x7f, 0x40, 0x5b, 0x99, 0xab, 0xb0, 0x98,
- 0xc9, 0xfb, 0x6c, 0x5f, 0x7f, 0xef, 0xd2, 0xf7, 0x2f, 0xa7, 0x44, 0x60, 0x8c, 0xa3, 0x8b, 0x06,
- 0x73, 0x5c, 0x42, 0x99, 0xe5, 0x06, 0xc2, 0x57, 0xdf, 0xbe, 0xed, 0xf0, 0x6d, 0x68, 0x05, 0x01,
- 0x09, 0x25, 0x96, 0xfe, 0xc9, 0x02, 0x91, 0x90, 0x50, 0x3f, 0x0a, 0x6d, 0x12, 0x33, 0x49, 0x64,
- 0xe1, 0x6c, 0xfc, 0xa1, 0x40, 0xf9, 0x94, 0xff, 0x38, 0x7a, 0x0d, 0x35, 0x41, 0x63, 0x34, 0x21,
- 0xd4, 0x0e, 0x9d, 0x80, 0xf9, 0x61, 0x5d, 0xd9, 0x51, 0x76, 0xd5, 0xe6, 0x9e, 0xb9, 0x84, 0xb1,
- 0x29, 0xe2, 0xdb, 0x69, 0x10, 0xd6, 0xdc, 0x5b, 0x16, 0x74, 0x04, 0xc0, 0xd3, 0x20, 0xa1, 0x43,
- 0x68, 0xbd, 0xb0, 0x53, 0xdc, 0x55, 0x9b, 0x1f, 0x2f, 0x05, 0x1d, 0x38, 0x2e, 0xe9, 0x73, 0x77,
- 0x9c, 0x0b, 0x45, 0x2d, 0xa8, 0x24, 0x19, 0xd4, 0x8b, 0x9c, 0xdb, 0x47, 0x8b, 0x30, 0x69, 0x8e,
- 0xb3, 0x7d, 0x13, 0x4b, 0x19, 0xa7, 0x71, 0xc6, 0x0f, 0x45, 0xd0, 0x6e, 0x73, 0x46, 0x08, 0x4a,
- 0x9e, 0xe5, 0x12, 0x9e, 0xf0, 0x26, 0xe6, 0x32, 0xda, 0x01, 0x35, 0x29, 0x85, 0xe3, 0x7b, 0xf5,
- 0x02, 0x7f, 0xca, 0x9b, 0xe2, 0xa8, 0xc8, 0x73, 0x18, 0xa7, 0xb2, 0x89, 0xb9, 0x8c, 0x5e, 0x42,
- 0x89, 0xcd, 0x03, 0x52, 0x2f, 0xed, 0x28, 0xbb, 0x77, 0x9b, 0xcd, 0x95, 0x4a, 0x67, 0x0e, 0xe6,
- 0x01, 0xc1, 0x3c, 0x1e, 0xb5, 0x01, 0xa6, 0xd6, 0x98, 0x4c, 0x47, 0xd7, 0x64, 0x4e, 0xeb, 0xeb,
- 0xbc, 0x66, 0x1f, 0x2e, 0x45, 0x3b, 0x89, 0xdd, 0xbf, 0x22, 0x73, 0xbc, 0x39, 0x95, 0x12, 0x35,
- 0x7e, 0x52, 0xa0, 0x14, 0x83, 0xa2, 0x7b, 0xa0, 0x0e, 0xcf, 0xfa, 0xbd, 0xce, 0x61, 0xf7, 0x65,
- 0xb7, 0xd3, 0xd6, 0xd6, 0x62, 0xc3, 0xd1, 0xc1, 0xf0, 0xa8, 0x33, 0xea, 0x9e, 0x0d, 0x9e, 0x3c,
- 0xd6, 0x14, 0xa4, 0x41, 0x55, 0x18, 0xda, 0xe7, 0xc3, 0xd6, 0x49, 0x47, 0x2b, 0xa0, 0x87, 0x80,
- 0xa4, 0xa5, 0xdb, 0x1f, 0xe0, 0x6e, 0x6b, 0x38, 0xe8, 0x9e, 0x9f, 0x69, 0x45, 0x74, 0x1f, 0xb4,
- 0xc3, 0xe1, 0xe9, 0xf0, 0xe4, 0x60, 0xd0, 0x7d, 0x95, 0xc4, 0x97, 0xd0, 0x03, 0xa8, 0xe5, 0xac,
- 0x12, 0x64, 0x1d, 0x6d, 0xc1, 0xff, 0xf2, 0xe6, 0x3c, 0x52, 0x19, 0xa9, 0xb0, 0xd1, 0x1f, 0x9e,
- 0x9e, 0x1e, 0xe0, 0xaf, 0xb5, 0x0d, 0xe3, 0x05, 0x54, 0x92, 0x14, 0x90, 0x06, 0xc5, 0x6b, 0x32,
- 0x97, 0xed, 0x88, 0xc5, 0x7f, 0xee, 0x86, 0xf1, 0x9b, 0x02, 0x90, 0xcd, 0x0d, 0x3a, 0x84, 0x7b,
- 0x94, 0x59, 0x21, 0x1b, 0xa5, 0x1b, 0x24, 0xc7, 0x59, 0x37, 0xc5, 0x0a, 0x99, 0xc9, 0x0a, 0xf1,
- 0x69, 0xe3, 0x1e, 0xf8, 0x2e, 0x0f, 0x49, 0x75, 0xf4, 0x25, 0x54, 0x45, 0x17, 0x66, 0xd6, 0x34,
- 0x7a, 0xcb, 0xd9, 0xe5, 0x49, 0xbc, 0x8a, 0xfd, 0xb1, 0x3a, 0x4d, 0x65, 0x8a, 0x9e, 0x43, 0x39,
- 0xf0, 0x1d, 0x8f, 0xd1, 0x7a, 0x91, 0xa3, 0x18, 0x4b, 0x51, 0x7a, 0xb1, 0x2b, 0x96, 0x11, 0xc6,
- 0x17, 0x00, 0x19, 0x2c, 0xba, 0x0f, 0xeb, 0x9c, 0x8f, 0xac, 0x8f, 0x50, 0xd0, 0x16, 0x6c, 0x5e,
- 0x59, 0x54, 0x30, 0xe5, 0xf5, 0xa9, 0xe0, 0xca, 0x95, 0x45, 0x79, 0x88, 0xf1, 0x4b, 0x01, 0xd6,
- 0x39, 0x24, 0x7a, 0x06, 0x9b, 0xab, 0x54, 0x24, 0x73, 0x46, 0xef, 0x83, 0xea, 0x78, 0xec, 0xc9,
- 0xe3, 0xdc, 0x4f, 0x14, 0x8f, 0xd7, 0x30, 0x70, 0xa3, 0x60, 0xf6, 0x01, 0x54, 0x27, 0x7e, 0x34,
- 0x9e, 0x12, 0xe9, 0x13, 0x6f, 0x86, 0x72, 0xbc, 0x86, 0x55, 0x61, 0x15, 0x4e, 0x23, 0x40, 0x13,
- 0x87, 0xb2, 0xd0, 0x19, 0x47, 0x71, 0xe3, 0xa4, 0x6b, 0x89, 0x53, 0x31, 0x97, 0x16, 0xa5, 0x9d,
- 0x0b, 0xe3, 0x58, 0xc7, 0x6b, 0xb8, 0x36, 0xb9, 0x6d, 0x44, 0x3d, 0xb8, 0x43, 0x23, 0xd7, 0xb5,
- 0xc2, 0xb9, 0xc4, 0x5e, 0xe7, 0xd8, 0x8f, 0x96, 0x62, 0xf7, 0x45, 0x44, 0x02, 0x5b, 0xa5, 0x39,
- 0xbd, 0xb5, 0x21, 0x2b, 0x6e, 0xfc, 0x5a, 0x86, 0xda, 0x02, 0x8b, 0xb8, 0x21, 0xb6, 0x1f, 0x79,
- 0x8c, 0xd7, 0xb3, 0x88, 0x85, 0x12, 0x0f, 0x31, 0x8d, 0x5c, 0x5e, 0x27, 0x05, 0xc7, 0x22, 0x7a,
- 0x0a, 0x75, 0x1a, 0xb9, 0x23, 0xff, 0x62, 0x44, 0xdf, 0x44, 0x56, 0x48, 0x26, 0xa3, 0x09, 0x99,
- 0x39, 0x16, 0x9f, 0x68, 0x5e, 0x2a, 0xfc, 0x80, 0x46, 0xee, 0xf9, 0x45, 0x5f, 0xbc, 0xb6, 0x93,
- 0x47, 0x64, 0xc3, 0xdd, 0x71, 0x64, 0x5f, 0x13, 0x36, 0xf2, 0xf9, 0xb0, 0x53, 0x59, 0xae, 0xcf,
- 0x57, 0x2b, 0x97, 0xd9, 0xe2, 0x20, 0xe7, 0x02, 0x03, 0xdf, 0x19, 0xe7, 0x55, 0x74, 0x0e, 0x1b,
- 0xc2, 0x90, 0xdc, 0x9b, 0xcf, 0xde, 0x09, 0x1d, 0x27, 0x28, 0xfa, 0x8f, 0x0a, 0xdc, 0xb9, 0xf1,
- 0x8b, 0xc8, 0x86, 0x0a, 0xf9, 0x2e, 0x98, 0x3a, 0xb6, 0xc3, 0xe4, 0xec, 0x75, 0xfe, 0x4d, 0x06,
- 0x66, 0x47, 0x82, 0x1d, 0xaf, 0xe1, 0x14, 0x58, 0x37, 0xa0, 0x92, 0xd8, 0xd1, 0x43, 0x28, 0x8f,
- 0xfd, 0xc8, 0x9b, 0xd0, 0xba, 0xb2, 0x53, 0xdc, 0x55, 0xb0, 0xd4, 0x5a, 0x65, 0x71, 0xa6, 0x75,
- 0x0a, 0x65, 0x81, 0xf8, 0x37, 0x3d, 0xec, 0xc7, 0x84, 0x89, 0x1b, 0x4c, 0xad, 0x90, 0x37, 0x52,
- 0x6d, 0x3e, 0x5d, 0x91, 0x70, 0x47, 0x86, 0xe3, 0x14, 0x48, 0xff, 0xbe, 0x10, 0x33, 0x14, 0xca,
- 0xcd, 0x65, 0x56, 0x92, 0x65, 0xbe, 0xb1, 0xa5, 0x85, 0x55, 0xb6, 0xf4, 0x1b, 0x50, 0x2d, 0xc6,
- 0x2c, 0xfb, 0xca, 0x25, 0xd9, 0xad, 0x39, 0x7e, 0x47, 0xd2, 0xe6, 0x41, 0x06, 0xd5, 0xf1, 0x58,
- 0x38, 0xc7, 0x79, 0x70, 0xfd, 0x05, 0x68, 0xb7, 0x1d, 0xfe, 0xe2, 0x74, 0xa7, 0x19, 0x16, 0x72,
- 0xe7, 0xea, 0x79, 0xe1, 0x99, 0x62, 0xfc, 0x5e, 0x84, 0x6a, 0x7e, 0xef, 0xd0, 0x7e, 0xbe, 0x09,
- 0x6a, 0x73, 0x6b, 0x21, 0xe5, 0x6e, 0x7a, 0x6b, 0x92, 0x0e, 0x99, 0xd9, 0x96, 0xa9, 0xcd, 0xff,
- 0x2f, 0x04, 0xb4, 0xb3, 0xc3, 0x23, 0x76, 0xf0, 0x0c, 0x2a, 0xd4, 0xb3, 0x02, 0x7a, 0xe5, 0x33,
- 0xf9, 0x0d, 0xd1, 0x7c, 0xeb, 0xbb, 0x60, 0xf6, 0x65, 0x24, 0x4e, 0x31, 0xf4, 0x9f, 0x0b, 0x50,
- 0x49, 0xcc, 0xff, 0x05, 0xff, 0x37, 0x50, 0x0b, 0x48, 0x68, 0x13, 0x8f, 0x39, 0xc9, 0x99, 0x4d,
- 0xba, 0xdc, 0x5e, 0x3d, 0x11, 0x93, 0xab, 0x07, 0xac, 0x97, 0x42, 0x62, 0x2d, 0x83, 0x17, 0xff,
- 0x5c, 0x7a, 0x17, 0x6a, 0x0b, 0x6e, 0x68, 0x1b, 0x20, 0x73, 0x94, 0xc3, 0x9b, 0xb3, 0xdc, 0xec,
- 0x7a, 0x32, 0xd7, 0xad, 0x19, 0x6c, 0x3b, 0xfe, 0x32, 0x9a, 0xad, 0xaa, 0xf8, 0x2a, 0xa2, 0xbd,
- 0xf8, 0xa1, 0xa7, 0xbc, 0x6e, 0x5f, 0x3a, 0xec, 0x2a, 0x1a, 0x9b, 0xb6, 0xef, 0x36, 0x44, 0xcc,
- 0x9e, 0xe3, 0x51, 0x16, 0x46, 0xf1, 0xcc, 0xf1, 0xeb, 0xd8, 0xc8, 0xe0, 0xf6, 0xc4, 0x27, 0xef,
- 0x25, 0xf1, 0xf6, 0x2e, 0xf3, 0x9f, 0xe0, 0xe3, 0x32, 0x7f, 0xf8, 0xf4, 0xcf, 0x00, 0x00, 0x00,
- 0xff, 0xff, 0x8e, 0xfc, 0xd7, 0x46, 0xa8, 0x0b, 0x00, 0x00,
-}
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1/resource.pb.go b/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1/resource.pb.go
deleted file mode 100644
index 38faa9fdf..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1/resource.pb.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: opencensus/proto/resource/v1/resource.proto
-
-package v1
-
-import (
- fmt "fmt"
- proto "github.com/golang/protobuf/proto"
- math "math"
-)
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
-
-// Resource information.
-type Resource struct {
- // Type identifier for the resource.
- Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
- // Set of labels that describe the resource.
- Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Resource) Reset() { *m = Resource{} }
-func (m *Resource) String() string { return proto.CompactTextString(m) }
-func (*Resource) ProtoMessage() {}
-func (*Resource) Descriptor() ([]byte, []int) {
- return fileDescriptor_584700775a2fc762, []int{0}
-}
-
-func (m *Resource) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Resource.Unmarshal(m, b)
-}
-func (m *Resource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Resource.Marshal(b, m, deterministic)
-}
-func (m *Resource) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Resource.Merge(m, src)
-}
-func (m *Resource) XXX_Size() int {
- return xxx_messageInfo_Resource.Size(m)
-}
-func (m *Resource) XXX_DiscardUnknown() {
- xxx_messageInfo_Resource.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Resource proto.InternalMessageInfo
-
-func (m *Resource) GetType() string {
- if m != nil {
- return m.Type
- }
- return ""
-}
-
-func (m *Resource) GetLabels() map[string]string {
- if m != nil {
- return m.Labels
- }
- return nil
-}
-
-func init() {
- proto.RegisterType((*Resource)(nil), "opencensus.proto.resource.v1.Resource")
- proto.RegisterMapType((map[string]string)(nil), "opencensus.proto.resource.v1.Resource.LabelsEntry")
-}
-
-func init() {
- proto.RegisterFile("opencensus/proto/resource/v1/resource.proto", fileDescriptor_584700775a2fc762)
-}
-
-var fileDescriptor_584700775a2fc762 = []byte{
- // 234 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0xce, 0x2f, 0x48, 0xcd,
- 0x4b, 0x4e, 0xcd, 0x2b, 0x2e, 0x2d, 0xd6, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0xd7, 0x2f, 0x4a, 0x2d,
- 0xce, 0x2f, 0x2d, 0x4a, 0x4e, 0xd5, 0x2f, 0x33, 0x84, 0xb3, 0xf5, 0xc0, 0x52, 0x42, 0x32, 0x08,
- 0xc5, 0x10, 0x11, 0x3d, 0xb8, 0x82, 0x32, 0x43, 0xa5, 0xa5, 0x8c, 0x5c, 0x1c, 0x41, 0x50, 0xbe,
- 0x90, 0x10, 0x17, 0x4b, 0x49, 0x65, 0x41, 0xaa, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x67, 0x10, 0x98,
- 0x2d, 0xe4, 0xc5, 0xc5, 0x96, 0x93, 0x98, 0x94, 0x9a, 0x53, 0x2c, 0xc1, 0xa4, 0xc0, 0xac, 0xc1,
- 0x6d, 0x64, 0xa4, 0x87, 0xcf, 0x3c, 0x3d, 0x98, 0x59, 0x7a, 0x3e, 0x60, 0x4d, 0xae, 0x79, 0x25,
- 0x45, 0x95, 0x41, 0x50, 0x13, 0xa4, 0x2c, 0xb9, 0xb8, 0x91, 0x84, 0x85, 0x04, 0xb8, 0x98, 0xb3,
- 0x53, 0x2b, 0xa1, 0xb6, 0x81, 0x98, 0x42, 0x22, 0x5c, 0xac, 0x65, 0x89, 0x39, 0xa5, 0xa9, 0x12,
- 0x4c, 0x60, 0x31, 0x08, 0xc7, 0x8a, 0xc9, 0x82, 0xd1, 0xa9, 0x92, 0x4b, 0x3e, 0x33, 0x1f, 0xaf,
- 0xd5, 0x4e, 0xbc, 0x30, 0xbb, 0x03, 0x40, 0x52, 0x01, 0x8c, 0x51, 0xae, 0xe9, 0x99, 0x25, 0x19,
- 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0x10, 0x5d, 0xba, 0x99, 0x79, 0xc5, 0x25, 0x45, 0xa5,
- 0xb9, 0xa9, 0x79, 0x25, 0x89, 0x25, 0x99, 0xf9, 0x79, 0xfa, 0x08, 0x03, 0x75, 0x21, 0x01, 0x99,
- 0x9e, 0x9a, 0xa7, 0x9b, 0x8e, 0x12, 0x9e, 0x49, 0x6c, 0x60, 0x19, 0x63, 0x40, 0x00, 0x00, 0x00,
- 0xff, 0xff, 0x8e, 0x11, 0xaf, 0xda, 0x76, 0x01, 0x00, 0x00,
-}
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1/trace.pb.go b/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1/trace.pb.go
deleted file mode 100644
index 4de05355a..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1/trace.pb.go
+++ /dev/null
@@ -1,1543 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: opencensus/proto/trace/v1/trace.proto
-
-package v1
-
-import (
- fmt "fmt"
- v1 "github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1"
- proto "github.com/golang/protobuf/proto"
- timestamp "github.com/golang/protobuf/ptypes/timestamp"
- wrappers "github.com/golang/protobuf/ptypes/wrappers"
- math "math"
-)
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
-
-// Type of span. Can be used to specify additional relationships between spans
-// in addition to a parent/child relationship.
-type Span_SpanKind int32
-
-const (
- // Unspecified.
- Span_SPAN_KIND_UNSPECIFIED Span_SpanKind = 0
- // Indicates that the span covers server-side handling of an RPC or other
- // remote network request.
- Span_SERVER Span_SpanKind = 1
- // Indicates that the span covers the client-side wrapper around an RPC or
- // other remote request.
- Span_CLIENT Span_SpanKind = 2
-)
-
-var Span_SpanKind_name = map[int32]string{
- 0: "SPAN_KIND_UNSPECIFIED",
- 1: "SERVER",
- 2: "CLIENT",
-}
-
-var Span_SpanKind_value = map[string]int32{
- "SPAN_KIND_UNSPECIFIED": 0,
- "SERVER": 1,
- "CLIENT": 2,
-}
-
-func (x Span_SpanKind) String() string {
- return proto.EnumName(Span_SpanKind_name, int32(x))
-}
-
-func (Span_SpanKind) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 0}
-}
-
-// Indicates whether the message was sent or received.
-type Span_TimeEvent_MessageEvent_Type int32
-
-const (
- // Unknown event type.
- Span_TimeEvent_MessageEvent_TYPE_UNSPECIFIED Span_TimeEvent_MessageEvent_Type = 0
- // Indicates a sent message.
- Span_TimeEvent_MessageEvent_SENT Span_TimeEvent_MessageEvent_Type = 1
- // Indicates a received message.
- Span_TimeEvent_MessageEvent_RECEIVED Span_TimeEvent_MessageEvent_Type = 2
-)
-
-var Span_TimeEvent_MessageEvent_Type_name = map[int32]string{
- 0: "TYPE_UNSPECIFIED",
- 1: "SENT",
- 2: "RECEIVED",
-}
-
-var Span_TimeEvent_MessageEvent_Type_value = map[string]int32{
- "TYPE_UNSPECIFIED": 0,
- "SENT": 1,
- "RECEIVED": 2,
-}
-
-func (x Span_TimeEvent_MessageEvent_Type) String() string {
- return proto.EnumName(Span_TimeEvent_MessageEvent_Type_name, int32(x))
-}
-
-func (Span_TimeEvent_MessageEvent_Type) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 2, 1, 0}
-}
-
-// The relationship of the current span relative to the linked span: child,
-// parent, or unspecified.
-type Span_Link_Type int32
-
-const (
- // The relationship of the two spans is unknown, or known but other
- // than parent-child.
- Span_Link_TYPE_UNSPECIFIED Span_Link_Type = 0
- // The linked span is a child of the current span.
- Span_Link_CHILD_LINKED_SPAN Span_Link_Type = 1
- // The linked span is a parent of the current span.
- Span_Link_PARENT_LINKED_SPAN Span_Link_Type = 2
-)
-
-var Span_Link_Type_name = map[int32]string{
- 0: "TYPE_UNSPECIFIED",
- 1: "CHILD_LINKED_SPAN",
- 2: "PARENT_LINKED_SPAN",
-}
-
-var Span_Link_Type_value = map[string]int32{
- "TYPE_UNSPECIFIED": 0,
- "CHILD_LINKED_SPAN": 1,
- "PARENT_LINKED_SPAN": 2,
-}
-
-func (x Span_Link_Type) String() string {
- return proto.EnumName(Span_Link_Type_name, int32(x))
-}
-
-func (Span_Link_Type) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 4, 0}
-}
-
-// A span represents a single operation within a trace. Spans can be
-// nested to form a trace tree. Spans may also be linked to other spans
-// from the same or a different trace, forming graphs. Often, a trace
-// contains a root span that describes the end-to-end latency, and one
-// or more subspans for its sub-operations. A trace can also contain
-// multiple root spans, or none at all. Spans do not need to be
-// contiguous - there may be gaps or overlaps between spans in a trace.
-//
-// The next id is 17.
-// TODO(bdrutu): Add an example.
-type Span struct {
- // A unique identifier for a trace. All spans from the same trace share
- // the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes
- // is considered invalid.
- //
- // This field is semantically required. Receiver should generate new
- // random trace_id if empty or invalid trace_id was received.
- //
- // This field is required.
- TraceId []byte `protobuf:"bytes,1,opt,name=trace_id,json=traceId,proto3" json:"trace_id,omitempty"`
- // A unique identifier for a span within a trace, assigned when the span
- // is created. The ID is an 8-byte array. An ID with all zeroes is considered
- // invalid.
- //
- // This field is semantically required. Receiver should generate new
- // random span_id if empty or invalid span_id was received.
- //
- // This field is required.
- SpanId []byte `protobuf:"bytes,2,opt,name=span_id,json=spanId,proto3" json:"span_id,omitempty"`
- // The Tracestate on the span.
- Tracestate *Span_Tracestate `protobuf:"bytes,15,opt,name=tracestate,proto3" json:"tracestate,omitempty"`
- // The `span_id` of this span's parent span. If this is a root span, then this
- // field must be empty. The ID is an 8-byte array.
- ParentSpanId []byte `protobuf:"bytes,3,opt,name=parent_span_id,json=parentSpanId,proto3" json:"parent_span_id,omitempty"`
- // A description of the span's operation.
- //
- // For example, the name can be a qualified method name or a file name
- // and a line number where the operation is called. A best practice is to use
- // the same display name at the same call point in an application.
- // This makes it easier to correlate spans in different traces.
- //
- // This field is semantically required to be set to a non-empty string.
- // When a null or empty string is received, the receiver may use the string "name"
- // as a replacement. There might be smarter algorithms implemented by the
- // receiver to fix the empty span name.
- //
- // This field is required.
- Name *TruncatableString `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
- // Distinguishes between spans generated in a particular context. For example,
- // two spans with the same name may be distinguished using `CLIENT` (caller)
- // and `SERVER` (callee) to identify queueing latency associated with the span.
- Kind Span_SpanKind `protobuf:"varint,14,opt,name=kind,proto3,enum=opencensus.proto.trace.v1.Span_SpanKind" json:"kind,omitempty"`
- // The start time of the span. On the client side, this is the time kept by
- // the local machine where the span execution starts. On the server side, this
- // is the time when the server's application handler starts running.
- //
- // This field is semantically required. When not set on receive, the
- // receiver should set it to the value of the end_time field if that was
- // set, or to the current time if neither was set. It is important to
- // keep end_time > start_time for consistency.
- //
- // This field is required.
- StartTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
- // The end time of the span. On the client side, this is the time kept by
- // the local machine where the span execution ends. On the server side, this
- // is the time when the server application handler stops running.
- //
- // This field is semantically required. When not set on receive, the
- // receiver should set it to the start_time value. It is important to
- // keep end_time > start_time for consistency.
- //
- // This field is required.
- EndTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
- // A set of attributes on the span.
- Attributes *Span_Attributes `protobuf:"bytes,7,opt,name=attributes,proto3" json:"attributes,omitempty"`
- // A stack trace captured at the start of the span.
- StackTrace *StackTrace `protobuf:"bytes,8,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"`
- // The included time events.
- TimeEvents *Span_TimeEvents `protobuf:"bytes,9,opt,name=time_events,json=timeEvents,proto3" json:"time_events,omitempty"`
- // The included links.
- Links *Span_Links `protobuf:"bytes,10,opt,name=links,proto3" json:"links,omitempty"`
- // An optional final status for this span. Semantically, when Status
- // wasn't set it means the span ended without errors; assume
- // Status.Ok (code = 0).
- Status *Status `protobuf:"bytes,11,opt,name=status,proto3" json:"status,omitempty"`
- // An optional resource that is associated with this span. If not set, this span
- // should be part of a batch that does include the resource information, unless resource
- // information is unknown.
- Resource *v1.Resource `protobuf:"bytes,16,opt,name=resource,proto3" json:"resource,omitempty"`
- // A highly recommended but not required flag that identifies when a
- // trace crosses a process boundary. True when the parent_span belongs
- // to the same process as the current span. This flag is most commonly
- // used to indicate the need to adjust time as clocks in different
- // processes may not be synchronized.
- SameProcessAsParentSpan *wrappers.BoolValue `protobuf:"bytes,12,opt,name=same_process_as_parent_span,json=sameProcessAsParentSpan,proto3" json:"same_process_as_parent_span,omitempty"`
- // An optional number of child spans that were generated while this span
- // was active. If set, allows an implementation to detect missing child spans.
- ChildSpanCount *wrappers.UInt32Value `protobuf:"bytes,13,opt,name=child_span_count,json=childSpanCount,proto3" json:"child_span_count,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span) Reset() { *m = Span{} }
-func (m *Span) String() string { return proto.CompactTextString(m) }
-func (*Span) ProtoMessage() {}
-func (*Span) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0}
-}
-
-func (m *Span) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span.Unmarshal(m, b)
-}
-func (m *Span) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span.Marshal(b, m, deterministic)
-}
-func (m *Span) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span.Merge(m, src)
-}
-func (m *Span) XXX_Size() int {
- return xxx_messageInfo_Span.Size(m)
-}
-func (m *Span) XXX_DiscardUnknown() {
- xxx_messageInfo_Span.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span proto.InternalMessageInfo
-
-func (m *Span) GetTraceId() []byte {
- if m != nil {
- return m.TraceId
- }
- return nil
-}
-
-func (m *Span) GetSpanId() []byte {
- if m != nil {
- return m.SpanId
- }
- return nil
-}
-
-func (m *Span) GetTracestate() *Span_Tracestate {
- if m != nil {
- return m.Tracestate
- }
- return nil
-}
-
-func (m *Span) GetParentSpanId() []byte {
- if m != nil {
- return m.ParentSpanId
- }
- return nil
-}
-
-func (m *Span) GetName() *TruncatableString {
- if m != nil {
- return m.Name
- }
- return nil
-}
-
-func (m *Span) GetKind() Span_SpanKind {
- if m != nil {
- return m.Kind
- }
- return Span_SPAN_KIND_UNSPECIFIED
-}
-
-func (m *Span) GetStartTime() *timestamp.Timestamp {
- if m != nil {
- return m.StartTime
- }
- return nil
-}
-
-func (m *Span) GetEndTime() *timestamp.Timestamp {
- if m != nil {
- return m.EndTime
- }
- return nil
-}
-
-func (m *Span) GetAttributes() *Span_Attributes {
- if m != nil {
- return m.Attributes
- }
- return nil
-}
-
-func (m *Span) GetStackTrace() *StackTrace {
- if m != nil {
- return m.StackTrace
- }
- return nil
-}
-
-func (m *Span) GetTimeEvents() *Span_TimeEvents {
- if m != nil {
- return m.TimeEvents
- }
- return nil
-}
-
-func (m *Span) GetLinks() *Span_Links {
- if m != nil {
- return m.Links
- }
- return nil
-}
-
-func (m *Span) GetStatus() *Status {
- if m != nil {
- return m.Status
- }
- return nil
-}
-
-func (m *Span) GetResource() *v1.Resource {
- if m != nil {
- return m.Resource
- }
- return nil
-}
-
-func (m *Span) GetSameProcessAsParentSpan() *wrappers.BoolValue {
- if m != nil {
- return m.SameProcessAsParentSpan
- }
- return nil
-}
-
-func (m *Span) GetChildSpanCount() *wrappers.UInt32Value {
- if m != nil {
- return m.ChildSpanCount
- }
- return nil
-}
-
-// This field conveys information about request position in multiple distributed tracing graphs.
-// It is a list of Tracestate.Entry with a maximum of 32 members in the list.
-//
-// See https://github.com/w3c/distributed-tracing for more details about this field.
-type Span_Tracestate struct {
- // A list of entries that represent the Tracestate.
- Entries []*Span_Tracestate_Entry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span_Tracestate) Reset() { *m = Span_Tracestate{} }
-func (m *Span_Tracestate) String() string { return proto.CompactTextString(m) }
-func (*Span_Tracestate) ProtoMessage() {}
-func (*Span_Tracestate) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 0}
-}
-
-func (m *Span_Tracestate) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span_Tracestate.Unmarshal(m, b)
-}
-func (m *Span_Tracestate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span_Tracestate.Marshal(b, m, deterministic)
-}
-func (m *Span_Tracestate) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span_Tracestate.Merge(m, src)
-}
-func (m *Span_Tracestate) XXX_Size() int {
- return xxx_messageInfo_Span_Tracestate.Size(m)
-}
-func (m *Span_Tracestate) XXX_DiscardUnknown() {
- xxx_messageInfo_Span_Tracestate.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span_Tracestate proto.InternalMessageInfo
-
-func (m *Span_Tracestate) GetEntries() []*Span_Tracestate_Entry {
- if m != nil {
- return m.Entries
- }
- return nil
-}
-
-type Span_Tracestate_Entry struct {
- // The key must begin with a lowercase letter, and can only contain
- // lowercase letters 'a'-'z', digits '0'-'9', underscores '_', dashes
- // '-', asterisks '*', and forward slashes '/'.
- Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
- // The value is an opaque string of up to 256 printable ASCII
- // RFC0020 characters (i.e., the range 0x20 to 0x7E) except ',' and '='.
- // Note that this also excludes tabs, newlines, carriage returns, etc.
- Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span_Tracestate_Entry) Reset() { *m = Span_Tracestate_Entry{} }
-func (m *Span_Tracestate_Entry) String() string { return proto.CompactTextString(m) }
-func (*Span_Tracestate_Entry) ProtoMessage() {}
-func (*Span_Tracestate_Entry) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 0, 0}
-}
-
-func (m *Span_Tracestate_Entry) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span_Tracestate_Entry.Unmarshal(m, b)
-}
-func (m *Span_Tracestate_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span_Tracestate_Entry.Marshal(b, m, deterministic)
-}
-func (m *Span_Tracestate_Entry) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span_Tracestate_Entry.Merge(m, src)
-}
-func (m *Span_Tracestate_Entry) XXX_Size() int {
- return xxx_messageInfo_Span_Tracestate_Entry.Size(m)
-}
-func (m *Span_Tracestate_Entry) XXX_DiscardUnknown() {
- xxx_messageInfo_Span_Tracestate_Entry.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span_Tracestate_Entry proto.InternalMessageInfo
-
-func (m *Span_Tracestate_Entry) GetKey() string {
- if m != nil {
- return m.Key
- }
- return ""
-}
-
-func (m *Span_Tracestate_Entry) GetValue() string {
- if m != nil {
- return m.Value
- }
- return ""
-}
-
-// A set of attributes, each with a key and a value.
-type Span_Attributes struct {
- // The set of attributes. The value can be a string, an integer, a double
- // or the Boolean values `true` or `false`. Note, global attributes like
- // server name can be set as tags using resource API. Examples of attributes:
- //
- // "/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
- // "/http/server_latency": 300
- // "abc.com/myattribute": true
- // "abc.com/score": 10.239
- AttributeMap map[string]*AttributeValue `protobuf:"bytes,1,rep,name=attribute_map,json=attributeMap,proto3" json:"attribute_map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // The number of attributes that were discarded. Attributes can be discarded
- // because their keys are too long or because there are too many attributes.
- // If this value is 0, then no attributes were dropped.
- DroppedAttributesCount int32 `protobuf:"varint,2,opt,name=dropped_attributes_count,json=droppedAttributesCount,proto3" json:"dropped_attributes_count,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span_Attributes) Reset() { *m = Span_Attributes{} }
-func (m *Span_Attributes) String() string { return proto.CompactTextString(m) }
-func (*Span_Attributes) ProtoMessage() {}
-func (*Span_Attributes) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 1}
-}
-
-func (m *Span_Attributes) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span_Attributes.Unmarshal(m, b)
-}
-func (m *Span_Attributes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span_Attributes.Marshal(b, m, deterministic)
-}
-func (m *Span_Attributes) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span_Attributes.Merge(m, src)
-}
-func (m *Span_Attributes) XXX_Size() int {
- return xxx_messageInfo_Span_Attributes.Size(m)
-}
-func (m *Span_Attributes) XXX_DiscardUnknown() {
- xxx_messageInfo_Span_Attributes.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span_Attributes proto.InternalMessageInfo
-
-func (m *Span_Attributes) GetAttributeMap() map[string]*AttributeValue {
- if m != nil {
- return m.AttributeMap
- }
- return nil
-}
-
-func (m *Span_Attributes) GetDroppedAttributesCount() int32 {
- if m != nil {
- return m.DroppedAttributesCount
- }
- return 0
-}
-
-// A time-stamped annotation or message event in the Span.
-type Span_TimeEvent struct {
- // The time the event occurred.
- Time *timestamp.Timestamp `protobuf:"bytes,1,opt,name=time,proto3" json:"time,omitempty"`
- // A `TimeEvent` can contain either an `Annotation` object or a
- // `MessageEvent` object, but not both.
- //
- // Types that are valid to be assigned to Value:
- // *Span_TimeEvent_Annotation_
- // *Span_TimeEvent_MessageEvent_
- Value isSpan_TimeEvent_Value `protobuf_oneof:"value"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span_TimeEvent) Reset() { *m = Span_TimeEvent{} }
-func (m *Span_TimeEvent) String() string { return proto.CompactTextString(m) }
-func (*Span_TimeEvent) ProtoMessage() {}
-func (*Span_TimeEvent) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 2}
-}
-
-func (m *Span_TimeEvent) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span_TimeEvent.Unmarshal(m, b)
-}
-func (m *Span_TimeEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span_TimeEvent.Marshal(b, m, deterministic)
-}
-func (m *Span_TimeEvent) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span_TimeEvent.Merge(m, src)
-}
-func (m *Span_TimeEvent) XXX_Size() int {
- return xxx_messageInfo_Span_TimeEvent.Size(m)
-}
-func (m *Span_TimeEvent) XXX_DiscardUnknown() {
- xxx_messageInfo_Span_TimeEvent.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span_TimeEvent proto.InternalMessageInfo
-
-func (m *Span_TimeEvent) GetTime() *timestamp.Timestamp {
- if m != nil {
- return m.Time
- }
- return nil
-}
-
-type isSpan_TimeEvent_Value interface {
- isSpan_TimeEvent_Value()
-}
-
-type Span_TimeEvent_Annotation_ struct {
- Annotation *Span_TimeEvent_Annotation `protobuf:"bytes,2,opt,name=annotation,proto3,oneof"`
-}
-
-type Span_TimeEvent_MessageEvent_ struct {
- MessageEvent *Span_TimeEvent_MessageEvent `protobuf:"bytes,3,opt,name=message_event,json=messageEvent,proto3,oneof"`
-}
-
-func (*Span_TimeEvent_Annotation_) isSpan_TimeEvent_Value() {}
-
-func (*Span_TimeEvent_MessageEvent_) isSpan_TimeEvent_Value() {}
-
-func (m *Span_TimeEvent) GetValue() isSpan_TimeEvent_Value {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-func (m *Span_TimeEvent) GetAnnotation() *Span_TimeEvent_Annotation {
- if x, ok := m.GetValue().(*Span_TimeEvent_Annotation_); ok {
- return x.Annotation
- }
- return nil
-}
-
-func (m *Span_TimeEvent) GetMessageEvent() *Span_TimeEvent_MessageEvent {
- if x, ok := m.GetValue().(*Span_TimeEvent_MessageEvent_); ok {
- return x.MessageEvent
- }
- return nil
-}
-
-// XXX_OneofWrappers is for the internal use of the proto package.
-func (*Span_TimeEvent) XXX_OneofWrappers() []interface{} {
- return []interface{}{
- (*Span_TimeEvent_Annotation_)(nil),
- (*Span_TimeEvent_MessageEvent_)(nil),
- }
-}
-
-// A text annotation with a set of attributes.
-type Span_TimeEvent_Annotation struct {
- // A user-supplied message describing the event.
- Description *TruncatableString `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"`
- // A set of attributes on the annotation.
- Attributes *Span_Attributes `protobuf:"bytes,2,opt,name=attributes,proto3" json:"attributes,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span_TimeEvent_Annotation) Reset() { *m = Span_TimeEvent_Annotation{} }
-func (m *Span_TimeEvent_Annotation) String() string { return proto.CompactTextString(m) }
-func (*Span_TimeEvent_Annotation) ProtoMessage() {}
-func (*Span_TimeEvent_Annotation) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 2, 0}
-}
-
-func (m *Span_TimeEvent_Annotation) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span_TimeEvent_Annotation.Unmarshal(m, b)
-}
-func (m *Span_TimeEvent_Annotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span_TimeEvent_Annotation.Marshal(b, m, deterministic)
-}
-func (m *Span_TimeEvent_Annotation) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span_TimeEvent_Annotation.Merge(m, src)
-}
-func (m *Span_TimeEvent_Annotation) XXX_Size() int {
- return xxx_messageInfo_Span_TimeEvent_Annotation.Size(m)
-}
-func (m *Span_TimeEvent_Annotation) XXX_DiscardUnknown() {
- xxx_messageInfo_Span_TimeEvent_Annotation.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span_TimeEvent_Annotation proto.InternalMessageInfo
-
-func (m *Span_TimeEvent_Annotation) GetDescription() *TruncatableString {
- if m != nil {
- return m.Description
- }
- return nil
-}
-
-func (m *Span_TimeEvent_Annotation) GetAttributes() *Span_Attributes {
- if m != nil {
- return m.Attributes
- }
- return nil
-}
-
-// An event describing a message sent/received between Spans.
-type Span_TimeEvent_MessageEvent struct {
- // The type of MessageEvent. Indicates whether the message was sent or
- // received.
- Type Span_TimeEvent_MessageEvent_Type `protobuf:"varint,1,opt,name=type,proto3,enum=opencensus.proto.trace.v1.Span_TimeEvent_MessageEvent_Type" json:"type,omitempty"`
- // An identifier for the MessageEvent's message that can be used to match
- // SENT and RECEIVED MessageEvents. For example, this field could
- // represent a sequence ID for a streaming RPC. It is recommended to be
- // unique within a Span.
- Id uint64 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"`
- // The number of uncompressed bytes sent or received.
- UncompressedSize uint64 `protobuf:"varint,3,opt,name=uncompressed_size,json=uncompressedSize,proto3" json:"uncompressed_size,omitempty"`
- // The number of compressed bytes sent or received. If zero, assumed to
- // be the same size as uncompressed.
- CompressedSize uint64 `protobuf:"varint,4,opt,name=compressed_size,json=compressedSize,proto3" json:"compressed_size,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span_TimeEvent_MessageEvent) Reset() { *m = Span_TimeEvent_MessageEvent{} }
-func (m *Span_TimeEvent_MessageEvent) String() string { return proto.CompactTextString(m) }
-func (*Span_TimeEvent_MessageEvent) ProtoMessage() {}
-func (*Span_TimeEvent_MessageEvent) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 2, 1}
-}
-
-func (m *Span_TimeEvent_MessageEvent) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span_TimeEvent_MessageEvent.Unmarshal(m, b)
-}
-func (m *Span_TimeEvent_MessageEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span_TimeEvent_MessageEvent.Marshal(b, m, deterministic)
-}
-func (m *Span_TimeEvent_MessageEvent) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span_TimeEvent_MessageEvent.Merge(m, src)
-}
-func (m *Span_TimeEvent_MessageEvent) XXX_Size() int {
- return xxx_messageInfo_Span_TimeEvent_MessageEvent.Size(m)
-}
-func (m *Span_TimeEvent_MessageEvent) XXX_DiscardUnknown() {
- xxx_messageInfo_Span_TimeEvent_MessageEvent.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span_TimeEvent_MessageEvent proto.InternalMessageInfo
-
-func (m *Span_TimeEvent_MessageEvent) GetType() Span_TimeEvent_MessageEvent_Type {
- if m != nil {
- return m.Type
- }
- return Span_TimeEvent_MessageEvent_TYPE_UNSPECIFIED
-}
-
-func (m *Span_TimeEvent_MessageEvent) GetId() uint64 {
- if m != nil {
- return m.Id
- }
- return 0
-}
-
-func (m *Span_TimeEvent_MessageEvent) GetUncompressedSize() uint64 {
- if m != nil {
- return m.UncompressedSize
- }
- return 0
-}
-
-func (m *Span_TimeEvent_MessageEvent) GetCompressedSize() uint64 {
- if m != nil {
- return m.CompressedSize
- }
- return 0
-}
-
-// A collection of `TimeEvent`s. A `TimeEvent` is a time-stamped annotation
-// on the span, consisting of either user-supplied key-value pairs, or
-// details of a message sent/received between Spans.
-type Span_TimeEvents struct {
- // A collection of `TimeEvent`s.
- TimeEvent []*Span_TimeEvent `protobuf:"bytes,1,rep,name=time_event,json=timeEvent,proto3" json:"time_event,omitempty"`
- // The number of dropped annotations in all the included time events.
- // If the value is 0, then no annotations were dropped.
- DroppedAnnotationsCount int32 `protobuf:"varint,2,opt,name=dropped_annotations_count,json=droppedAnnotationsCount,proto3" json:"dropped_annotations_count,omitempty"`
- // The number of dropped message events in all the included time events.
- // If the value is 0, then no message events were dropped.
- DroppedMessageEventsCount int32 `protobuf:"varint,3,opt,name=dropped_message_events_count,json=droppedMessageEventsCount,proto3" json:"dropped_message_events_count,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span_TimeEvents) Reset() { *m = Span_TimeEvents{} }
-func (m *Span_TimeEvents) String() string { return proto.CompactTextString(m) }
-func (*Span_TimeEvents) ProtoMessage() {}
-func (*Span_TimeEvents) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 3}
-}
-
-func (m *Span_TimeEvents) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span_TimeEvents.Unmarshal(m, b)
-}
-func (m *Span_TimeEvents) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span_TimeEvents.Marshal(b, m, deterministic)
-}
-func (m *Span_TimeEvents) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span_TimeEvents.Merge(m, src)
-}
-func (m *Span_TimeEvents) XXX_Size() int {
- return xxx_messageInfo_Span_TimeEvents.Size(m)
-}
-func (m *Span_TimeEvents) XXX_DiscardUnknown() {
- xxx_messageInfo_Span_TimeEvents.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span_TimeEvents proto.InternalMessageInfo
-
-func (m *Span_TimeEvents) GetTimeEvent() []*Span_TimeEvent {
- if m != nil {
- return m.TimeEvent
- }
- return nil
-}
-
-func (m *Span_TimeEvents) GetDroppedAnnotationsCount() int32 {
- if m != nil {
- return m.DroppedAnnotationsCount
- }
- return 0
-}
-
-func (m *Span_TimeEvents) GetDroppedMessageEventsCount() int32 {
- if m != nil {
- return m.DroppedMessageEventsCount
- }
- return 0
-}
-
-// A pointer from the current span to another span in the same trace or in a
-// different trace. For example, this can be used in batching operations,
-// where a single batch handler processes multiple requests from different
-// traces or when the handler receives a request from a different project.
-type Span_Link struct {
- // A unique identifier of a trace that this linked span is part of. The ID is a
- // 16-byte array.
- TraceId []byte `protobuf:"bytes,1,opt,name=trace_id,json=traceId,proto3" json:"trace_id,omitempty"`
- // A unique identifier for the linked span. The ID is an 8-byte array.
- SpanId []byte `protobuf:"bytes,2,opt,name=span_id,json=spanId,proto3" json:"span_id,omitempty"`
- // The relationship of the current span relative to the linked span.
- Type Span_Link_Type `protobuf:"varint,3,opt,name=type,proto3,enum=opencensus.proto.trace.v1.Span_Link_Type" json:"type,omitempty"`
- // A set of attributes on the link.
- Attributes *Span_Attributes `protobuf:"bytes,4,opt,name=attributes,proto3" json:"attributes,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span_Link) Reset() { *m = Span_Link{} }
-func (m *Span_Link) String() string { return proto.CompactTextString(m) }
-func (*Span_Link) ProtoMessage() {}
-func (*Span_Link) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 4}
-}
-
-func (m *Span_Link) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span_Link.Unmarshal(m, b)
-}
-func (m *Span_Link) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span_Link.Marshal(b, m, deterministic)
-}
-func (m *Span_Link) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span_Link.Merge(m, src)
-}
-func (m *Span_Link) XXX_Size() int {
- return xxx_messageInfo_Span_Link.Size(m)
-}
-func (m *Span_Link) XXX_DiscardUnknown() {
- xxx_messageInfo_Span_Link.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span_Link proto.InternalMessageInfo
-
-func (m *Span_Link) GetTraceId() []byte {
- if m != nil {
- return m.TraceId
- }
- return nil
-}
-
-func (m *Span_Link) GetSpanId() []byte {
- if m != nil {
- return m.SpanId
- }
- return nil
-}
-
-func (m *Span_Link) GetType() Span_Link_Type {
- if m != nil {
- return m.Type
- }
- return Span_Link_TYPE_UNSPECIFIED
-}
-
-func (m *Span_Link) GetAttributes() *Span_Attributes {
- if m != nil {
- return m.Attributes
- }
- return nil
-}
-
-// A collection of links, which are references from this span to a span
-// in the same or different trace.
-type Span_Links struct {
- // A collection of links.
- Link []*Span_Link `protobuf:"bytes,1,rep,name=link,proto3" json:"link,omitempty"`
- // The number of dropped links after the maximum size was enforced. If
- // this value is 0, then no links were dropped.
- DroppedLinksCount int32 `protobuf:"varint,2,opt,name=dropped_links_count,json=droppedLinksCount,proto3" json:"dropped_links_count,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Span_Links) Reset() { *m = Span_Links{} }
-func (m *Span_Links) String() string { return proto.CompactTextString(m) }
-func (*Span_Links) ProtoMessage() {}
-func (*Span_Links) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{0, 5}
-}
-
-func (m *Span_Links) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Span_Links.Unmarshal(m, b)
-}
-func (m *Span_Links) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Span_Links.Marshal(b, m, deterministic)
-}
-func (m *Span_Links) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Span_Links.Merge(m, src)
-}
-func (m *Span_Links) XXX_Size() int {
- return xxx_messageInfo_Span_Links.Size(m)
-}
-func (m *Span_Links) XXX_DiscardUnknown() {
- xxx_messageInfo_Span_Links.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Span_Links proto.InternalMessageInfo
-
-func (m *Span_Links) GetLink() []*Span_Link {
- if m != nil {
- return m.Link
- }
- return nil
-}
-
-func (m *Span_Links) GetDroppedLinksCount() int32 {
- if m != nil {
- return m.DroppedLinksCount
- }
- return 0
-}
-
-// The `Status` type defines a logical error model that is suitable for different
-// programming environments, including REST APIs and RPC APIs. This proto's fields
-// are a subset of those of
-// [google.rpc.Status](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto),
-// which is used by [gRPC](https://github.com/grpc).
-type Status struct {
- // The status code. This is an optional field. It is safe to assume 0 (OK)
- // when not set.
- Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"`
- // A developer-facing error message, which should be in English.
- Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Status) Reset() { *m = Status{} }
-func (m *Status) String() string { return proto.CompactTextString(m) }
-func (*Status) ProtoMessage() {}
-func (*Status) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{1}
-}
-
-func (m *Status) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Status.Unmarshal(m, b)
-}
-func (m *Status) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Status.Marshal(b, m, deterministic)
-}
-func (m *Status) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Status.Merge(m, src)
-}
-func (m *Status) XXX_Size() int {
- return xxx_messageInfo_Status.Size(m)
-}
-func (m *Status) XXX_DiscardUnknown() {
- xxx_messageInfo_Status.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Status proto.InternalMessageInfo
-
-func (m *Status) GetCode() int32 {
- if m != nil {
- return m.Code
- }
- return 0
-}
-
-func (m *Status) GetMessage() string {
- if m != nil {
- return m.Message
- }
- return ""
-}
-
-// The value of an Attribute.
-type AttributeValue struct {
- // The type of the value.
- //
- // Types that are valid to be assigned to Value:
- // *AttributeValue_StringValue
- // *AttributeValue_IntValue
- // *AttributeValue_BoolValue
- // *AttributeValue_DoubleValue
- Value isAttributeValue_Value `protobuf_oneof:"value"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *AttributeValue) Reset() { *m = AttributeValue{} }
-func (m *AttributeValue) String() string { return proto.CompactTextString(m) }
-func (*AttributeValue) ProtoMessage() {}
-func (*AttributeValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{2}
-}
-
-func (m *AttributeValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_AttributeValue.Unmarshal(m, b)
-}
-func (m *AttributeValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_AttributeValue.Marshal(b, m, deterministic)
-}
-func (m *AttributeValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_AttributeValue.Merge(m, src)
-}
-func (m *AttributeValue) XXX_Size() int {
- return xxx_messageInfo_AttributeValue.Size(m)
-}
-func (m *AttributeValue) XXX_DiscardUnknown() {
- xxx_messageInfo_AttributeValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_AttributeValue proto.InternalMessageInfo
-
-type isAttributeValue_Value interface {
- isAttributeValue_Value()
-}
-
-type AttributeValue_StringValue struct {
- StringValue *TruncatableString `protobuf:"bytes,1,opt,name=string_value,json=stringValue,proto3,oneof"`
-}
-
-type AttributeValue_IntValue struct {
- IntValue int64 `protobuf:"varint,2,opt,name=int_value,json=intValue,proto3,oneof"`
-}
-
-type AttributeValue_BoolValue struct {
- BoolValue bool `protobuf:"varint,3,opt,name=bool_value,json=boolValue,proto3,oneof"`
-}
-
-type AttributeValue_DoubleValue struct {
- DoubleValue float64 `protobuf:"fixed64,4,opt,name=double_value,json=doubleValue,proto3,oneof"`
-}
-
-func (*AttributeValue_StringValue) isAttributeValue_Value() {}
-
-func (*AttributeValue_IntValue) isAttributeValue_Value() {}
-
-func (*AttributeValue_BoolValue) isAttributeValue_Value() {}
-
-func (*AttributeValue_DoubleValue) isAttributeValue_Value() {}
-
-func (m *AttributeValue) GetValue() isAttributeValue_Value {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-func (m *AttributeValue) GetStringValue() *TruncatableString {
- if x, ok := m.GetValue().(*AttributeValue_StringValue); ok {
- return x.StringValue
- }
- return nil
-}
-
-func (m *AttributeValue) GetIntValue() int64 {
- if x, ok := m.GetValue().(*AttributeValue_IntValue); ok {
- return x.IntValue
- }
- return 0
-}
-
-func (m *AttributeValue) GetBoolValue() bool {
- if x, ok := m.GetValue().(*AttributeValue_BoolValue); ok {
- return x.BoolValue
- }
- return false
-}
-
-func (m *AttributeValue) GetDoubleValue() float64 {
- if x, ok := m.GetValue().(*AttributeValue_DoubleValue); ok {
- return x.DoubleValue
- }
- return 0
-}
-
-// XXX_OneofWrappers is for the internal use of the proto package.
-func (*AttributeValue) XXX_OneofWrappers() []interface{} {
- return []interface{}{
- (*AttributeValue_StringValue)(nil),
- (*AttributeValue_IntValue)(nil),
- (*AttributeValue_BoolValue)(nil),
- (*AttributeValue_DoubleValue)(nil),
- }
-}
-
-// The call stack which originated this span.
-type StackTrace struct {
- // Stack frames in this stack trace.
- StackFrames *StackTrace_StackFrames `protobuf:"bytes,1,opt,name=stack_frames,json=stackFrames,proto3" json:"stack_frames,omitempty"`
- // The hash ID is used to conserve network bandwidth for duplicate
- // stack traces within a single trace.
- //
- // Often multiple spans will have identical stack traces.
- // The first occurrence of a stack trace should contain both
- // `stack_frames` and a value in `stack_trace_hash_id`.
- //
- // Subsequent spans within the same request can refer
- // to that stack trace by setting only `stack_trace_hash_id`.
- //
- // TODO: describe how to deal with the case where stack_trace_hash_id is
- // zero because it was not set.
- StackTraceHashId uint64 `protobuf:"varint,2,opt,name=stack_trace_hash_id,json=stackTraceHashId,proto3" json:"stack_trace_hash_id,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *StackTrace) Reset() { *m = StackTrace{} }
-func (m *StackTrace) String() string { return proto.CompactTextString(m) }
-func (*StackTrace) ProtoMessage() {}
-func (*StackTrace) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{3}
-}
-
-func (m *StackTrace) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_StackTrace.Unmarshal(m, b)
-}
-func (m *StackTrace) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_StackTrace.Marshal(b, m, deterministic)
-}
-func (m *StackTrace) XXX_Merge(src proto.Message) {
- xxx_messageInfo_StackTrace.Merge(m, src)
-}
-func (m *StackTrace) XXX_Size() int {
- return xxx_messageInfo_StackTrace.Size(m)
-}
-func (m *StackTrace) XXX_DiscardUnknown() {
- xxx_messageInfo_StackTrace.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_StackTrace proto.InternalMessageInfo
-
-func (m *StackTrace) GetStackFrames() *StackTrace_StackFrames {
- if m != nil {
- return m.StackFrames
- }
- return nil
-}
-
-func (m *StackTrace) GetStackTraceHashId() uint64 {
- if m != nil {
- return m.StackTraceHashId
- }
- return 0
-}
-
-// A single stack frame in a stack trace.
-type StackTrace_StackFrame struct {
- // The fully-qualified name that uniquely identifies the function or
- // method that is active in this frame.
- FunctionName *TruncatableString `protobuf:"bytes,1,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"`
- // An un-mangled function name, if `function_name` is
- // [mangled](http://www.avabodh.com/cxxin/namemangling.html). The name can
- // be fully qualified.
- OriginalFunctionName *TruncatableString `protobuf:"bytes,2,opt,name=original_function_name,json=originalFunctionName,proto3" json:"original_function_name,omitempty"`
- // The name of the source file where the function call appears.
- FileName *TruncatableString `protobuf:"bytes,3,opt,name=file_name,json=fileName,proto3" json:"file_name,omitempty"`
- // The line number in `file_name` where the function call appears.
- LineNumber int64 `protobuf:"varint,4,opt,name=line_number,json=lineNumber,proto3" json:"line_number,omitempty"`
- // The column number where the function call appears, if available.
- // This is important in JavaScript because of its anonymous functions.
- ColumnNumber int64 `protobuf:"varint,5,opt,name=column_number,json=columnNumber,proto3" json:"column_number,omitempty"`
- // The binary module from where the code was loaded.
- LoadModule *Module `protobuf:"bytes,6,opt,name=load_module,json=loadModule,proto3" json:"load_module,omitempty"`
- // The version of the deployed source code.
- SourceVersion *TruncatableString `protobuf:"bytes,7,opt,name=source_version,json=sourceVersion,proto3" json:"source_version,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *StackTrace_StackFrame) Reset() { *m = StackTrace_StackFrame{} }
-func (m *StackTrace_StackFrame) String() string { return proto.CompactTextString(m) }
-func (*StackTrace_StackFrame) ProtoMessage() {}
-func (*StackTrace_StackFrame) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{3, 0}
-}
-
-func (m *StackTrace_StackFrame) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_StackTrace_StackFrame.Unmarshal(m, b)
-}
-func (m *StackTrace_StackFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_StackTrace_StackFrame.Marshal(b, m, deterministic)
-}
-func (m *StackTrace_StackFrame) XXX_Merge(src proto.Message) {
- xxx_messageInfo_StackTrace_StackFrame.Merge(m, src)
-}
-func (m *StackTrace_StackFrame) XXX_Size() int {
- return xxx_messageInfo_StackTrace_StackFrame.Size(m)
-}
-func (m *StackTrace_StackFrame) XXX_DiscardUnknown() {
- xxx_messageInfo_StackTrace_StackFrame.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_StackTrace_StackFrame proto.InternalMessageInfo
-
-func (m *StackTrace_StackFrame) GetFunctionName() *TruncatableString {
- if m != nil {
- return m.FunctionName
- }
- return nil
-}
-
-func (m *StackTrace_StackFrame) GetOriginalFunctionName() *TruncatableString {
- if m != nil {
- return m.OriginalFunctionName
- }
- return nil
-}
-
-func (m *StackTrace_StackFrame) GetFileName() *TruncatableString {
- if m != nil {
- return m.FileName
- }
- return nil
-}
-
-func (m *StackTrace_StackFrame) GetLineNumber() int64 {
- if m != nil {
- return m.LineNumber
- }
- return 0
-}
-
-func (m *StackTrace_StackFrame) GetColumnNumber() int64 {
- if m != nil {
- return m.ColumnNumber
- }
- return 0
-}
-
-func (m *StackTrace_StackFrame) GetLoadModule() *Module {
- if m != nil {
- return m.LoadModule
- }
- return nil
-}
-
-func (m *StackTrace_StackFrame) GetSourceVersion() *TruncatableString {
- if m != nil {
- return m.SourceVersion
- }
- return nil
-}
-
-// A collection of stack frames, which can be truncated.
-type StackTrace_StackFrames struct {
- // Stack frames in this call stack.
- Frame []*StackTrace_StackFrame `protobuf:"bytes,1,rep,name=frame,proto3" json:"frame,omitempty"`
- // The number of stack frames that were dropped because there
- // were too many stack frames.
- // If this value is 0, then no stack frames were dropped.
- DroppedFramesCount int32 `protobuf:"varint,2,opt,name=dropped_frames_count,json=droppedFramesCount,proto3" json:"dropped_frames_count,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *StackTrace_StackFrames) Reset() { *m = StackTrace_StackFrames{} }
-func (m *StackTrace_StackFrames) String() string { return proto.CompactTextString(m) }
-func (*StackTrace_StackFrames) ProtoMessage() {}
-func (*StackTrace_StackFrames) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{3, 1}
-}
-
-func (m *StackTrace_StackFrames) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_StackTrace_StackFrames.Unmarshal(m, b)
-}
-func (m *StackTrace_StackFrames) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_StackTrace_StackFrames.Marshal(b, m, deterministic)
-}
-func (m *StackTrace_StackFrames) XXX_Merge(src proto.Message) {
- xxx_messageInfo_StackTrace_StackFrames.Merge(m, src)
-}
-func (m *StackTrace_StackFrames) XXX_Size() int {
- return xxx_messageInfo_StackTrace_StackFrames.Size(m)
-}
-func (m *StackTrace_StackFrames) XXX_DiscardUnknown() {
- xxx_messageInfo_StackTrace_StackFrames.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_StackTrace_StackFrames proto.InternalMessageInfo
-
-func (m *StackTrace_StackFrames) GetFrame() []*StackTrace_StackFrame {
- if m != nil {
- return m.Frame
- }
- return nil
-}
-
-func (m *StackTrace_StackFrames) GetDroppedFramesCount() int32 {
- if m != nil {
- return m.DroppedFramesCount
- }
- return 0
-}
-
-// A description of a binary module.
-type Module struct {
- // TODO: document the meaning of this field.
- // For example: main binary, kernel modules, and dynamic libraries
- // such as libc.so, sharedlib.so.
- Module *TruncatableString `protobuf:"bytes,1,opt,name=module,proto3" json:"module,omitempty"`
- // A unique identifier for the module, usually a hash of its
- // contents.
- BuildId *TruncatableString `protobuf:"bytes,2,opt,name=build_id,json=buildId,proto3" json:"build_id,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Module) Reset() { *m = Module{} }
-func (m *Module) String() string { return proto.CompactTextString(m) }
-func (*Module) ProtoMessage() {}
-func (*Module) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{4}
-}
-
-func (m *Module) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Module.Unmarshal(m, b)
-}
-func (m *Module) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Module.Marshal(b, m, deterministic)
-}
-func (m *Module) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Module.Merge(m, src)
-}
-func (m *Module) XXX_Size() int {
- return xxx_messageInfo_Module.Size(m)
-}
-func (m *Module) XXX_DiscardUnknown() {
- xxx_messageInfo_Module.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Module proto.InternalMessageInfo
-
-func (m *Module) GetModule() *TruncatableString {
- if m != nil {
- return m.Module
- }
- return nil
-}
-
-func (m *Module) GetBuildId() *TruncatableString {
- if m != nil {
- return m.BuildId
- }
- return nil
-}
-
-// A string that might be shortened to a specified length.
-type TruncatableString struct {
- // The shortened string. For example, if the original string was 500 bytes long and
- // the limit of the string was 128 bytes, then this value contains the first 128
- // bytes of the 500-byte string. Note that truncation always happens on a
- // character boundary, to ensure that a truncated string is still valid UTF-8.
- // Because it may contain multi-byte characters, the size of the truncated string
- // may be less than the truncation limit.
- Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
- // The number of bytes removed from the original string. If this
- // value is 0, then the string was not shortened.
- TruncatedByteCount int32 `protobuf:"varint,2,opt,name=truncated_byte_count,json=truncatedByteCount,proto3" json:"truncated_byte_count,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *TruncatableString) Reset() { *m = TruncatableString{} }
-func (m *TruncatableString) String() string { return proto.CompactTextString(m) }
-func (*TruncatableString) ProtoMessage() {}
-func (*TruncatableString) Descriptor() ([]byte, []int) {
- return fileDescriptor_8ea38bbb821bf584, []int{5}
-}
-
-func (m *TruncatableString) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_TruncatableString.Unmarshal(m, b)
-}
-func (m *TruncatableString) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_TruncatableString.Marshal(b, m, deterministic)
-}
-func (m *TruncatableString) XXX_Merge(src proto.Message) {
- xxx_messageInfo_TruncatableString.Merge(m, src)
-}
-func (m *TruncatableString) XXX_Size() int {
- return xxx_messageInfo_TruncatableString.Size(m)
-}
-func (m *TruncatableString) XXX_DiscardUnknown() {
- xxx_messageInfo_TruncatableString.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_TruncatableString proto.InternalMessageInfo
-
-func (m *TruncatableString) GetValue() string {
- if m != nil {
- return m.Value
- }
- return ""
-}
-
-func (m *TruncatableString) GetTruncatedByteCount() int32 {
- if m != nil {
- return m.TruncatedByteCount
- }
- return 0
-}
-
-func init() {
- proto.RegisterEnum("opencensus.proto.trace.v1.Span_SpanKind", Span_SpanKind_name, Span_SpanKind_value)
- proto.RegisterEnum("opencensus.proto.trace.v1.Span_TimeEvent_MessageEvent_Type", Span_TimeEvent_MessageEvent_Type_name, Span_TimeEvent_MessageEvent_Type_value)
- proto.RegisterEnum("opencensus.proto.trace.v1.Span_Link_Type", Span_Link_Type_name, Span_Link_Type_value)
- proto.RegisterType((*Span)(nil), "opencensus.proto.trace.v1.Span")
- proto.RegisterType((*Span_Tracestate)(nil), "opencensus.proto.trace.v1.Span.Tracestate")
- proto.RegisterType((*Span_Tracestate_Entry)(nil), "opencensus.proto.trace.v1.Span.Tracestate.Entry")
- proto.RegisterType((*Span_Attributes)(nil), "opencensus.proto.trace.v1.Span.Attributes")
- proto.RegisterMapType((map[string]*AttributeValue)(nil), "opencensus.proto.trace.v1.Span.Attributes.AttributeMapEntry")
- proto.RegisterType((*Span_TimeEvent)(nil), "opencensus.proto.trace.v1.Span.TimeEvent")
- proto.RegisterType((*Span_TimeEvent_Annotation)(nil), "opencensus.proto.trace.v1.Span.TimeEvent.Annotation")
- proto.RegisterType((*Span_TimeEvent_MessageEvent)(nil), "opencensus.proto.trace.v1.Span.TimeEvent.MessageEvent")
- proto.RegisterType((*Span_TimeEvents)(nil), "opencensus.proto.trace.v1.Span.TimeEvents")
- proto.RegisterType((*Span_Link)(nil), "opencensus.proto.trace.v1.Span.Link")
- proto.RegisterType((*Span_Links)(nil), "opencensus.proto.trace.v1.Span.Links")
- proto.RegisterType((*Status)(nil), "opencensus.proto.trace.v1.Status")
- proto.RegisterType((*AttributeValue)(nil), "opencensus.proto.trace.v1.AttributeValue")
- proto.RegisterType((*StackTrace)(nil), "opencensus.proto.trace.v1.StackTrace")
- proto.RegisterType((*StackTrace_StackFrame)(nil), "opencensus.proto.trace.v1.StackTrace.StackFrame")
- proto.RegisterType((*StackTrace_StackFrames)(nil), "opencensus.proto.trace.v1.StackTrace.StackFrames")
- proto.RegisterType((*Module)(nil), "opencensus.proto.trace.v1.Module")
- proto.RegisterType((*TruncatableString)(nil), "opencensus.proto.trace.v1.TruncatableString")
-}
-
-func init() {
- proto.RegisterFile("opencensus/proto/trace/v1/trace.proto", fileDescriptor_8ea38bbb821bf584)
-}
-
-var fileDescriptor_8ea38bbb821bf584 = []byte{
- // 1557 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x58, 0xeb, 0x52, 0x1b, 0x47,
- 0x16, 0x66, 0x74, 0xd7, 0x91, 0x90, 0x45, 0x1b, 0xdb, 0x83, 0xd6, 0xbb, 0x66, 0x65, 0x7b, 0x17,
- 0xaf, 0x17, 0x61, 0xb0, 0xd7, 0xe5, 0x6b, 0x79, 0x11, 0x88, 0x48, 0x06, 0x2b, 0x72, 0x4b, 0xa6,
- 0x72, 0xa9, 0xd4, 0xd4, 0x48, 0xd3, 0x88, 0x09, 0x52, 0xcf, 0x64, 0xa6, 0x87, 0x14, 0x7e, 0x81,
- 0x54, 0x2a, 0xff, 0x52, 0x95, 0xca, 0x0b, 0xe4, 0x47, 0x5e, 0x24, 0x0f, 0x90, 0xca, 0x73, 0xe4,
- 0x09, 0xf2, 0x27, 0xd5, 0xdd, 0x73, 0x13, 0xd8, 0xa0, 0xc8, 0x7f, 0xa8, 0x9e, 0xee, 0xf3, 0x7d,
- 0x7d, 0x4e, 0x9f, 0x2b, 0x82, 0xdb, 0x96, 0x4d, 0xe8, 0x80, 0x50, 0xd7, 0x73, 0xd7, 0x6c, 0xc7,
- 0x62, 0xd6, 0x1a, 0x73, 0xf4, 0x01, 0x59, 0x3b, 0x5e, 0x97, 0x8b, 0x9a, 0xd8, 0x44, 0x4b, 0x91,
- 0x98, 0xdc, 0xa9, 0xc9, 0xd3, 0xe3, 0xf5, 0xca, 0xdd, 0x33, 0x0c, 0x0e, 0x71, 0x2d, 0xcf, 0x91,
- 0x24, 0xc1, 0x5a, 0xa2, 0x2a, 0x37, 0x86, 0x96, 0x35, 0x1c, 0x11, 0x29, 0xd8, 0xf7, 0x0e, 0xd6,
- 0x98, 0x39, 0x26, 0x2e, 0xd3, 0xc7, 0xb6, 0x2f, 0xf0, 0x8f, 0xd3, 0x02, 0x5f, 0x3b, 0xba, 0x6d,
- 0x13, 0xc7, 0xbf, 0xb6, 0xfa, 0xcb, 0x15, 0x48, 0x75, 0x6d, 0x9d, 0xa2, 0x25, 0xc8, 0x09, 0x15,
- 0x34, 0xd3, 0x50, 0x95, 0x65, 0x65, 0xa5, 0x88, 0xb3, 0xe2, 0xbb, 0x65, 0xa0, 0x6b, 0x90, 0x75,
- 0x6d, 0x9d, 0xf2, 0x93, 0x84, 0x38, 0xc9, 0xf0, 0xcf, 0x96, 0x81, 0x5e, 0x02, 0x08, 0x19, 0x97,
- 0xe9, 0x8c, 0xa8, 0x97, 0x96, 0x95, 0x95, 0xc2, 0xc6, 0x7f, 0x6a, 0xef, 0x35, 0xad, 0xc6, 0x2f,
- 0xaa, 0xf5, 0x42, 0x04, 0x8e, 0xa1, 0xd1, 0x2d, 0x28, 0xd9, 0xba, 0x43, 0x28, 0xd3, 0x82, 0xbb,
- 0x92, 0xe2, 0xae, 0xa2, 0xdc, 0xed, 0xca, 0x1b, 0xff, 0x0f, 0x29, 0xaa, 0x8f, 0x89, 0x9a, 0x12,
- 0x77, 0xfd, 0xf7, 0x9c, 0xbb, 0x7a, 0x8e, 0x47, 0x07, 0x3a, 0xd3, 0xfb, 0x23, 0xd2, 0x65, 0x8e,
- 0x49, 0x87, 0x58, 0x20, 0xd1, 0x33, 0x48, 0x1d, 0x99, 0xd4, 0x50, 0x4b, 0xcb, 0xca, 0x4a, 0x69,
- 0x63, 0xe5, 0x22, 0x6d, 0xf9, 0x9f, 0x5d, 0x93, 0x1a, 0x58, 0xa0, 0xd0, 0x63, 0x00, 0x97, 0xe9,
- 0x0e, 0xd3, 0xf8, 0x3b, 0xab, 0x69, 0xa1, 0x45, 0xa5, 0x26, 0xdf, 0xb8, 0x16, 0xbc, 0x71, 0xad,
- 0x17, 0x38, 0x01, 0xe7, 0x85, 0x34, 0xff, 0x46, 0xff, 0x83, 0x1c, 0xa1, 0x86, 0x04, 0x66, 0x2e,
- 0x04, 0x66, 0x09, 0x35, 0x04, 0xec, 0x25, 0x80, 0xce, 0x98, 0x63, 0xf6, 0x3d, 0x46, 0x5c, 0x35,
- 0x3b, 0xdd, 0x1b, 0x6f, 0x86, 0x08, 0x1c, 0x43, 0xa3, 0x1d, 0x28, 0xb8, 0x4c, 0x1f, 0x1c, 0x69,
- 0x42, 0x5a, 0xcd, 0x09, 0xb2, 0xdb, 0xe7, 0x91, 0x71, 0x69, 0xe1, 0x30, 0x0c, 0x6e, 0xb8, 0x46,
- 0xbb, 0x50, 0xe0, 0x66, 0x68, 0xe4, 0x98, 0x50, 0xe6, 0xaa, 0xf9, 0x29, 0x1d, 0x6f, 0x8e, 0x49,
- 0x43, 0x20, 0x30, 0xb0, 0x70, 0x8d, 0x9e, 0x42, 0x7a, 0x64, 0xd2, 0x23, 0x57, 0x85, 0x8b, 0xd5,
- 0xe1, 0x34, 0x7b, 0x5c, 0x18, 0x4b, 0x0c, 0x7a, 0x0c, 0x19, 0x1e, 0x3e, 0x9e, 0xab, 0x16, 0x04,
- 0xfa, 0x9f, 0xe7, 0x1b, 0xc3, 0x3c, 0x17, 0xfb, 0x00, 0x54, 0x87, 0x5c, 0x90, 0x4c, 0x6a, 0x59,
- 0x80, 0xff, 0x75, 0x16, 0x1c, 0xa6, 0xdb, 0xf1, 0x7a, 0x0d, 0xfb, 0x6b, 0x1c, 0xe2, 0xd0, 0x27,
- 0xf0, 0x37, 0x57, 0x1f, 0x13, 0xcd, 0x76, 0xac, 0x01, 0x71, 0x5d, 0x4d, 0x77, 0xb5, 0x58, 0x10,
- 0xab, 0xc5, 0xf7, 0xb8, 0xb9, 0x6e, 0x59, 0xa3, 0x7d, 0x7d, 0xe4, 0x11, 0x7c, 0x8d, 0xc3, 0x3b,
- 0x12, 0xbd, 0xe9, 0x76, 0xc2, 0x50, 0x47, 0x3b, 0x50, 0x1e, 0x1c, 0x9a, 0x23, 0x43, 0x66, 0xc3,
- 0xc0, 0xf2, 0x28, 0x53, 0xe7, 0x05, 0xdd, 0xf5, 0x33, 0x74, 0x6f, 0x5a, 0x94, 0xdd, 0xdf, 0x90,
- 0x84, 0x25, 0x81, 0xe2, 0x14, 0x5b, 0x1c, 0x53, 0xf9, 0x56, 0x01, 0x88, 0x32, 0x0e, 0xbd, 0x84,
- 0x2c, 0xa1, 0xcc, 0x31, 0x89, 0xab, 0x2a, 0xcb, 0xc9, 0x95, 0xc2, 0xc6, 0xbd, 0xe9, 0xd3, 0xb5,
- 0xd6, 0xa0, 0xcc, 0x39, 0xc1, 0x01, 0x41, 0x65, 0x0d, 0xd2, 0x62, 0x07, 0x95, 0x21, 0x79, 0x44,
- 0x4e, 0x44, 0xd5, 0xc8, 0x63, 0xbe, 0x44, 0x8b, 0x90, 0x3e, 0xe6, 0xea, 0x88, 0x7a, 0x91, 0xc7,
- 0xf2, 0xa3, 0xf2, 0x43, 0x02, 0x20, 0x8a, 0x4c, 0xa4, 0xc3, 0x7c, 0x18, 0x9b, 0xda, 0x58, 0xb7,
- 0x7d, 0x8d, 0x9e, 0x4d, 0x1f, 0xdc, 0xd1, 0xf2, 0x95, 0x6e, 0x4b, 0xed, 0x8a, 0x7a, 0x6c, 0x0b,
- 0x3d, 0x02, 0xd5, 0x70, 0x2c, 0xdb, 0x26, 0x86, 0x16, 0xa5, 0x81, 0xff, 0x9a, 0x5c, 0xb5, 0x34,
- 0xbe, 0xea, 0x9f, 0x47, 0xa4, 0xf2, 0xdd, 0xbe, 0x84, 0x85, 0x33, 0xe4, 0xef, 0x30, 0xf4, 0x45,
- 0xdc, 0xd0, 0xc2, 0xc6, 0x9d, 0x73, 0x74, 0x0f, 0xe9, 0xa4, 0xa3, 0x24, 0xee, 0x49, 0xe2, 0x91,
- 0x52, 0xf9, 0x29, 0x0d, 0xf9, 0x30, 0x39, 0x50, 0x0d, 0x52, 0xa2, 0x46, 0x28, 0x17, 0xd6, 0x08,
- 0x21, 0x87, 0xf6, 0x01, 0x74, 0x4a, 0x2d, 0xa6, 0x33, 0xd3, 0xa2, 0xbe, 0x1e, 0x0f, 0xa6, 0xce,
- 0xc5, 0xda, 0x66, 0x88, 0x6d, 0xce, 0xe1, 0x18, 0x13, 0xfa, 0x02, 0xe6, 0xc7, 0xc4, 0x75, 0xf5,
- 0xa1, 0x9f, 0xe7, 0xa2, 0x1e, 0x17, 0x36, 0x1e, 0x4e, 0x4f, 0xfd, 0x4a, 0xc2, 0xc5, 0x47, 0x73,
- 0x0e, 0x17, 0xc7, 0xb1, 0xef, 0xca, 0xcf, 0x0a, 0x40, 0x74, 0x37, 0x6a, 0x43, 0xc1, 0x20, 0xee,
- 0xc0, 0x31, 0x6d, 0x61, 0x86, 0x32, 0x43, 0x7d, 0x8f, 0x13, 0x9c, 0x2a, 0x9b, 0x89, 0x0f, 0x29,
- 0x9b, 0x95, 0x3f, 0x14, 0x28, 0xc6, 0x6d, 0x41, 0x1f, 0x43, 0x8a, 0x9d, 0xd8, 0xd2, 0x45, 0xa5,
- 0x8d, 0xa7, 0xb3, 0xbd, 0x48, 0xad, 0x77, 0x62, 0x13, 0x2c, 0x88, 0x50, 0x09, 0x12, 0x7e, 0x73,
- 0x4d, 0xe1, 0x84, 0x69, 0xa0, 0xbb, 0xb0, 0xe0, 0xd1, 0x81, 0x35, 0xb6, 0x1d, 0xe2, 0xba, 0xc4,
- 0xd0, 0x5c, 0xf3, 0x2d, 0x11, 0xef, 0x9f, 0xc2, 0xe5, 0xf8, 0x41, 0xd7, 0x7c, 0x4b, 0xd0, 0xbf,
- 0xe1, 0xd2, 0x69, 0xd1, 0x94, 0x10, 0x2d, 0x4d, 0x0a, 0x56, 0x1f, 0x40, 0x8a, 0xdf, 0x89, 0x16,
- 0xa1, 0xdc, 0xfb, 0xb4, 0xd3, 0xd0, 0xde, 0xb4, 0xbb, 0x9d, 0xc6, 0x56, 0x6b, 0xa7, 0xd5, 0xd8,
- 0x2e, 0xcf, 0xa1, 0x1c, 0xa4, 0xba, 0x8d, 0x76, 0xaf, 0xac, 0xa0, 0x22, 0xe4, 0x70, 0x63, 0xab,
- 0xd1, 0xda, 0x6f, 0x6c, 0x97, 0x13, 0xf5, 0xac, 0x1f, 0xe2, 0x95, 0xdf, 0x78, 0x29, 0x89, 0xea,
- 0x76, 0x13, 0x20, 0x6a, 0x02, 0x7e, 0xee, 0xde, 0x99, 0xfa, 0x29, 0x70, 0x3e, 0x6c, 0x01, 0xe8,
- 0x09, 0x2c, 0x85, 0x59, 0x1a, 0x46, 0xc4, 0x64, 0x9a, 0x5e, 0x0b, 0xd2, 0x34, 0x3a, 0x17, 0x79,
- 0x8a, 0x5e, 0xc0, 0xf5, 0x00, 0x3b, 0x11, 0xad, 0x01, 0x3c, 0x29, 0xe0, 0x01, 0x7f, 0xfc, 0xfd,
- 0xfd, 0x44, 0xff, 0x3e, 0x01, 0x29, 0xde, 0x52, 0x66, 0x1a, 0x80, 0x9e, 0xfb, 0x81, 0x90, 0x14,
- 0x81, 0x70, 0x67, 0x9a, 0xd6, 0x15, 0x77, 0xfb, 0x64, 0x90, 0xa6, 0x3e, 0x24, 0x48, 0xab, 0xbb,
- 0xe7, 0x3a, 0xf7, 0x0a, 0x2c, 0x6c, 0x35, 0x5b, 0x7b, 0xdb, 0xda, 0x5e, 0xab, 0xbd, 0xdb, 0xd8,
- 0xd6, 0xba, 0x9d, 0xcd, 0x76, 0x59, 0x41, 0x57, 0x01, 0x75, 0x36, 0x71, 0xa3, 0xdd, 0x9b, 0xd8,
- 0x4f, 0x54, 0xbe, 0x82, 0xb4, 0x68, 0xb3, 0xe8, 0x11, 0xa4, 0x78, 0xa3, 0xf5, 0xdd, 0x7b, 0x6b,
- 0x1a, 0x03, 0xb1, 0x40, 0xa0, 0x1a, 0x5c, 0x0e, 0x1c, 0x23, 0x5a, 0xf5, 0x84, 0x3b, 0x17, 0xfc,
- 0x23, 0x71, 0x89, 0xf0, 0x43, 0xf5, 0x39, 0xe4, 0x82, 0x59, 0x0b, 0x2d, 0xc1, 0x15, 0xae, 0x88,
- 0xb6, 0xdb, 0x6a, 0x6f, 0x9f, 0x32, 0x04, 0x20, 0xd3, 0x6d, 0xe0, 0xfd, 0x06, 0x2e, 0x2b, 0x7c,
- 0xbd, 0xb5, 0xd7, 0xe2, 0x31, 0x9b, 0xa8, 0x3e, 0x84, 0x8c, 0xec, 0xef, 0x08, 0x41, 0x6a, 0x60,
- 0x19, 0x32, 0x39, 0xd3, 0x58, 0xac, 0x91, 0x0a, 0x59, 0x3f, 0x3a, 0xfc, 0x8e, 0x14, 0x7c, 0x56,
- 0x7f, 0x55, 0xa0, 0x34, 0x59, 0x99, 0xd1, 0x6b, 0x28, 0xba, 0xa2, 0xa2, 0x68, 0xb2, 0xb4, 0xcf,
- 0x50, 0x8b, 0x9a, 0x73, 0xb8, 0x20, 0x39, 0x24, 0xe5, 0xdf, 0x21, 0x6f, 0x52, 0xa6, 0x45, 0xad,
- 0x22, 0xd9, 0x9c, 0xc3, 0x39, 0x93, 0x32, 0x79, 0x7c, 0x03, 0xa0, 0x6f, 0x59, 0x23, 0xff, 0x9c,
- 0x07, 0x53, 0xae, 0x39, 0x87, 0xf3, 0xfd, 0x60, 0x4c, 0x40, 0x37, 0xa1, 0x68, 0x58, 0x5e, 0x7f,
- 0x44, 0x7c, 0x11, 0x1e, 0x2a, 0x0a, 0xbf, 0x44, 0xee, 0x0a, 0xa1, 0x30, 0x51, 0xab, 0xdf, 0x65,
- 0x00, 0xa2, 0xc9, 0x0d, 0xf5, 0xb8, 0x3d, 0x7c, 0xea, 0x3b, 0x70, 0xf4, 0xb1, 0x68, 0xfc, 0xdc,
- 0x9e, 0xf5, 0xa9, 0xc6, 0x3e, 0xb9, 0xdc, 0x11, 0x40, 0x2c, 0x87, 0x47, 0xf9, 0x81, 0x56, 0xe1,
- 0x72, 0x6c, 0x96, 0xd4, 0x0e, 0x75, 0xf7, 0x50, 0x0b, 0x6b, 0x58, 0x39, 0x1a, 0x16, 0x9b, 0xba,
- 0x7b, 0xd8, 0x32, 0x2a, 0xbf, 0x27, 0x7d, 0x9d, 0x04, 0x1c, 0xbd, 0x86, 0xf9, 0x03, 0x8f, 0x0e,
- 0x78, 0x22, 0x6b, 0x62, 0xa0, 0x9f, 0xa5, 0xe0, 0x17, 0x03, 0x8a, 0x36, 0xa7, 0xec, 0xc3, 0x55,
- 0xcb, 0x31, 0x87, 0x26, 0xd5, 0x47, 0xda, 0x24, 0x77, 0x62, 0x06, 0xee, 0xc5, 0x80, 0x6b, 0x27,
- 0x7e, 0x47, 0x0b, 0xf2, 0x07, 0xe6, 0x88, 0x48, 0xda, 0xe4, 0x0c, 0xb4, 0x39, 0x0e, 0x17, 0x54,
- 0x37, 0xa0, 0x30, 0x32, 0x29, 0xd1, 0xa8, 0x37, 0xee, 0x13, 0x47, 0x78, 0x34, 0x89, 0x81, 0x6f,
- 0xb5, 0xc5, 0x0e, 0xba, 0x09, 0xf3, 0x03, 0x6b, 0xe4, 0x8d, 0x69, 0x20, 0x92, 0x16, 0x22, 0x45,
- 0xb9, 0xe9, 0x0b, 0xd5, 0xa1, 0x30, 0xb2, 0x74, 0x43, 0x1b, 0x5b, 0x86, 0x37, 0x0a, 0xfe, 0xaf,
- 0x38, 0x6f, 0x08, 0x7e, 0x25, 0x04, 0x31, 0x70, 0x94, 0x5c, 0xa3, 0x2e, 0x94, 0xe4, 0x38, 0xab,
- 0x1d, 0x13, 0xc7, 0xe5, 0xdd, 0x37, 0x3b, 0x83, 0x65, 0xf3, 0x92, 0x63, 0x5f, 0x52, 0x54, 0xbe,
- 0x51, 0xa0, 0x10, 0x8b, 0x1d, 0xb4, 0x03, 0x69, 0x11, 0x7e, 0xd3, 0x8c, 0x9d, 0xef, 0x8a, 0x3e,
- 0x2c, 0xe1, 0xe8, 0x1e, 0x2c, 0x06, 0x65, 0x45, 0x86, 0xf3, 0x44, 0x5d, 0x41, 0xfe, 0x99, 0xbc,
- 0x54, 0x16, 0x96, 0x1f, 0x15, 0xc8, 0xf8, 0x96, 0x6e, 0x43, 0xc6, 0x7f, 0xa8, 0x59, 0xc2, 0xcd,
- 0xc7, 0xa2, 0x8f, 0x20, 0xd7, 0xf7, 0xf8, 0x68, 0xee, 0x87, 0xfb, 0x5f, 0xe5, 0xc9, 0x0a, 0x74,
- 0xcb, 0xa8, 0x7e, 0x0e, 0x0b, 0x67, 0x4e, 0xa3, 0xd1, 0x59, 0x89, 0x8d, 0xce, 0xdc, 0x6c, 0x26,
- 0x45, 0x89, 0xa1, 0xf5, 0x4f, 0x18, 0x99, 0x34, 0x3b, 0x3c, 0xab, 0x9f, 0x30, 0x22, 0xcc, 0xae,
- 0xdb, 0x70, 0xdd, 0xb4, 0xde, 0xaf, 0x57, 0x5d, 0xfe, 0x57, 0xd0, 0xe1, 0x9b, 0x1d, 0xe5, 0xb3,
- 0xfa, 0xd0, 0x64, 0x87, 0x5e, 0xbf, 0x36, 0xb0, 0xc6, 0x6b, 0x52, 0x7e, 0xd5, 0xa4, 0x2e, 0x73,
- 0xbc, 0x31, 0xa1, 0xb2, 0xdf, 0xae, 0x45, 0x54, 0xab, 0xf2, 0x67, 0x89, 0x21, 0xa1, 0xab, 0xc3,
- 0xe8, 0xf7, 0x8d, 0x7e, 0x46, 0x6c, 0xdf, 0xff, 0x33, 0x00, 0x00, 0xff, 0xff, 0x1e, 0xe0, 0x94,
- 0x45, 0x03, 0x11, 0x00, 0x00,
-}
diff --git a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1/trace_config.pb.go b/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1/trace_config.pb.go
deleted file mode 100644
index 2ac2d28c4..000000000
--- a/vendor/github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1/trace_config.pb.go
+++ /dev/null
@@ -1,358 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: opencensus/proto/trace/v1/trace_config.proto
-
-package v1
-
-import (
- fmt "fmt"
- proto "github.com/golang/protobuf/proto"
- math "math"
-)
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
-
-// How spans should be sampled:
-// - Always off
-// - Always on
-// - Always follow the parent Span's decision (off if no parent).
-type ConstantSampler_ConstantDecision int32
-
-const (
- ConstantSampler_ALWAYS_OFF ConstantSampler_ConstantDecision = 0
- ConstantSampler_ALWAYS_ON ConstantSampler_ConstantDecision = 1
- ConstantSampler_ALWAYS_PARENT ConstantSampler_ConstantDecision = 2
-)
-
-var ConstantSampler_ConstantDecision_name = map[int32]string{
- 0: "ALWAYS_OFF",
- 1: "ALWAYS_ON",
- 2: "ALWAYS_PARENT",
-}
-
-var ConstantSampler_ConstantDecision_value = map[string]int32{
- "ALWAYS_OFF": 0,
- "ALWAYS_ON": 1,
- "ALWAYS_PARENT": 2,
-}
-
-func (x ConstantSampler_ConstantDecision) String() string {
- return proto.EnumName(ConstantSampler_ConstantDecision_name, int32(x))
-}
-
-func (ConstantSampler_ConstantDecision) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_5359209b41ff50c5, []int{2, 0}
-}
-
-// Global configuration of the trace service. All fields must be specified, or
-// the default (zero) values will be used for each type.
-type TraceConfig struct {
- // The global default sampler used to make decisions on span sampling.
- //
- // Types that are valid to be assigned to Sampler:
- // *TraceConfig_ProbabilitySampler
- // *TraceConfig_ConstantSampler
- // *TraceConfig_RateLimitingSampler
- Sampler isTraceConfig_Sampler `protobuf_oneof:"sampler"`
- // The global default max number of attributes per span.
- MaxNumberOfAttributes int64 `protobuf:"varint,4,opt,name=max_number_of_attributes,json=maxNumberOfAttributes,proto3" json:"max_number_of_attributes,omitempty"`
- // The global default max number of annotation events per span.
- MaxNumberOfAnnotations int64 `protobuf:"varint,5,opt,name=max_number_of_annotations,json=maxNumberOfAnnotations,proto3" json:"max_number_of_annotations,omitempty"`
- // The global default max number of message events per span.
- MaxNumberOfMessageEvents int64 `protobuf:"varint,6,opt,name=max_number_of_message_events,json=maxNumberOfMessageEvents,proto3" json:"max_number_of_message_events,omitempty"`
- // The global default max number of link entries per span.
- MaxNumberOfLinks int64 `protobuf:"varint,7,opt,name=max_number_of_links,json=maxNumberOfLinks,proto3" json:"max_number_of_links,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *TraceConfig) Reset() { *m = TraceConfig{} }
-func (m *TraceConfig) String() string { return proto.CompactTextString(m) }
-func (*TraceConfig) ProtoMessage() {}
-func (*TraceConfig) Descriptor() ([]byte, []int) {
- return fileDescriptor_5359209b41ff50c5, []int{0}
-}
-
-func (m *TraceConfig) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_TraceConfig.Unmarshal(m, b)
-}
-func (m *TraceConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_TraceConfig.Marshal(b, m, deterministic)
-}
-func (m *TraceConfig) XXX_Merge(src proto.Message) {
- xxx_messageInfo_TraceConfig.Merge(m, src)
-}
-func (m *TraceConfig) XXX_Size() int {
- return xxx_messageInfo_TraceConfig.Size(m)
-}
-func (m *TraceConfig) XXX_DiscardUnknown() {
- xxx_messageInfo_TraceConfig.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_TraceConfig proto.InternalMessageInfo
-
-type isTraceConfig_Sampler interface {
- isTraceConfig_Sampler()
-}
-
-type TraceConfig_ProbabilitySampler struct {
- ProbabilitySampler *ProbabilitySampler `protobuf:"bytes,1,opt,name=probability_sampler,json=probabilitySampler,proto3,oneof"`
-}
-
-type TraceConfig_ConstantSampler struct {
- ConstantSampler *ConstantSampler `protobuf:"bytes,2,opt,name=constant_sampler,json=constantSampler,proto3,oneof"`
-}
-
-type TraceConfig_RateLimitingSampler struct {
- RateLimitingSampler *RateLimitingSampler `protobuf:"bytes,3,opt,name=rate_limiting_sampler,json=rateLimitingSampler,proto3,oneof"`
-}
-
-func (*TraceConfig_ProbabilitySampler) isTraceConfig_Sampler() {}
-
-func (*TraceConfig_ConstantSampler) isTraceConfig_Sampler() {}
-
-func (*TraceConfig_RateLimitingSampler) isTraceConfig_Sampler() {}
-
-func (m *TraceConfig) GetSampler() isTraceConfig_Sampler {
- if m != nil {
- return m.Sampler
- }
- return nil
-}
-
-func (m *TraceConfig) GetProbabilitySampler() *ProbabilitySampler {
- if x, ok := m.GetSampler().(*TraceConfig_ProbabilitySampler); ok {
- return x.ProbabilitySampler
- }
- return nil
-}
-
-func (m *TraceConfig) GetConstantSampler() *ConstantSampler {
- if x, ok := m.GetSampler().(*TraceConfig_ConstantSampler); ok {
- return x.ConstantSampler
- }
- return nil
-}
-
-func (m *TraceConfig) GetRateLimitingSampler() *RateLimitingSampler {
- if x, ok := m.GetSampler().(*TraceConfig_RateLimitingSampler); ok {
- return x.RateLimitingSampler
- }
- return nil
-}
-
-func (m *TraceConfig) GetMaxNumberOfAttributes() int64 {
- if m != nil {
- return m.MaxNumberOfAttributes
- }
- return 0
-}
-
-func (m *TraceConfig) GetMaxNumberOfAnnotations() int64 {
- if m != nil {
- return m.MaxNumberOfAnnotations
- }
- return 0
-}
-
-func (m *TraceConfig) GetMaxNumberOfMessageEvents() int64 {
- if m != nil {
- return m.MaxNumberOfMessageEvents
- }
- return 0
-}
-
-func (m *TraceConfig) GetMaxNumberOfLinks() int64 {
- if m != nil {
- return m.MaxNumberOfLinks
- }
- return 0
-}
-
-// XXX_OneofWrappers is for the internal use of the proto package.
-func (*TraceConfig) XXX_OneofWrappers() []interface{} {
- return []interface{}{
- (*TraceConfig_ProbabilitySampler)(nil),
- (*TraceConfig_ConstantSampler)(nil),
- (*TraceConfig_RateLimitingSampler)(nil),
- }
-}
-
-// Sampler that tries to uniformly sample traces with a given probability.
-// The probability of sampling a trace is equal to that of the specified probability.
-type ProbabilitySampler struct {
- // The desired probability of sampling. Must be within [0.0, 1.0].
- SamplingProbability float64 `protobuf:"fixed64,1,opt,name=samplingProbability,proto3" json:"samplingProbability,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *ProbabilitySampler) Reset() { *m = ProbabilitySampler{} }
-func (m *ProbabilitySampler) String() string { return proto.CompactTextString(m) }
-func (*ProbabilitySampler) ProtoMessage() {}
-func (*ProbabilitySampler) Descriptor() ([]byte, []int) {
- return fileDescriptor_5359209b41ff50c5, []int{1}
-}
-
-func (m *ProbabilitySampler) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ProbabilitySampler.Unmarshal(m, b)
-}
-func (m *ProbabilitySampler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ProbabilitySampler.Marshal(b, m, deterministic)
-}
-func (m *ProbabilitySampler) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ProbabilitySampler.Merge(m, src)
-}
-func (m *ProbabilitySampler) XXX_Size() int {
- return xxx_messageInfo_ProbabilitySampler.Size(m)
-}
-func (m *ProbabilitySampler) XXX_DiscardUnknown() {
- xxx_messageInfo_ProbabilitySampler.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_ProbabilitySampler proto.InternalMessageInfo
-
-func (m *ProbabilitySampler) GetSamplingProbability() float64 {
- if m != nil {
- return m.SamplingProbability
- }
- return 0
-}
-
-// Sampler that always makes a constant decision on span sampling.
-type ConstantSampler struct {
- Decision ConstantSampler_ConstantDecision `protobuf:"varint,1,opt,name=decision,proto3,enum=opencensus.proto.trace.v1.ConstantSampler_ConstantDecision" json:"decision,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *ConstantSampler) Reset() { *m = ConstantSampler{} }
-func (m *ConstantSampler) String() string { return proto.CompactTextString(m) }
-func (*ConstantSampler) ProtoMessage() {}
-func (*ConstantSampler) Descriptor() ([]byte, []int) {
- return fileDescriptor_5359209b41ff50c5, []int{2}
-}
-
-func (m *ConstantSampler) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ConstantSampler.Unmarshal(m, b)
-}
-func (m *ConstantSampler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ConstantSampler.Marshal(b, m, deterministic)
-}
-func (m *ConstantSampler) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ConstantSampler.Merge(m, src)
-}
-func (m *ConstantSampler) XXX_Size() int {
- return xxx_messageInfo_ConstantSampler.Size(m)
-}
-func (m *ConstantSampler) XXX_DiscardUnknown() {
- xxx_messageInfo_ConstantSampler.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_ConstantSampler proto.InternalMessageInfo
-
-func (m *ConstantSampler) GetDecision() ConstantSampler_ConstantDecision {
- if m != nil {
- return m.Decision
- }
- return ConstantSampler_ALWAYS_OFF
-}
-
-// Sampler that tries to sample with a rate per time window.
-type RateLimitingSampler struct {
- // Rate per second.
- Qps int64 `protobuf:"varint,1,opt,name=qps,proto3" json:"qps,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *RateLimitingSampler) Reset() { *m = RateLimitingSampler{} }
-func (m *RateLimitingSampler) String() string { return proto.CompactTextString(m) }
-func (*RateLimitingSampler) ProtoMessage() {}
-func (*RateLimitingSampler) Descriptor() ([]byte, []int) {
- return fileDescriptor_5359209b41ff50c5, []int{3}
-}
-
-func (m *RateLimitingSampler) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_RateLimitingSampler.Unmarshal(m, b)
-}
-func (m *RateLimitingSampler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_RateLimitingSampler.Marshal(b, m, deterministic)
-}
-func (m *RateLimitingSampler) XXX_Merge(src proto.Message) {
- xxx_messageInfo_RateLimitingSampler.Merge(m, src)
-}
-func (m *RateLimitingSampler) XXX_Size() int {
- return xxx_messageInfo_RateLimitingSampler.Size(m)
-}
-func (m *RateLimitingSampler) XXX_DiscardUnknown() {
- xxx_messageInfo_RateLimitingSampler.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_RateLimitingSampler proto.InternalMessageInfo
-
-func (m *RateLimitingSampler) GetQps() int64 {
- if m != nil {
- return m.Qps
- }
- return 0
-}
-
-func init() {
- proto.RegisterEnum("opencensus.proto.trace.v1.ConstantSampler_ConstantDecision", ConstantSampler_ConstantDecision_name, ConstantSampler_ConstantDecision_value)
- proto.RegisterType((*TraceConfig)(nil), "opencensus.proto.trace.v1.TraceConfig")
- proto.RegisterType((*ProbabilitySampler)(nil), "opencensus.proto.trace.v1.ProbabilitySampler")
- proto.RegisterType((*ConstantSampler)(nil), "opencensus.proto.trace.v1.ConstantSampler")
- proto.RegisterType((*RateLimitingSampler)(nil), "opencensus.proto.trace.v1.RateLimitingSampler")
-}
-
-func init() {
- proto.RegisterFile("opencensus/proto/trace/v1/trace_config.proto", fileDescriptor_5359209b41ff50c5)
-}
-
-var fileDescriptor_5359209b41ff50c5 = []byte{
- // 486 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xc1, 0x4e, 0xdb, 0x40,
- 0x10, 0x86, 0x31, 0xa1, 0x50, 0x06, 0x01, 0xee, 0x5a, 0x54, 0x46, 0xe2, 0x80, 0x7c, 0x29, 0xaa,
- 0x6a, 0xbb, 0xd0, 0x43, 0x55, 0x55, 0xaa, 0x94, 0x00, 0x51, 0x0f, 0x69, 0x88, 0x0c, 0x52, 0xd4,
- 0x5e, 0xdc, 0xb5, 0xd9, 0xb8, 0xab, 0xc6, 0xb3, 0xae, 0x77, 0x1d, 0xd1, 0x77, 0xe9, 0x43, 0xf4,
- 0x11, 0xab, 0xac, 0x5d, 0xdb, 0x49, 0x00, 0x71, 0xdb, 0xf9, 0xff, 0xf9, 0x7e, 0xaf, 0xbc, 0x33,
- 0xf0, 0x46, 0x64, 0x0c, 0x63, 0x86, 0xb2, 0x90, 0x7e, 0x96, 0x0b, 0x25, 0x7c, 0x95, 0xd3, 0x98,
- 0xf9, 0xb3, 0xd3, 0xf2, 0x10, 0xc6, 0x02, 0x27, 0x3c, 0xf1, 0xb4, 0x47, 0x0e, 0x9b, 0xee, 0x52,
- 0xf1, 0x74, 0x93, 0x37, 0x3b, 0x75, 0xfe, 0x6c, 0xc0, 0xce, 0xcd, 0xbc, 0x38, 0xd7, 0x00, 0xf9,
- 0x0e, 0x56, 0x96, 0x8b, 0x88, 0x46, 0x7c, 0xca, 0xd5, 0xef, 0x50, 0xd2, 0x34, 0x9b, 0xb2, 0xdc,
- 0x36, 0x8e, 0x8d, 0x93, 0x9d, 0x33, 0xd7, 0x7b, 0x30, 0xc8, 0x1b, 0x35, 0xd4, 0x75, 0x09, 0x7d,
- 0x5e, 0x0b, 0x48, 0xb6, 0xa2, 0x92, 0x31, 0x98, 0xb1, 0x40, 0xa9, 0x28, 0xaa, 0x3a, 0x7e, 0x5d,
- 0xc7, 0xbf, 0x7e, 0x24, 0xfe, 0xbc, 0x42, 0x9a, 0xec, 0xfd, 0x78, 0x51, 0x22, 0xb7, 0x70, 0x90,
- 0x53, 0xc5, 0xc2, 0x29, 0x4f, 0xb9, 0xe2, 0x98, 0xd4, 0xe9, 0x1d, 0x9d, 0xee, 0x3d, 0x92, 0x1e,
- 0x50, 0xc5, 0x06, 0x15, 0xd6, 0x7c, 0xc1, 0xca, 0x57, 0x65, 0xf2, 0x1e, 0xec, 0x94, 0xde, 0x85,
- 0x58, 0xa4, 0x11, 0xcb, 0x43, 0x31, 0x09, 0xa9, 0x52, 0x39, 0x8f, 0x0a, 0xc5, 0xa4, 0xbd, 0x71,
- 0x6c, 0x9c, 0x74, 0x82, 0x83, 0x94, 0xde, 0x0d, 0xb5, 0x7d, 0x35, 0xe9, 0xd6, 0x26, 0xf9, 0x00,
- 0x87, 0x4b, 0x20, 0xa2, 0x50, 0x54, 0x71, 0x81, 0xd2, 0x7e, 0xa6, 0xc9, 0x97, 0x6d, 0xb2, 0x71,
- 0xc9, 0x27, 0x38, 0x5a, 0x44, 0x53, 0x26, 0x25, 0x4d, 0x58, 0xc8, 0x66, 0x0c, 0x95, 0xb4, 0x37,
- 0x35, 0x6d, 0xb7, 0xe8, 0x2f, 0x65, 0xc3, 0xa5, 0xf6, 0x89, 0x0b, 0xd6, 0x22, 0x3f, 0xe5, 0xf8,
- 0x53, 0xda, 0x5b, 0x1a, 0x33, 0x5b, 0xd8, 0x60, 0xae, 0xf7, 0xb6, 0x61, 0xab, 0xfa, 0x75, 0x4e,
- 0x1f, 0xc8, 0xea, 0xc3, 0x92, 0xb7, 0x60, 0xe9, 0x06, 0x8e, 0x49, 0xcb, 0xd5, 0x43, 0x62, 0x04,
- 0xf7, 0x59, 0xce, 0x5f, 0x03, 0xf6, 0x97, 0x9e, 0x90, 0x8c, 0xe1, 0xf9, 0x2d, 0x8b, 0xb9, 0xe4,
- 0x02, 0x35, 0xba, 0x77, 0xf6, 0xf1, 0xe9, 0x03, 0x50, 0xd7, 0x17, 0x55, 0x44, 0x50, 0x87, 0x39,
- 0x17, 0x60, 0x2e, 0xbb, 0x64, 0x0f, 0xa0, 0x3b, 0x18, 0x77, 0xbf, 0x5e, 0x87, 0x57, 0xfd, 0xbe,
- 0xb9, 0x46, 0x76, 0x61, 0xfb, 0x7f, 0x3d, 0x34, 0x0d, 0xf2, 0x02, 0x76, 0xab, 0x72, 0xd4, 0x0d,
- 0x2e, 0x87, 0x37, 0xe6, 0xba, 0xf3, 0x0a, 0xac, 0x7b, 0xc6, 0x82, 0x98, 0xd0, 0xf9, 0x95, 0x49,
- 0x7d, 0xe1, 0x4e, 0x30, 0x3f, 0xf6, 0x66, 0x70, 0xc4, 0xc5, 0xc3, 0x37, 0xef, 0x99, 0xad, 0xfd,
- 0x1a, 0xcd, 0xad, 0x91, 0xf1, 0xad, 0x97, 0x70, 0xf5, 0xa3, 0x88, 0xbc, 0x58, 0xa4, 0x7e, 0x49,
- 0xb9, 0x1c, 0xa5, 0xca, 0x8b, 0x94, 0x61, 0xf9, 0xea, 0x7e, 0x13, 0xe8, 0x96, 0x1b, 0x9e, 0x30,
- 0x74, 0x93, 0x66, 0xd1, 0xa3, 0x4d, 0x2d, 0xbf, 0xfb, 0x17, 0x00, 0x00, 0xff, 0xff, 0x13, 0xe2,
- 0xd9, 0x56, 0x0c, 0x04, 0x00, 0x00,
-}
diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go
deleted file mode 100644
index ada2b78e8..000000000
--- a/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go
+++ /dev/null
@@ -1,1271 +0,0 @@
-// Go support for Protocol Buffers - Google's data interchange format
-//
-// Copyright 2015 The Go Authors. All rights reserved.
-// https://github.com/golang/protobuf
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-/*
-Package jsonpb provides marshaling and unmarshaling between protocol buffers and JSON.
-It follows the specification at https://developers.google.com/protocol-buffers/docs/proto3#json.
-
-This package produces a different output than the standard "encoding/json" package,
-which does not operate correctly on protocol buffers.
-*/
-package jsonpb
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
- "io"
- "math"
- "reflect"
- "sort"
- "strconv"
- "strings"
- "time"
-
- "github.com/golang/protobuf/proto"
-
- stpb "github.com/golang/protobuf/ptypes/struct"
-)
-
-const secondInNanos = int64(time.Second / time.Nanosecond)
-
-// Marshaler is a configurable object for converting between
-// protocol buffer objects and a JSON representation for them.
-type Marshaler struct {
- // Whether to render enum values as integers, as opposed to string values.
- EnumsAsInts bool
-
- // Whether to render fields with zero values.
- EmitDefaults bool
-
- // A string to indent each level by. The presence of this field will
- // also cause a space to appear between the field separator and
-	// value, and for newlines to appear between fields and array
- // elements.
- Indent string
-
- // Whether to use the original (.proto) name for fields.
- OrigName bool
-
- // A custom URL resolver to use when marshaling Any messages to JSON.
- // If unset, the default resolution strategy is to extract the
- // fully-qualified type name from the type URL and pass that to
- // proto.MessageType(string).
- AnyResolver AnyResolver
-}
-
-// AnyResolver takes a type URL, present in an Any message, and resolves it into
-// an instance of the associated message.
-type AnyResolver interface {
- Resolve(typeUrl string) (proto.Message, error)
-}
-
-func defaultResolveAny(typeUrl string) (proto.Message, error) {
- // Only the part of typeUrl after the last slash is relevant.
- mname := typeUrl
- if slash := strings.LastIndex(mname, "/"); slash >= 0 {
- mname = mname[slash+1:]
- }
- mt := proto.MessageType(mname)
- if mt == nil {
- return nil, fmt.Errorf("unknown message type %q", mname)
- }
- return reflect.New(mt.Elem()).Interface().(proto.Message), nil
-}
-
-// JSONPBMarshaler is implemented by protobuf messages that customize the
-// way they are marshaled to JSON. Messages that implement this should
-// also implement JSONPBUnmarshaler so that the custom format can be
-// parsed.
-//
-// The JSON marshaling must follow the proto to JSON specification:
-// https://developers.google.com/protocol-buffers/docs/proto3#json
-type JSONPBMarshaler interface {
- MarshalJSONPB(*Marshaler) ([]byte, error)
-}
-
-// JSONPBUnmarshaler is implemented by protobuf messages that customize
-// the way they are unmarshaled from JSON. Messages that implement this
-// should also implement JSONPBMarshaler so that the custom format can be
-// produced.
-//
-// The JSON unmarshaling must follow the JSON to proto specification:
-// https://developers.google.com/protocol-buffers/docs/proto3#json
-type JSONPBUnmarshaler interface {
- UnmarshalJSONPB(*Unmarshaler, []byte) error
-}
-
-// Marshal marshals a protocol buffer into JSON.
-func (m *Marshaler) Marshal(out io.Writer, pb proto.Message) error {
- v := reflect.ValueOf(pb)
- if pb == nil || (v.Kind() == reflect.Ptr && v.IsNil()) {
- return errors.New("Marshal called with nil")
- }
- // Check for unset required fields first.
- if err := checkRequiredFields(pb); err != nil {
- return err
- }
- writer := &errWriter{writer: out}
- return m.marshalObject(writer, pb, "", "")
-}
-
-// MarshalToString converts a protocol buffer object to JSON string.
-func (m *Marshaler) MarshalToString(pb proto.Message) (string, error) {
- var buf bytes.Buffer
- if err := m.Marshal(&buf, pb); err != nil {
- return "", err
- }
- return buf.String(), nil
-}
-
-type int32Slice []int32
-
-var nonFinite = map[string]float64{
- `"NaN"`: math.NaN(),
- `"Infinity"`: math.Inf(1),
- `"-Infinity"`: math.Inf(-1),
-}
-
-// For sorting extensions ids to ensure stable output.
-func (s int32Slice) Len() int { return len(s) }
-func (s int32Slice) Less(i, j int) bool { return s[i] < s[j] }
-func (s int32Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
-
-type wkt interface {
- XXX_WellKnownType() string
-}
-
-// marshalObject writes a struct to the Writer.
-func (m *Marshaler) marshalObject(out *errWriter, v proto.Message, indent, typeURL string) error {
- if jsm, ok := v.(JSONPBMarshaler); ok {
- b, err := jsm.MarshalJSONPB(m)
- if err != nil {
- return err
- }
- if typeURL != "" {
- // we are marshaling this object to an Any type
- var js map[string]*json.RawMessage
- if err = json.Unmarshal(b, &js); err != nil {
- return fmt.Errorf("type %T produced invalid JSON: %v", v, err)
- }
- turl, err := json.Marshal(typeURL)
- if err != nil {
- return fmt.Errorf("failed to marshal type URL %q to JSON: %v", typeURL, err)
- }
- js["@type"] = (*json.RawMessage)(&turl)
- if b, err = json.Marshal(js); err != nil {
- return err
- }
- }
-
- out.write(string(b))
- return out.err
- }
-
- s := reflect.ValueOf(v).Elem()
-
- // Handle well-known types.
- if wkt, ok := v.(wkt); ok {
- switch wkt.XXX_WellKnownType() {
- case "DoubleValue", "FloatValue", "Int64Value", "UInt64Value",
- "Int32Value", "UInt32Value", "BoolValue", "StringValue", "BytesValue":
- // "Wrappers use the same representation in JSON
- // as the wrapped primitive type, ..."
- sprop := proto.GetProperties(s.Type())
- return m.marshalValue(out, sprop.Prop[0], s.Field(0), indent)
- case "Any":
- // Any is a bit more involved.
- return m.marshalAny(out, v, indent)
- case "Duration":
- // "Generated output always contains 0, 3, 6, or 9 fractional digits,
- // depending on required precision."
- s, ns := s.Field(0).Int(), s.Field(1).Int()
- if ns <= -secondInNanos || ns >= secondInNanos {
- return fmt.Errorf("ns out of range (%v, %v)", -secondInNanos, secondInNanos)
- }
- if (s > 0 && ns < 0) || (s < 0 && ns > 0) {
- return errors.New("signs of seconds and nanos do not match")
- }
- if s < 0 {
- ns = -ns
- }
- x := fmt.Sprintf("%d.%09d", s, ns)
- x = strings.TrimSuffix(x, "000")
- x = strings.TrimSuffix(x, "000")
- x = strings.TrimSuffix(x, ".000")
- out.write(`"`)
- out.write(x)
- out.write(`s"`)
- return out.err
- case "Struct", "ListValue":
- // Let marshalValue handle the `Struct.fields` map or the `ListValue.values` slice.
- // TODO: pass the correct Properties if needed.
- return m.marshalValue(out, &proto.Properties{}, s.Field(0), indent)
- case "Timestamp":
- // "RFC 3339, where generated output will always be Z-normalized
- // and uses 0, 3, 6 or 9 fractional digits."
- s, ns := s.Field(0).Int(), s.Field(1).Int()
- if ns < 0 || ns >= secondInNanos {
- return fmt.Errorf("ns out of range [0, %v)", secondInNanos)
- }
- t := time.Unix(s, ns).UTC()
- // time.RFC3339Nano isn't exactly right (we need to get 3/6/9 fractional digits).
- x := t.Format("2006-01-02T15:04:05.000000000")
- x = strings.TrimSuffix(x, "000")
- x = strings.TrimSuffix(x, "000")
- x = strings.TrimSuffix(x, ".000")
- out.write(`"`)
- out.write(x)
- out.write(`Z"`)
- return out.err
- case "Value":
- // Value has a single oneof.
- kind := s.Field(0)
- if kind.IsNil() {
- // "absence of any variant indicates an error"
- return errors.New("nil Value")
- }
- // oneof -> *T -> T -> T.F
- x := kind.Elem().Elem().Field(0)
- // TODO: pass the correct Properties if needed.
- return m.marshalValue(out, &proto.Properties{}, x, indent)
- }
- }
-
- out.write("{")
- if m.Indent != "" {
- out.write("\n")
- }
-
- firstField := true
-
- if typeURL != "" {
- if err := m.marshalTypeURL(out, indent, typeURL); err != nil {
- return err
- }
- firstField = false
- }
-
- for i := 0; i < s.NumField(); i++ {
- value := s.Field(i)
- valueField := s.Type().Field(i)
- if strings.HasPrefix(valueField.Name, "XXX_") {
- continue
- }
-
- // IsNil will panic on most value kinds.
- switch value.Kind() {
- case reflect.Chan, reflect.Func, reflect.Interface:
- if value.IsNil() {
- continue
- }
- }
-
- if !m.EmitDefaults {
- switch value.Kind() {
- case reflect.Bool:
- if !value.Bool() {
- continue
- }
- case reflect.Int32, reflect.Int64:
- if value.Int() == 0 {
- continue
- }
- case reflect.Uint32, reflect.Uint64:
- if value.Uint() == 0 {
- continue
- }
- case reflect.Float32, reflect.Float64:
- if value.Float() == 0 {
- continue
- }
- case reflect.String:
- if value.Len() == 0 {
- continue
- }
- case reflect.Map, reflect.Ptr, reflect.Slice:
- if value.IsNil() {
- continue
- }
- }
- }
-
- // Oneof fields need special handling.
- if valueField.Tag.Get("protobuf_oneof") != "" {
- // value is an interface containing &T{real_value}.
- sv := value.Elem().Elem() // interface -> *T -> T
- value = sv.Field(0)
- valueField = sv.Type().Field(0)
- }
- prop := jsonProperties(valueField, m.OrigName)
- if !firstField {
- m.writeSep(out)
- }
- if err := m.marshalField(out, prop, value, indent); err != nil {
- return err
- }
- firstField = false
- }
-
- // Handle proto2 extensions.
- if ep, ok := v.(proto.Message); ok {
- extensions := proto.RegisteredExtensions(v)
- // Sort extensions for stable output.
- ids := make([]int32, 0, len(extensions))
- for id, desc := range extensions {
- if !proto.HasExtension(ep, desc) {
- continue
- }
- ids = append(ids, id)
- }
- sort.Sort(int32Slice(ids))
- for _, id := range ids {
- desc := extensions[id]
- if desc == nil {
- // unknown extension
- continue
- }
- ext, extErr := proto.GetExtension(ep, desc)
- if extErr != nil {
- return extErr
- }
- value := reflect.ValueOf(ext)
- var prop proto.Properties
- prop.Parse(desc.Tag)
- prop.JSONName = fmt.Sprintf("[%s]", desc.Name)
- if !firstField {
- m.writeSep(out)
- }
- if err := m.marshalField(out, &prop, value, indent); err != nil {
- return err
- }
- firstField = false
- }
-
- }
-
- if m.Indent != "" {
- out.write("\n")
- out.write(indent)
- }
- out.write("}")
- return out.err
-}
-
-func (m *Marshaler) writeSep(out *errWriter) {
- if m.Indent != "" {
- out.write(",\n")
- } else {
- out.write(",")
- }
-}
-
-func (m *Marshaler) marshalAny(out *errWriter, any proto.Message, indent string) error {
- // "If the Any contains a value that has a special JSON mapping,
- // it will be converted as follows: {"@type": xxx, "value": yyy}.
- // Otherwise, the value will be converted into a JSON object,
- // and the "@type" field will be inserted to indicate the actual data type."
- v := reflect.ValueOf(any).Elem()
- turl := v.Field(0).String()
- val := v.Field(1).Bytes()
-
- var msg proto.Message
- var err error
- if m.AnyResolver != nil {
- msg, err = m.AnyResolver.Resolve(turl)
- } else {
- msg, err = defaultResolveAny(turl)
- }
- if err != nil {
- return err
- }
-
- if err := proto.Unmarshal(val, msg); err != nil {
- return err
- }
-
- if _, ok := msg.(wkt); ok {
- out.write("{")
- if m.Indent != "" {
- out.write("\n")
- }
- if err := m.marshalTypeURL(out, indent, turl); err != nil {
- return err
- }
- m.writeSep(out)
- if m.Indent != "" {
- out.write(indent)
- out.write(m.Indent)
- out.write(`"value": `)
- } else {
- out.write(`"value":`)
- }
- if err := m.marshalObject(out, msg, indent+m.Indent, ""); err != nil {
- return err
- }
- if m.Indent != "" {
- out.write("\n")
- out.write(indent)
- }
- out.write("}")
- return out.err
- }
-
- return m.marshalObject(out, msg, indent, turl)
-}
-
-func (m *Marshaler) marshalTypeURL(out *errWriter, indent, typeURL string) error {
- if m.Indent != "" {
- out.write(indent)
- out.write(m.Indent)
- }
- out.write(`"@type":`)
- if m.Indent != "" {
- out.write(" ")
- }
- b, err := json.Marshal(typeURL)
- if err != nil {
- return err
- }
- out.write(string(b))
- return out.err
-}
-
-// marshalField writes field description and value to the Writer.
-func (m *Marshaler) marshalField(out *errWriter, prop *proto.Properties, v reflect.Value, indent string) error {
- if m.Indent != "" {
- out.write(indent)
- out.write(m.Indent)
- }
- out.write(`"`)
- out.write(prop.JSONName)
- out.write(`":`)
- if m.Indent != "" {
- out.write(" ")
- }
- if err := m.marshalValue(out, prop, v, indent); err != nil {
- return err
- }
- return nil
-}
-
-// marshalValue writes the value to the Writer.
-func (m *Marshaler) marshalValue(out *errWriter, prop *proto.Properties, v reflect.Value, indent string) error {
- var err error
- v = reflect.Indirect(v)
-
- // Handle nil pointer
- if v.Kind() == reflect.Invalid {
- out.write("null")
- return out.err
- }
-
- // Handle repeated elements.
- if v.Kind() == reflect.Slice && v.Type().Elem().Kind() != reflect.Uint8 {
- out.write("[")
- comma := ""
- for i := 0; i < v.Len(); i++ {
- sliceVal := v.Index(i)
- out.write(comma)
- if m.Indent != "" {
- out.write("\n")
- out.write(indent)
- out.write(m.Indent)
- out.write(m.Indent)
- }
- if err := m.marshalValue(out, prop, sliceVal, indent+m.Indent); err != nil {
- return err
- }
- comma = ","
- }
- if m.Indent != "" {
- out.write("\n")
- out.write(indent)
- out.write(m.Indent)
- }
- out.write("]")
- return out.err
- }
-
- // Handle well-known types.
- // Most are handled up in marshalObject (because 99% are messages).
- if wkt, ok := v.Interface().(wkt); ok {
- switch wkt.XXX_WellKnownType() {
- case "NullValue":
- out.write("null")
- return out.err
- }
- }
-
- // Handle enumerations.
- if !m.EnumsAsInts && prop.Enum != "" {
-	// Unknown enum values are stringified by the proto library as their
- // value. Such values should _not_ be quoted or they will be interpreted
- // as an enum string instead of their value.
- enumStr := v.Interface().(fmt.Stringer).String()
- var valStr string
- if v.Kind() == reflect.Ptr {
- valStr = strconv.Itoa(int(v.Elem().Int()))
- } else {
- valStr = strconv.Itoa(int(v.Int()))
- }
- isKnownEnum := enumStr != valStr
- if isKnownEnum {
- out.write(`"`)
- }
- out.write(enumStr)
- if isKnownEnum {
- out.write(`"`)
- }
- return out.err
- }
-
- // Handle nested messages.
- if v.Kind() == reflect.Struct {
- return m.marshalObject(out, v.Addr().Interface().(proto.Message), indent+m.Indent, "")
- }
-
- // Handle maps.
- // Since Go randomizes map iteration, we sort keys for stable output.
- if v.Kind() == reflect.Map {
- out.write(`{`)
- keys := v.MapKeys()
- sort.Sort(mapKeys(keys))
- for i, k := range keys {
- if i > 0 {
- out.write(`,`)
- }
- if m.Indent != "" {
- out.write("\n")
- out.write(indent)
- out.write(m.Indent)
- out.write(m.Indent)
- }
-
- // TODO handle map key prop properly
- b, err := json.Marshal(k.Interface())
- if err != nil {
- return err
- }
- s := string(b)
-
- // If the JSON is not a string value, encode it again to make it one.
- if !strings.HasPrefix(s, `"`) {
- b, err := json.Marshal(s)
- if err != nil {
- return err
- }
- s = string(b)
- }
-
- out.write(s)
- out.write(`:`)
- if m.Indent != "" {
- out.write(` `)
- }
-
- vprop := prop
- if prop != nil && prop.MapValProp != nil {
- vprop = prop.MapValProp
- }
- if err := m.marshalValue(out, vprop, v.MapIndex(k), indent+m.Indent); err != nil {
- return err
- }
- }
- if m.Indent != "" {
- out.write("\n")
- out.write(indent)
- out.write(m.Indent)
- }
- out.write(`}`)
- return out.err
- }
-
- // Handle non-finite floats, e.g. NaN, Infinity and -Infinity.
- if v.Kind() == reflect.Float32 || v.Kind() == reflect.Float64 {
- f := v.Float()
- var sval string
- switch {
- case math.IsInf(f, 1):
- sval = `"Infinity"`
- case math.IsInf(f, -1):
- sval = `"-Infinity"`
- case math.IsNaN(f):
- sval = `"NaN"`
- }
- if sval != "" {
- out.write(sval)
- return out.err
- }
- }
-
- // Default handling defers to the encoding/json library.
- b, err := json.Marshal(v.Interface())
- if err != nil {
- return err
- }
- needToQuote := string(b[0]) != `"` && (v.Kind() == reflect.Int64 || v.Kind() == reflect.Uint64)
- if needToQuote {
- out.write(`"`)
- }
- out.write(string(b))
- if needToQuote {
- out.write(`"`)
- }
- return out.err
-}
-
-// Unmarshaler is a configurable object for converting from a JSON
-// representation to a protocol buffer object.
-type Unmarshaler struct {
- // Whether to allow messages to contain unknown fields, as opposed to
- // failing to unmarshal.
- AllowUnknownFields bool
-
- // A custom URL resolver to use when unmarshaling Any messages from JSON.
- // If unset, the default resolution strategy is to extract the
- // fully-qualified type name from the type URL and pass that to
- // proto.MessageType(string).
- AnyResolver AnyResolver
-}
-
-// UnmarshalNext unmarshals the next protocol buffer from a JSON object stream.
-// This function is lenient and will decode any options permutations of the
-// related Marshaler.
-func (u *Unmarshaler) UnmarshalNext(dec *json.Decoder, pb proto.Message) error {
- inputValue := json.RawMessage{}
- if err := dec.Decode(&inputValue); err != nil {
- return err
- }
- if err := u.unmarshalValue(reflect.ValueOf(pb).Elem(), inputValue, nil); err != nil {
- return err
- }
- return checkRequiredFields(pb)
-}
-
-// Unmarshal unmarshals a JSON object stream into a protocol
-// buffer. This function is lenient and will decode any options
-// permutations of the related Marshaler.
-func (u *Unmarshaler) Unmarshal(r io.Reader, pb proto.Message) error {
- dec := json.NewDecoder(r)
- return u.UnmarshalNext(dec, pb)
-}
-
-// UnmarshalNext unmarshals the next protocol buffer from a JSON object stream.
-// This function is lenient and will decode any options permutations of the
-// related Marshaler.
-func UnmarshalNext(dec *json.Decoder, pb proto.Message) error {
- return new(Unmarshaler).UnmarshalNext(dec, pb)
-}
-
-// Unmarshal unmarshals a JSON object stream into a protocol
-// buffer. This function is lenient and will decode any options
-// permutations of the related Marshaler.
-func Unmarshal(r io.Reader, pb proto.Message) error {
- return new(Unmarshaler).Unmarshal(r, pb)
-}
-
-// UnmarshalString will populate the fields of a protocol buffer based
-// on a JSON string. This function is lenient and will decode any options
-// permutations of the related Marshaler.
-func UnmarshalString(str string, pb proto.Message) error {
- return new(Unmarshaler).Unmarshal(strings.NewReader(str), pb)
-}
-
-// unmarshalValue converts/copies a value into the target.
-// prop may be nil.
-func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMessage, prop *proto.Properties) error {
- targetType := target.Type()
-
- // Allocate memory for pointer fields.
- if targetType.Kind() == reflect.Ptr {
- // If input value is "null" and target is a pointer type, then the field should be treated as not set
- // UNLESS the target is structpb.Value, in which case it should be set to structpb.NullValue.
- _, isJSONPBUnmarshaler := target.Interface().(JSONPBUnmarshaler)
- if string(inputValue) == "null" && targetType != reflect.TypeOf(&stpb.Value{}) && !isJSONPBUnmarshaler {
- return nil
- }
- target.Set(reflect.New(targetType.Elem()))
-
- return u.unmarshalValue(target.Elem(), inputValue, prop)
- }
-
- if jsu, ok := target.Addr().Interface().(JSONPBUnmarshaler); ok {
- return jsu.UnmarshalJSONPB(u, []byte(inputValue))
- }
-
- // Handle well-known types that are not pointers.
- if w, ok := target.Addr().Interface().(wkt); ok {
- switch w.XXX_WellKnownType() {
- case "DoubleValue", "FloatValue", "Int64Value", "UInt64Value",
- "Int32Value", "UInt32Value", "BoolValue", "StringValue", "BytesValue":
- return u.unmarshalValue(target.Field(0), inputValue, prop)
- case "Any":
- // Use json.RawMessage pointer type instead of value to support pre-1.8 version.
- // 1.8 changed RawMessage.MarshalJSON from pointer type to value type, see
- // https://github.com/golang/go/issues/14493
- var jsonFields map[string]*json.RawMessage
- if err := json.Unmarshal(inputValue, &jsonFields); err != nil {
- return err
- }
-
- val, ok := jsonFields["@type"]
- if !ok || val == nil {
- return errors.New("Any JSON doesn't have '@type'")
- }
-
- var turl string
- if err := json.Unmarshal([]byte(*val), &turl); err != nil {
- return fmt.Errorf("can't unmarshal Any's '@type': %q", *val)
- }
- target.Field(0).SetString(turl)
-
- var m proto.Message
- var err error
- if u.AnyResolver != nil {
- m, err = u.AnyResolver.Resolve(turl)
- } else {
- m, err = defaultResolveAny(turl)
- }
- if err != nil {
- return err
- }
-
- if _, ok := m.(wkt); ok {
- val, ok := jsonFields["value"]
- if !ok {
- return errors.New("Any JSON doesn't have 'value'")
- }
-
- if err := u.unmarshalValue(reflect.ValueOf(m).Elem(), *val, nil); err != nil {
- return fmt.Errorf("can't unmarshal Any nested proto %T: %v", m, err)
- }
- } else {
- delete(jsonFields, "@type")
- nestedProto, err := json.Marshal(jsonFields)
- if err != nil {
- return fmt.Errorf("can't generate JSON for Any's nested proto to be unmarshaled: %v", err)
- }
-
- if err = u.unmarshalValue(reflect.ValueOf(m).Elem(), nestedProto, nil); err != nil {
- return fmt.Errorf("can't unmarshal Any nested proto %T: %v", m, err)
- }
- }
-
- b, err := proto.Marshal(m)
- if err != nil {
- return fmt.Errorf("can't marshal proto %T into Any.Value: %v", m, err)
- }
- target.Field(1).SetBytes(b)
-
- return nil
- case "Duration":
- unq, err := unquote(string(inputValue))
- if err != nil {
- return err
- }
-
- d, err := time.ParseDuration(unq)
- if err != nil {
- return fmt.Errorf("bad Duration: %v", err)
- }
-
- ns := d.Nanoseconds()
- s := ns / 1e9
- ns %= 1e9
- target.Field(0).SetInt(s)
- target.Field(1).SetInt(ns)
- return nil
- case "Timestamp":
- unq, err := unquote(string(inputValue))
- if err != nil {
- return err
- }
-
- t, err := time.Parse(time.RFC3339Nano, unq)
- if err != nil {
- return fmt.Errorf("bad Timestamp: %v", err)
- }
-
- target.Field(0).SetInt(t.Unix())
- target.Field(1).SetInt(int64(t.Nanosecond()))
- return nil
- case "Struct":
- var m map[string]json.RawMessage
- if err := json.Unmarshal(inputValue, &m); err != nil {
- return fmt.Errorf("bad StructValue: %v", err)
- }
-
- target.Field(0).Set(reflect.ValueOf(map[string]*stpb.Value{}))
- for k, jv := range m {
- pv := &stpb.Value{}
- if err := u.unmarshalValue(reflect.ValueOf(pv).Elem(), jv, prop); err != nil {
- return fmt.Errorf("bad value in StructValue for key %q: %v", k, err)
- }
- target.Field(0).SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(pv))
- }
- return nil
- case "ListValue":
- var s []json.RawMessage
- if err := json.Unmarshal(inputValue, &s); err != nil {
- return fmt.Errorf("bad ListValue: %v", err)
- }
-
- target.Field(0).Set(reflect.ValueOf(make([]*stpb.Value, len(s))))
- for i, sv := range s {
- if err := u.unmarshalValue(target.Field(0).Index(i), sv, prop); err != nil {
- return err
- }
- }
- return nil
- case "Value":
- ivStr := string(inputValue)
- if ivStr == "null" {
- target.Field(0).Set(reflect.ValueOf(&stpb.Value_NullValue{}))
- } else if v, err := strconv.ParseFloat(ivStr, 0); err == nil {
- target.Field(0).Set(reflect.ValueOf(&stpb.Value_NumberValue{v}))
- } else if v, err := unquote(ivStr); err == nil {
- target.Field(0).Set(reflect.ValueOf(&stpb.Value_StringValue{v}))
- } else if v, err := strconv.ParseBool(ivStr); err == nil {
- target.Field(0).Set(reflect.ValueOf(&stpb.Value_BoolValue{v}))
- } else if err := json.Unmarshal(inputValue, &[]json.RawMessage{}); err == nil {
- lv := &stpb.ListValue{}
- target.Field(0).Set(reflect.ValueOf(&stpb.Value_ListValue{lv}))
- return u.unmarshalValue(reflect.ValueOf(lv).Elem(), inputValue, prop)
- } else if err := json.Unmarshal(inputValue, &map[string]json.RawMessage{}); err == nil {
- sv := &stpb.Struct{}
- target.Field(0).Set(reflect.ValueOf(&stpb.Value_StructValue{sv}))
- return u.unmarshalValue(reflect.ValueOf(sv).Elem(), inputValue, prop)
- } else {
- return fmt.Errorf("unrecognized type for Value %q", ivStr)
- }
- return nil
- }
- }
-
- // Handle enums, which have an underlying type of int32,
- // and may appear as strings.
- // The case of an enum appearing as a number is handled
- // at the bottom of this function.
- if inputValue[0] == '"' && prop != nil && prop.Enum != "" {
- vmap := proto.EnumValueMap(prop.Enum)
- // Don't need to do unquoting; valid enum names
- // are from a limited character set.
- s := inputValue[1 : len(inputValue)-1]
- n, ok := vmap[string(s)]
- if !ok {
- return fmt.Errorf("unknown value %q for enum %s", s, prop.Enum)
- }
- if target.Kind() == reflect.Ptr { // proto2
- target.Set(reflect.New(targetType.Elem()))
- target = target.Elem()
- }
- if targetType.Kind() != reflect.Int32 {
- return fmt.Errorf("invalid target %q for enum %s", targetType.Kind(), prop.Enum)
- }
- target.SetInt(int64(n))
- return nil
- }
-
- // Handle nested messages.
- if targetType.Kind() == reflect.Struct {
- var jsonFields map[string]json.RawMessage
- if err := json.Unmarshal(inputValue, &jsonFields); err != nil {
- return err
- }
-
- consumeField := func(prop *proto.Properties) (json.RawMessage, bool) {
- // Be liberal in what names we accept; both orig_name and camelName are okay.
- fieldNames := acceptedJSONFieldNames(prop)
-
- vOrig, okOrig := jsonFields[fieldNames.orig]
- vCamel, okCamel := jsonFields[fieldNames.camel]
- if !okOrig && !okCamel {
- return nil, false
- }
- // If, for some reason, both are present in the data, favour the camelName.
- var raw json.RawMessage
- if okOrig {
- raw = vOrig
- delete(jsonFields, fieldNames.orig)
- }
- if okCamel {
- raw = vCamel
- delete(jsonFields, fieldNames.camel)
- }
- return raw, true
- }
-
- sprops := proto.GetProperties(targetType)
- for i := 0; i < target.NumField(); i++ {
- ft := target.Type().Field(i)
- if strings.HasPrefix(ft.Name, "XXX_") {
- continue
- }
-
- valueForField, ok := consumeField(sprops.Prop[i])
- if !ok {
- continue
- }
-
- if err := u.unmarshalValue(target.Field(i), valueForField, sprops.Prop[i]); err != nil {
- return err
- }
- }
- // Check for any oneof fields.
- if len(jsonFields) > 0 {
- for _, oop := range sprops.OneofTypes {
- raw, ok := consumeField(oop.Prop)
- if !ok {
- continue
- }
- nv := reflect.New(oop.Type.Elem())
- target.Field(oop.Field).Set(nv)
- if err := u.unmarshalValue(nv.Elem().Field(0), raw, oop.Prop); err != nil {
- return err
- }
- }
- }
- // Handle proto2 extensions.
- if len(jsonFields) > 0 {
- if ep, ok := target.Addr().Interface().(proto.Message); ok {
- for _, ext := range proto.RegisteredExtensions(ep) {
- name := fmt.Sprintf("[%s]", ext.Name)
- raw, ok := jsonFields[name]
- if !ok {
- continue
- }
- delete(jsonFields, name)
- nv := reflect.New(reflect.TypeOf(ext.ExtensionType).Elem())
- if err := u.unmarshalValue(nv.Elem(), raw, nil); err != nil {
- return err
- }
- if err := proto.SetExtension(ep, ext, nv.Interface()); err != nil {
- return err
- }
- }
- }
- }
- if !u.AllowUnknownFields && len(jsonFields) > 0 {
- // Pick any field to be the scapegoat.
- var f string
- for fname := range jsonFields {
- f = fname
- break
- }
- return fmt.Errorf("unknown field %q in %v", f, targetType)
- }
- return nil
- }
-
- // Handle arrays (which aren't encoded bytes)
- if targetType.Kind() == reflect.Slice && targetType.Elem().Kind() != reflect.Uint8 {
- var slc []json.RawMessage
- if err := json.Unmarshal(inputValue, &slc); err != nil {
- return err
- }
- if slc != nil {
- l := len(slc)
- target.Set(reflect.MakeSlice(targetType, l, l))
- for i := 0; i < l; i++ {
- if err := u.unmarshalValue(target.Index(i), slc[i], prop); err != nil {
- return err
- }
- }
- }
- return nil
- }
-
- // Handle maps (whose keys are always strings)
- if targetType.Kind() == reflect.Map {
- var mp map[string]json.RawMessage
- if err := json.Unmarshal(inputValue, &mp); err != nil {
- return err
- }
- if mp != nil {
- target.Set(reflect.MakeMap(targetType))
- for ks, raw := range mp {
- // Unmarshal map key. The core json library already decoded the key into a
- // string, so we handle that specially. Other types were quoted post-serialization.
- var k reflect.Value
- if targetType.Key().Kind() == reflect.String {
- k = reflect.ValueOf(ks)
- } else {
- k = reflect.New(targetType.Key()).Elem()
- var kprop *proto.Properties
- if prop != nil && prop.MapKeyProp != nil {
- kprop = prop.MapKeyProp
- }
- if err := u.unmarshalValue(k, json.RawMessage(ks), kprop); err != nil {
- return err
- }
- }
-
- // Unmarshal map value.
- v := reflect.New(targetType.Elem()).Elem()
- var vprop *proto.Properties
- if prop != nil && prop.MapValProp != nil {
- vprop = prop.MapValProp
- }
- if err := u.unmarshalValue(v, raw, vprop); err != nil {
- return err
- }
- target.SetMapIndex(k, v)
- }
- }
- return nil
- }
-
- // Non-finite numbers can be encoded as strings.
- isFloat := targetType.Kind() == reflect.Float32 || targetType.Kind() == reflect.Float64
- if isFloat {
- if num, ok := nonFinite[string(inputValue)]; ok {
- target.SetFloat(num)
- return nil
- }
- }
-
- // integers & floats can be encoded as strings. In this case we drop
- // the quotes and proceed as normal.
- isNum := targetType.Kind() == reflect.Int64 || targetType.Kind() == reflect.Uint64 ||
- targetType.Kind() == reflect.Int32 || targetType.Kind() == reflect.Uint32 ||
- targetType.Kind() == reflect.Float32 || targetType.Kind() == reflect.Float64
- if isNum && strings.HasPrefix(string(inputValue), `"`) {
- inputValue = inputValue[1 : len(inputValue)-1]
- }
-
- // Use the encoding/json for parsing other value types.
- return json.Unmarshal(inputValue, target.Addr().Interface())
-}
-
-func unquote(s string) (string, error) {
- var ret string
- err := json.Unmarshal([]byte(s), &ret)
- return ret, err
-}
-
-// jsonProperties returns parsed proto.Properties for the field and corrects JSONName attribute.
-func jsonProperties(f reflect.StructField, origName bool) *proto.Properties {
- var prop proto.Properties
- prop.Init(f.Type, f.Name, f.Tag.Get("protobuf"), &f)
- if origName || prop.JSONName == "" {
- prop.JSONName = prop.OrigName
- }
- return &prop
-}
-
-type fieldNames struct {
- orig, camel string
-}
-
-func acceptedJSONFieldNames(prop *proto.Properties) fieldNames {
- opts := fieldNames{orig: prop.OrigName, camel: prop.OrigName}
- if prop.JSONName != "" {
- opts.camel = prop.JSONName
- }
- return opts
-}
-
-// Writer wrapper inspired by https://blog.golang.org/errors-are-values
-type errWriter struct {
- writer io.Writer
- err error
-}
-
-func (w *errWriter) write(str string) {
- if w.err != nil {
- return
- }
- _, w.err = w.writer.Write([]byte(str))
-}
-
-// Map fields may have key types of non-float scalars, strings and enums.
-// The easiest way to sort them in some deterministic order is to use fmt.
-// If this turns out to be inefficient we can always consider other options,
-// such as doing a Schwartzian transform.
-//
-// Numeric keys are sorted in numeric order per
-// https://developers.google.com/protocol-buffers/docs/proto#maps.
-type mapKeys []reflect.Value
-
-func (s mapKeys) Len() int { return len(s) }
-func (s mapKeys) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
-func (s mapKeys) Less(i, j int) bool {
- if k := s[i].Kind(); k == s[j].Kind() {
- switch k {
- case reflect.String:
- return s[i].String() < s[j].String()
- case reflect.Int32, reflect.Int64:
- return s[i].Int() < s[j].Int()
- case reflect.Uint32, reflect.Uint64:
- return s[i].Uint() < s[j].Uint()
- }
- }
- return fmt.Sprint(s[i].Interface()) < fmt.Sprint(s[j].Interface())
-}
-
-// checkRequiredFields returns an error if any required field in the given proto message is not set.
-// This function is used by both Marshal and Unmarshal. While required fields only exist in a
-// proto2 message, a proto3 message can contain proto2 message(s).
-func checkRequiredFields(pb proto.Message) error {
- // Most well-known type messages do not contain required fields. The "Any" type may contain
- // a message that has required fields.
- //
- // When an Any message is being marshaled, the code will have invoked proto.Unmarshal on the Any.Value
- // field in order to transform that into JSON, and that should have returned an error if a
- // required field is not set in the embedded message.
- //
- // When an Any message is being unmarshaled, the code will have invoked proto.Marshal on the
- // embedded message to store the serialized message in Any.Value field, and that should have
- // returned an error if a required field is not set.
- if _, ok := pb.(wkt); ok {
- return nil
- }
-
- v := reflect.ValueOf(pb)
- // Skip message if it is not a struct pointer.
- if v.Kind() != reflect.Ptr {
- return nil
- }
- v = v.Elem()
- if v.Kind() != reflect.Struct {
- return nil
- }
-
- for i := 0; i < v.NumField(); i++ {
- field := v.Field(i)
- sfield := v.Type().Field(i)
-
- if sfield.PkgPath != "" {
- // blank PkgPath means the field is exported; skip if not exported
- continue
- }
-
- if strings.HasPrefix(sfield.Name, "XXX_") {
- continue
- }
-
- // Oneof field is an interface implemented by wrapper structs containing the actual oneof
- // field, i.e. an interface containing &T{real_value}.
- if sfield.Tag.Get("protobuf_oneof") != "" {
- if field.Kind() != reflect.Interface {
- continue
- }
- v := field.Elem()
- if v.Kind() != reflect.Ptr || v.IsNil() {
- continue
- }
- v = v.Elem()
- if v.Kind() != reflect.Struct || v.NumField() < 1 {
- continue
- }
- field = v.Field(0)
- sfield = v.Type().Field(0)
- }
-
- protoTag := sfield.Tag.Get("protobuf")
- if protoTag == "" {
- continue
- }
- var prop proto.Properties
- prop.Init(sfield.Type, sfield.Name, protoTag, &sfield)
-
- switch field.Kind() {
- case reflect.Map:
- if field.IsNil() {
- continue
- }
- // Check each map value.
- keys := field.MapKeys()
- for _, k := range keys {
- v := field.MapIndex(k)
- if err := checkRequiredFieldsInValue(v); err != nil {
- return err
- }
- }
- case reflect.Slice:
- // Handle non-repeated type, e.g. bytes.
- if !prop.Repeated {
- if prop.Required && field.IsNil() {
- return fmt.Errorf("required field %q is not set", prop.Name)
- }
- continue
- }
-
- // Handle repeated type.
- if field.IsNil() {
- continue
- }
- // Check each slice item.
- for i := 0; i < field.Len(); i++ {
- v := field.Index(i)
- if err := checkRequiredFieldsInValue(v); err != nil {
- return err
- }
- }
- case reflect.Ptr:
- if field.IsNil() {
- if prop.Required {
- return fmt.Errorf("required field %q is not set", prop.Name)
- }
- continue
- }
- if err := checkRequiredFieldsInValue(field); err != nil {
- return err
- }
- }
- }
-
- // Handle proto2 extensions.
- for _, ext := range proto.RegisteredExtensions(pb) {
- if !proto.HasExtension(pb, ext) {
- continue
- }
- ep, err := proto.GetExtension(pb, ext)
- if err != nil {
- return err
- }
- err = checkRequiredFieldsInValue(reflect.ValueOf(ep))
- if err != nil {
- return err
- }
- }
-
- return nil
-}
-
-func checkRequiredFieldsInValue(v reflect.Value) error {
- if pm, ok := v.Interface().(proto.Message); ok {
- return checkRequiredFields(pm)
- }
- return nil
-}
diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go
deleted file mode 100644
index 6f4a902b5..000000000
--- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go
+++ /dev/null
@@ -1,2806 +0,0 @@
-// Go support for Protocol Buffers - Google's data interchange format
-//
-// Copyright 2010 The Go Authors. All rights reserved.
-// https://github.com/golang/protobuf
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-/*
- The code generator for the plugin for the Google protocol buffer compiler.
- It generates Go code from the protocol buffer description files read by the
- main routine.
-*/
-package generator
-
-import (
- "bufio"
- "bytes"
- "compress/gzip"
- "crypto/sha256"
- "encoding/hex"
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/printer"
- "go/token"
- "log"
- "os"
- "path"
- "sort"
- "strconv"
- "strings"
- "unicode"
- "unicode/utf8"
-
- "github.com/golang/protobuf/proto"
- "github.com/golang/protobuf/protoc-gen-go/generator/internal/remap"
-
- "github.com/golang/protobuf/protoc-gen-go/descriptor"
- plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
-)
-
-// generatedCodeVersion indicates a version of the generated code.
-// It is incremented whenever an incompatibility between the generated code and
-// proto package is introduced; the generated code references
-// a constant, proto.ProtoPackageIsVersionN (where N is generatedCodeVersion).
-const generatedCodeVersion = 3
-
-// A Plugin provides functionality to add to the output during Go code generation,
-// such as to produce RPC stubs.
-type Plugin interface {
- // Name identifies the plugin.
- Name() string
- // Init is called once after data structures are built but before
- // code generation begins.
- Init(g *Generator)
- // Generate produces the code generated by the plugin for this file,
- // except for the imports, by calling the generator's methods P, In, and Out.
- Generate(file *FileDescriptor)
- // GenerateImports produces the import declarations for this file.
- // It is called after Generate.
- GenerateImports(file *FileDescriptor)
-}
-
-var plugins []Plugin
-
-// RegisterPlugin installs a (second-order) plugin to be run when the Go output is generated.
-// It is typically called during initialization.
-func RegisterPlugin(p Plugin) {
- plugins = append(plugins, p)
-}
-
-// A GoImportPath is the import path of a Go package. e.g., "google.golang.org/genproto/protobuf".
-type GoImportPath string
-
-func (p GoImportPath) String() string { return strconv.Quote(string(p)) }
-
-// A GoPackageName is the name of a Go package. e.g., "protobuf".
-type GoPackageName string
-
-// Each type we import as a protocol buffer (other than FileDescriptorProto) needs
-// a pointer to the FileDescriptorProto that represents it. These types achieve that
-// wrapping by placing each Proto inside a struct with the pointer to its File. The
-// structs have the same names as their contents, with "Proto" removed.
-// FileDescriptor is used to store the things that it points to.
-
- // The file and package name methods are common to messages and enums.
-type common struct {
- file *FileDescriptor // File this object comes from.
-}
-
-// GoImportPath is the import path of the Go package containing the type.
-func (c *common) GoImportPath() GoImportPath {
- return c.file.importPath
-}
-
-func (c *common) File() *FileDescriptor { return c.file }
-
-func fileIsProto3(file *descriptor.FileDescriptorProto) bool {
- return file.GetSyntax() == "proto3"
-}
-
-func (c *common) proto3() bool { return fileIsProto3(c.file.FileDescriptorProto) }
-
-// Descriptor represents a protocol buffer message.
-type Descriptor struct {
- common
- *descriptor.DescriptorProto
- parent *Descriptor // The containing message, if any.
- nested []*Descriptor // Inner messages, if any.
- enums []*EnumDescriptor // Inner enums, if any.
- ext []*ExtensionDescriptor // Extensions, if any.
- typename []string // Cached typename vector.
- index int // The index into the container, whether the file or another message.
- path string // The SourceCodeInfo path as comma-separated integers.
- group bool
-}
-
-// TypeName returns the elements of the dotted type name.
-// The package name is not part of this name.
-func (d *Descriptor) TypeName() []string {
- if d.typename != nil {
- return d.typename
- }
- n := 0
- for parent := d; parent != nil; parent = parent.parent {
- n++
- }
- s := make([]string, n)
- for parent := d; parent != nil; parent = parent.parent {
- n--
- s[n] = parent.GetName()
- }
- d.typename = s
- return s
-}
-
-// EnumDescriptor describes an enum. If it's at top level, its parent will be nil.
-// Otherwise it will be the descriptor of the message in which it is defined.
-type EnumDescriptor struct {
- common
- *descriptor.EnumDescriptorProto
- parent *Descriptor // The containing message, if any.
- typename []string // Cached typename vector.
- index int // The index into the container, whether the file or a message.
- path string // The SourceCodeInfo path as comma-separated integers.
-}
-
-// TypeName returns the elements of the dotted type name.
-// The package name is not part of this name.
-func (e *EnumDescriptor) TypeName() (s []string) {
- if e.typename != nil {
- return e.typename
- }
- name := e.GetName()
- if e.parent == nil {
- s = make([]string, 1)
- } else {
- pname := e.parent.TypeName()
- s = make([]string, len(pname)+1)
- copy(s, pname)
- }
- s[len(s)-1] = name
- e.typename = s
- return s
-}
-
-// Everything but the last element of the full type name, CamelCased.
- // The values of type Foo.Bar are called Foo_value1... not Foo_Bar_value1... .
-func (e *EnumDescriptor) prefix() string {
- if e.parent == nil {
- // If the enum is not part of a message, the prefix is just the type name.
- return CamelCase(*e.Name) + "_"
- }
- typeName := e.TypeName()
- return CamelCaseSlice(typeName[0:len(typeName)-1]) + "_"
-}
-
-// The integer value of the named constant in this enumerated type.
-func (e *EnumDescriptor) integerValueAsString(name string) string {
- for _, c := range e.Value {
- if c.GetName() == name {
- return fmt.Sprint(c.GetNumber())
- }
- }
- log.Fatal("cannot find value for enum constant")
- return ""
-}
-
-// ExtensionDescriptor describes an extension. If it's at top level, its parent will be nil.
-// Otherwise it will be the descriptor of the message in which it is defined.
-type ExtensionDescriptor struct {
- common
- *descriptor.FieldDescriptorProto
- parent *Descriptor // The containing message, if any.
-}
-
-// TypeName returns the elements of the dotted type name.
-// The package name is not part of this name.
-func (e *ExtensionDescriptor) TypeName() (s []string) {
- name := e.GetName()
- if e.parent == nil {
- // top-level extension
- s = make([]string, 1)
- } else {
- pname := e.parent.TypeName()
- s = make([]string, len(pname)+1)
- copy(s, pname)
- }
- s[len(s)-1] = name
- return s
-}
-
-// DescName returns the variable name used for the generated descriptor.
-func (e *ExtensionDescriptor) DescName() string {
- // The full type name.
- typeName := e.TypeName()
- // Each scope of the extension is individually CamelCased, and all are joined with "_" with an "E_" prefix.
- for i, s := range typeName {
- typeName[i] = CamelCase(s)
- }
- return "E_" + strings.Join(typeName, "_")
-}
-
-// ImportedDescriptor describes a type that has been publicly imported from another file.
-type ImportedDescriptor struct {
- common
- o Object
-}
-
-func (id *ImportedDescriptor) TypeName() []string { return id.o.TypeName() }
-
- // FileDescriptor describes a protocol buffer descriptor file (.proto).
-// It includes slices of all the messages and enums defined within it.
-// Those slices are constructed by WrapTypes.
-type FileDescriptor struct {
- *descriptor.FileDescriptorProto
- desc []*Descriptor // All the messages defined in this file.
- enum []*EnumDescriptor // All the enums defined in this file.
- ext []*ExtensionDescriptor // All the top-level extensions defined in this file.
- imp []*ImportedDescriptor // All types defined in files publicly imported by this file.
-
- // Comments, stored as a map of path (comma-separated integers) to the comment.
- comments map[string]*descriptor.SourceCodeInfo_Location
-
- // The full list of symbols that are exported,
- // as a map from the exported object to its symbols.
- // This is used for supporting public imports.
- exported map[Object][]symbol
-
- importPath GoImportPath // Import path of this file's package.
- packageName GoPackageName // Name of this file's Go package.
-
- proto3 bool // whether to generate proto3 code for this file
-}
-
-// VarName is the variable name we'll use in the generated code to refer
-// to the compressed bytes of this descriptor. It is not exported, so
-// it is only valid inside the generated package.
-func (d *FileDescriptor) VarName() string {
- h := sha256.Sum256([]byte(d.GetName()))
- return fmt.Sprintf("fileDescriptor_%s", hex.EncodeToString(h[:8]))
-}
-
-// goPackageOption interprets the file's go_package option.
-// If there is no go_package, it returns ("", "", false).
-// If there's a simple name, it returns ("", pkg, true).
-// If the option implies an import path, it returns (impPath, pkg, true).
-func (d *FileDescriptor) goPackageOption() (impPath GoImportPath, pkg GoPackageName, ok bool) {
- opt := d.GetOptions().GetGoPackage()
- if opt == "" {
- return "", "", false
- }
- // A semicolon-delimited suffix delimits the import path and package name.
- sc := strings.Index(opt, ";")
- if sc >= 0 {
- return GoImportPath(opt[:sc]), cleanPackageName(opt[sc+1:]), true
- }
- // The presence of a slash implies there's an import path.
- slash := strings.LastIndex(opt, "/")
- if slash >= 0 {
- return GoImportPath(opt), cleanPackageName(opt[slash+1:]), true
- }
- return "", cleanPackageName(opt), true
-}
-
-// goFileName returns the output name for the generated Go file.
-func (d *FileDescriptor) goFileName(pathType pathType) string {
- name := *d.Name
- if ext := path.Ext(name); ext == ".proto" || ext == ".protodevel" {
- name = name[:len(name)-len(ext)]
- }
- name += ".pb.go"
-
- if pathType == pathTypeSourceRelative {
- return name
- }
-
- // Does the file have a "go_package" option?
- // If it does, it may override the filename.
- if impPath, _, ok := d.goPackageOption(); ok && impPath != "" {
- // Replace the existing dirname with the declared import path.
- _, name = path.Split(name)
- name = path.Join(string(impPath), name)
- return name
- }
-
- return name
-}
-
-func (d *FileDescriptor) addExport(obj Object, sym symbol) {
- d.exported[obj] = append(d.exported[obj], sym)
-}
-
-// symbol is an interface representing an exported Go symbol.
-type symbol interface {
- // GenerateAlias should generate an appropriate alias
- // for the symbol from the named package.
- GenerateAlias(g *Generator, filename string, pkg GoPackageName)
-}
-
-type messageSymbol struct {
- sym string
- hasExtensions, isMessageSet bool
- oneofTypes []string
-}
-
-type getterSymbol struct {
- name string
- typ string
- typeName string // canonical name in proto world; empty for proto.Message and similar
- genType bool // whether typ contains a generated type (message/group/enum)
-}
-
-func (ms *messageSymbol) GenerateAlias(g *Generator, filename string, pkg GoPackageName) {
- g.P("// ", ms.sym, " from public import ", filename)
- g.P("type ", ms.sym, " = ", pkg, ".", ms.sym)
- for _, name := range ms.oneofTypes {
- g.P("type ", name, " = ", pkg, ".", name)
- }
-}
-
-type enumSymbol struct {
- name string
- proto3 bool // Whether this came from a proto3 file.
-}
-
-func (es enumSymbol) GenerateAlias(g *Generator, filename string, pkg GoPackageName) {
- s := es.name
- g.P("// ", s, " from public import ", filename)
- g.P("type ", s, " = ", pkg, ".", s)
- g.P("var ", s, "_name = ", pkg, ".", s, "_name")
- g.P("var ", s, "_value = ", pkg, ".", s, "_value")
-}
-
-type constOrVarSymbol struct {
- sym string
- typ string // either "const" or "var"
- cast string // if non-empty, a type cast is required (used for enums)
-}
-
-func (cs constOrVarSymbol) GenerateAlias(g *Generator, filename string, pkg GoPackageName) {
- v := string(pkg) + "." + cs.sym
- if cs.cast != "" {
- v = cs.cast + "(" + v + ")"
- }
- g.P(cs.typ, " ", cs.sym, " = ", v)
-}
-
-// Object is an interface abstracting the abilities shared by enums, messages, extensions and imported objects.
-type Object interface {
- GoImportPath() GoImportPath
- TypeName() []string
- File() *FileDescriptor
-}
-
-// Generator is the type whose methods generate the output, stored in the associated response structure.
-type Generator struct {
- *bytes.Buffer
-
- Request *plugin.CodeGeneratorRequest // The input.
- Response *plugin.CodeGeneratorResponse // The output.
-
- Param map[string]string // Command-line parameters.
- PackageImportPath string // Go import path of the package we're generating code for
- ImportPrefix string // String to prefix to imported package file names.
- ImportMap map[string]string // Mapping from .proto file name to import path
-
- Pkg map[string]string // The names under which we import support packages
-
- outputImportPath GoImportPath // Package we're generating code for.
- allFiles []*FileDescriptor // All files in the tree
- allFilesByName map[string]*FileDescriptor // All files by filename.
- genFiles []*FileDescriptor // Those files we will generate output for.
- file *FileDescriptor // The file we are compiling now.
- packageNames map[GoImportPath]GoPackageName // Imported package names in the current file.
- usedPackages map[GoImportPath]bool // Packages used in current file.
- usedPackageNames map[GoPackageName]bool // Package names used in the current file.
- addedImports map[GoImportPath]bool // Additional imports to emit.
- typeNameToObject map[string]Object // Key is a fully-qualified name in input syntax.
- init []string // Lines to emit in the init function.
- indent string
- pathType pathType // How to generate output filenames.
- writeOutput bool
- annotateCode bool // whether to store annotations
- annotations []*descriptor.GeneratedCodeInfo_Annotation // annotations to store
-}
-
-type pathType int
-
-const (
- pathTypeImport pathType = iota
- pathTypeSourceRelative
-)
-
-// New creates a new generator and allocates the request and response protobufs.
-func New() *Generator {
- g := new(Generator)
- g.Buffer = new(bytes.Buffer)
- g.Request = new(plugin.CodeGeneratorRequest)
- g.Response = new(plugin.CodeGeneratorResponse)
- return g
-}
-
-// Error reports a problem, including an error, and exits the program.
-func (g *Generator) Error(err error, msgs ...string) {
- s := strings.Join(msgs, " ") + ":" + err.Error()
- log.Print("protoc-gen-go: error:", s)
- os.Exit(1)
-}
-
-// Fail reports a problem and exits the program.
-func (g *Generator) Fail(msgs ...string) {
- s := strings.Join(msgs, " ")
- log.Print("protoc-gen-go: error:", s)
- os.Exit(1)
-}
-
-// CommandLineParameters breaks the comma-separated list of key=value pairs
-// in the parameter (a member of the request protobuf) into a key/value map.
-// It then sets file name mappings defined by those entries.
-func (g *Generator) CommandLineParameters(parameter string) {
- g.Param = make(map[string]string)
- for _, p := range strings.Split(parameter, ",") {
- if i := strings.Index(p, "="); i < 0 {
- g.Param[p] = ""
- } else {
- g.Param[p[0:i]] = p[i+1:]
- }
- }
-
- g.ImportMap = make(map[string]string)
- pluginList := "none" // Default list of plugin names to enable (empty means all).
- for k, v := range g.Param {
- switch k {
- case "import_prefix":
- g.ImportPrefix = v
- case "import_path":
- g.PackageImportPath = v
- case "paths":
- switch v {
- case "import":
- g.pathType = pathTypeImport
- case "source_relative":
- g.pathType = pathTypeSourceRelative
- default:
- g.Fail(fmt.Sprintf(`Unknown path type %q: want "import" or "source_relative".`, v))
- }
- case "plugins":
- pluginList = v
- case "annotate_code":
- if v == "true" {
- g.annotateCode = true
- }
- default:
- if len(k) > 0 && k[0] == 'M' {
- g.ImportMap[k[1:]] = v
- }
- }
- }
- if pluginList != "" {
- // Amend the set of plugins.
- enabled := make(map[string]bool)
- for _, name := range strings.Split(pluginList, "+") {
- enabled[name] = true
- }
- var nplugins []Plugin
- for _, p := range plugins {
- if enabled[p.Name()] {
- nplugins = append(nplugins, p)
- }
- }
- plugins = nplugins
- }
-}
-
-// DefaultPackageName returns the package name printed for the object.
-// If its file is in a different package, it returns the package name we're using for this file, plus ".".
-// Otherwise it returns the empty string.
-func (g *Generator) DefaultPackageName(obj Object) string {
- importPath := obj.GoImportPath()
- if importPath == g.outputImportPath {
- return ""
- }
- return string(g.GoPackageName(importPath)) + "."
-}
-
-// GoPackageName returns the name used for a package.
-func (g *Generator) GoPackageName(importPath GoImportPath) GoPackageName {
- if name, ok := g.packageNames[importPath]; ok {
- return name
- }
- name := cleanPackageName(baseName(string(importPath)))
- for i, orig := 1, name; g.usedPackageNames[name] || isGoPredeclaredIdentifier[string(name)]; i++ {
- name = orig + GoPackageName(strconv.Itoa(i))
- }
- g.packageNames[importPath] = name
- g.usedPackageNames[name] = true
- return name
-}
-
-// AddImport adds a package to the generated file's import section.
-// It returns the name used for the package.
-func (g *Generator) AddImport(importPath GoImportPath) GoPackageName {
- g.addedImports[importPath] = true
- return g.GoPackageName(importPath)
-}
-
-var globalPackageNames = map[GoPackageName]bool{
- "fmt": true,
- "math": true,
- "proto": true,
-}
-
-// Create and remember a guaranteed unique package name. Pkg is the candidate name.
-// The FileDescriptor parameter is unused.
-func RegisterUniquePackageName(pkg string, f *FileDescriptor) string {
- name := cleanPackageName(pkg)
- for i, orig := 1, name; globalPackageNames[name]; i++ {
- name = orig + GoPackageName(strconv.Itoa(i))
- }
- globalPackageNames[name] = true
- return string(name)
-}
-
-var isGoKeyword = map[string]bool{
- "break": true,
- "case": true,
- "chan": true,
- "const": true,
- "continue": true,
- "default": true,
- "else": true,
- "defer": true,
- "fallthrough": true,
- "for": true,
- "func": true,
- "go": true,
- "goto": true,
- "if": true,
- "import": true,
- "interface": true,
- "map": true,
- "package": true,
- "range": true,
- "return": true,
- "select": true,
- "struct": true,
- "switch": true,
- "type": true,
- "var": true,
-}
-
-var isGoPredeclaredIdentifier = map[string]bool{
- "append": true,
- "bool": true,
- "byte": true,
- "cap": true,
- "close": true,
- "complex": true,
- "complex128": true,
- "complex64": true,
- "copy": true,
- "delete": true,
- "error": true,
- "false": true,
- "float32": true,
- "float64": true,
- "imag": true,
- "int": true,
- "int16": true,
- "int32": true,
- "int64": true,
- "int8": true,
- "iota": true,
- "len": true,
- "make": true,
- "new": true,
- "nil": true,
- "panic": true,
- "print": true,
- "println": true,
- "real": true,
- "recover": true,
- "rune": true,
- "string": true,
- "true": true,
- "uint": true,
- "uint16": true,
- "uint32": true,
- "uint64": true,
- "uint8": true,
- "uintptr": true,
-}
-
-func cleanPackageName(name string) GoPackageName {
- name = strings.Map(badToUnderscore, name)
- // Identifier must not be keyword or predeclared identifier: insert _.
- if isGoKeyword[name] {
- name = "_" + name
- }
- // Identifier must not begin with digit: insert _.
- if r, _ := utf8.DecodeRuneInString(name); unicode.IsDigit(r) {
- name = "_" + name
- }
- return GoPackageName(name)
-}
-
-// defaultGoPackage returns the package name to use,
-// derived from the import path of the package we're building code for.
-func (g *Generator) defaultGoPackage() GoPackageName {
- p := g.PackageImportPath
- if i := strings.LastIndex(p, "/"); i >= 0 {
- p = p[i+1:]
- }
- return cleanPackageName(p)
-}
-
-// SetPackageNames sets the package name for this run.
-// The package name must agree across all files being generated.
-// It also defines unique package names for all imported files.
-func (g *Generator) SetPackageNames() {
- g.outputImportPath = g.genFiles[0].importPath
-
- defaultPackageNames := make(map[GoImportPath]GoPackageName)
- for _, f := range g.genFiles {
- if _, p, ok := f.goPackageOption(); ok {
- defaultPackageNames[f.importPath] = p
- }
- }
- for _, f := range g.genFiles {
- if _, p, ok := f.goPackageOption(); ok {
- // Source file: option go_package = "quux/bar";
- f.packageName = p
- } else if p, ok := defaultPackageNames[f.importPath]; ok {
- // A go_package option in another file in the same package.
- //
- // This is a poor choice in general, since every source file should
- // contain a go_package option. Supported mainly for historical
- // compatibility.
- f.packageName = p
- } else if p := g.defaultGoPackage(); p != "" {
- // Command-line: import_path=quux/bar.
- //
- // The import_path flag sets a package name for files which don't
- // contain a go_package option.
- f.packageName = p
- } else if p := f.GetPackage(); p != "" {
- // Source file: package quux.bar;
- f.packageName = cleanPackageName(p)
- } else {
- // Source filename.
- f.packageName = cleanPackageName(baseName(f.GetName()))
- }
- }
-
- // Check that all files have a consistent package name and import path.
- for _, f := range g.genFiles[1:] {
- if a, b := g.genFiles[0].importPath, f.importPath; a != b {
- g.Fail(fmt.Sprintf("inconsistent package import paths: %v, %v", a, b))
- }
- if a, b := g.genFiles[0].packageName, f.packageName; a != b {
- g.Fail(fmt.Sprintf("inconsistent package names: %v, %v", a, b))
- }
- }
-
- // Names of support packages. These never vary (if there are conflicts,
- // we rename the conflicting package), so this could be removed someday.
- g.Pkg = map[string]string{
- "fmt": "fmt",
- "math": "math",
- "proto": "proto",
- }
-}
-
-// WrapTypes walks the incoming data, wrapping DescriptorProtos, EnumDescriptorProtos
-// and FileDescriptorProtos into file-referenced objects within the Generator.
-// It also creates the list of files to generate and so should be called before GenerateAllFiles.
-func (g *Generator) WrapTypes() {
- g.allFiles = make([]*FileDescriptor, 0, len(g.Request.ProtoFile))
- g.allFilesByName = make(map[string]*FileDescriptor, len(g.allFiles))
- genFileNames := make(map[string]bool)
- for _, n := range g.Request.FileToGenerate {
- genFileNames[n] = true
- }
- for _, f := range g.Request.ProtoFile {
- fd := &FileDescriptor{
- FileDescriptorProto: f,
- exported: make(map[Object][]symbol),
- proto3: fileIsProto3(f),
- }
- // The import path may be set in a number of ways.
- if substitution, ok := g.ImportMap[f.GetName()]; ok {
- // Command-line: M=foo.proto=quux/bar.
- //
- // Explicit mapping of source file to import path.
- fd.importPath = GoImportPath(substitution)
- } else if genFileNames[f.GetName()] && g.PackageImportPath != "" {
- // Command-line: import_path=quux/bar.
- //
- // The import_path flag sets the import path for every file that
- // we generate code for.
- fd.importPath = GoImportPath(g.PackageImportPath)
- } else if p, _, _ := fd.goPackageOption(); p != "" {
- // Source file: option go_package = "quux/bar";
- //
- // The go_package option sets the import path. Most users should use this.
- fd.importPath = p
- } else {
- // Source filename.
- //
- // Last resort when nothing else is available.
- fd.importPath = GoImportPath(path.Dir(f.GetName()))
- }
- // We must wrap the descriptors before we wrap the enums
- fd.desc = wrapDescriptors(fd)
- g.buildNestedDescriptors(fd.desc)
- fd.enum = wrapEnumDescriptors(fd, fd.desc)
- g.buildNestedEnums(fd.desc, fd.enum)
- fd.ext = wrapExtensions(fd)
- extractComments(fd)
- g.allFiles = append(g.allFiles, fd)
- g.allFilesByName[f.GetName()] = fd
- }
- for _, fd := range g.allFiles {
- fd.imp = wrapImported(fd, g)
- }
-
- g.genFiles = make([]*FileDescriptor, 0, len(g.Request.FileToGenerate))
- for _, fileName := range g.Request.FileToGenerate {
- fd := g.allFilesByName[fileName]
- if fd == nil {
- g.Fail("could not find file named", fileName)
- }
- g.genFiles = append(g.genFiles, fd)
- }
-}
-
-// Scan the descriptors in this file. For each one, build the slice of nested descriptors
-func (g *Generator) buildNestedDescriptors(descs []*Descriptor) {
- for _, desc := range descs {
- if len(desc.NestedType) != 0 {
- for _, nest := range descs {
- if nest.parent == desc {
- desc.nested = append(desc.nested, nest)
- }
- }
- if len(desc.nested) != len(desc.NestedType) {
- g.Fail("internal error: nesting failure for", desc.GetName())
- }
- }
- }
-}
-
-func (g *Generator) buildNestedEnums(descs []*Descriptor, enums []*EnumDescriptor) {
- for _, desc := range descs {
- if len(desc.EnumType) != 0 {
- for _, enum := range enums {
- if enum.parent == desc {
- desc.enums = append(desc.enums, enum)
- }
- }
- if len(desc.enums) != len(desc.EnumType) {
- g.Fail("internal error: enum nesting failure for", desc.GetName())
- }
- }
- }
-}
-
-// Construct the Descriptor
-func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *FileDescriptor, index int) *Descriptor {
- d := &Descriptor{
- common: common{file},
- DescriptorProto: desc,
- parent: parent,
- index: index,
- }
- if parent == nil {
- d.path = fmt.Sprintf("%d,%d", messagePath, index)
- } else {
- d.path = fmt.Sprintf("%s,%d,%d", parent.path, messageMessagePath, index)
- }
-
- // The only way to distinguish a group from a message is whether
- // the containing message has a TYPE_GROUP field that matches.
- if parent != nil {
- parts := d.TypeName()
- if file.Package != nil {
- parts = append([]string{*file.Package}, parts...)
- }
- exp := "." + strings.Join(parts, ".")
- for _, field := range parent.Field {
- if field.GetType() == descriptor.FieldDescriptorProto_TYPE_GROUP && field.GetTypeName() == exp {
- d.group = true
- break
- }
- }
- }
-
- for _, field := range desc.Extension {
- d.ext = append(d.ext, &ExtensionDescriptor{common{file}, field, d})
- }
-
- return d
-}
-
-// Return a slice of all the Descriptors defined within this file
-func wrapDescriptors(file *FileDescriptor) []*Descriptor {
- sl := make([]*Descriptor, 0, len(file.MessageType)+10)
- for i, desc := range file.MessageType {
- sl = wrapThisDescriptor(sl, desc, nil, file, i)
- }
- return sl
-}
-
-// Wrap this Descriptor, recursively
-func wrapThisDescriptor(sl []*Descriptor, desc *descriptor.DescriptorProto, parent *Descriptor, file *FileDescriptor, index int) []*Descriptor {
- sl = append(sl, newDescriptor(desc, parent, file, index))
- me := sl[len(sl)-1]
- for i, nested := range desc.NestedType {
- sl = wrapThisDescriptor(sl, nested, me, file, i)
- }
- return sl
-}
-
-// Construct the EnumDescriptor
-func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, file *FileDescriptor, index int) *EnumDescriptor {
- ed := &EnumDescriptor{
- common: common{file},
- EnumDescriptorProto: desc,
- parent: parent,
- index: index,
- }
- if parent == nil {
- ed.path = fmt.Sprintf("%d,%d", enumPath, index)
- } else {
- ed.path = fmt.Sprintf("%s,%d,%d", parent.path, messageEnumPath, index)
- }
- return ed
-}
-
-// Return a slice of all the EnumDescriptors defined within this file
-func wrapEnumDescriptors(file *FileDescriptor, descs []*Descriptor) []*EnumDescriptor {
- sl := make([]*EnumDescriptor, 0, len(file.EnumType)+10)
- // Top-level enums.
- for i, enum := range file.EnumType {
- sl = append(sl, newEnumDescriptor(enum, nil, file, i))
- }
- // Enums within messages. Enums within embedded messages appear in the outer-most message.
- for _, nested := range descs {
- for i, enum := range nested.EnumType {
- sl = append(sl, newEnumDescriptor(enum, nested, file, i))
- }
- }
- return sl
-}
-
-// Return a slice of all the top-level ExtensionDescriptors defined within this file.
-func wrapExtensions(file *FileDescriptor) []*ExtensionDescriptor {
- var sl []*ExtensionDescriptor
- for _, field := range file.Extension {
- sl = append(sl, &ExtensionDescriptor{common{file}, field, nil})
- }
- return sl
-}
-
-// Return a slice of all the types that are publicly imported into this file.
-func wrapImported(file *FileDescriptor, g *Generator) (sl []*ImportedDescriptor) {
- for _, index := range file.PublicDependency {
- df := g.fileByName(file.Dependency[index])
- for _, d := range df.desc {
- if d.GetOptions().GetMapEntry() {
- continue
- }
- sl = append(sl, &ImportedDescriptor{common{file}, d})
- }
- for _, e := range df.enum {
- sl = append(sl, &ImportedDescriptor{common{file}, e})
- }
- for _, ext := range df.ext {
- sl = append(sl, &ImportedDescriptor{common{file}, ext})
- }
- }
- return
-}
-
-func extractComments(file *FileDescriptor) {
- file.comments = make(map[string]*descriptor.SourceCodeInfo_Location)
- for _, loc := range file.GetSourceCodeInfo().GetLocation() {
- if loc.LeadingComments == nil {
- continue
- }
- var p []string
- for _, n := range loc.Path {
- p = append(p, strconv.Itoa(int(n)))
- }
- file.comments[strings.Join(p, ",")] = loc
- }
-}
-
-// BuildTypeNameMap builds the map from fully qualified type names to objects.
-// The key names for the map come from the input data, which puts a period at the beginning.
-// It should be called after SetPackageNames and before GenerateAllFiles.
-func (g *Generator) BuildTypeNameMap() {
- g.typeNameToObject = make(map[string]Object)
- for _, f := range g.allFiles {
- // The names in this loop are defined by the proto world, not us, so the
- // package name may be empty. If so, the dotted package name of X will
- // be ".X"; otherwise it will be ".pkg.X".
- dottedPkg := "." + f.GetPackage()
- if dottedPkg != "." {
- dottedPkg += "."
- }
- for _, enum := range f.enum {
- name := dottedPkg + dottedSlice(enum.TypeName())
- g.typeNameToObject[name] = enum
- }
- for _, desc := range f.desc {
- name := dottedPkg + dottedSlice(desc.TypeName())
- g.typeNameToObject[name] = desc
- }
- }
-}
-
-// ObjectNamed, given a fully-qualified input type name as it appears in the input data,
-// returns the descriptor for the message or enum with that name.
-func (g *Generator) ObjectNamed(typeName string) Object {
- o, ok := g.typeNameToObject[typeName]
- if !ok {
- g.Fail("can't find object with type", typeName)
- }
- return o
-}
-
-// AnnotatedAtoms is a list of atoms (as consumed by P) that records the file name and proto AST path from which they originated.
-type AnnotatedAtoms struct {
- source string
- path string
- atoms []interface{}
-}
-
-// Annotate records the file name and proto AST path of a list of atoms
-// so that a later call to P can emit a link from each atom to its origin.
-func Annotate(file *FileDescriptor, path string, atoms ...interface{}) *AnnotatedAtoms {
- return &AnnotatedAtoms{source: *file.Name, path: path, atoms: atoms}
-}
-
-// printAtom prints the (atomic, non-annotation) argument to the generated output.
-func (g *Generator) printAtom(v interface{}) {
- switch v := v.(type) {
- case string:
- g.WriteString(v)
- case *string:
- g.WriteString(*v)
- case bool:
- fmt.Fprint(g, v)
- case *bool:
- fmt.Fprint(g, *v)
- case int:
- fmt.Fprint(g, v)
- case *int32:
- fmt.Fprint(g, *v)
- case *int64:
- fmt.Fprint(g, *v)
- case float64:
- fmt.Fprint(g, v)
- case *float64:
- fmt.Fprint(g, *v)
- case GoPackageName:
- g.WriteString(string(v))
- case GoImportPath:
- g.WriteString(strconv.Quote(string(v)))
- default:
- g.Fail(fmt.Sprintf("unknown type in printer: %T", v))
- }
-}
-
-// P prints the arguments to the generated output. It handles strings and int32s, plus
-// handling indirections because they may be *string, etc. Any inputs of type AnnotatedAtoms may emit
-// annotations in a .meta file in addition to outputting the atoms themselves (if g.annotateCode
-// is true).
-func (g *Generator) P(str ...interface{}) {
- if !g.writeOutput {
- return
- }
- g.WriteString(g.indent)
- for _, v := range str {
- switch v := v.(type) {
- case *AnnotatedAtoms:
- begin := int32(g.Len())
- for _, v := range v.atoms {
- g.printAtom(v)
- }
- if g.annotateCode {
- end := int32(g.Len())
- var path []int32
- for _, token := range strings.Split(v.path, ",") {
- val, err := strconv.ParseInt(token, 10, 32)
- if err != nil {
- g.Fail("could not parse proto AST path: ", err.Error())
- }
- path = append(path, int32(val))
- }
- g.annotations = append(g.annotations, &descriptor.GeneratedCodeInfo_Annotation{
- Path: path,
- SourceFile: &v.source,
- Begin: &begin,
- End: &end,
- })
- }
- default:
- g.printAtom(v)
- }
- }
- g.WriteByte('\n')
-}
-
-// addInitf stores the given statement to be printed inside the file's init function.
-// The statement is given as a format specifier and arguments.
-func (g *Generator) addInitf(stmt string, a ...interface{}) {
- g.init = append(g.init, fmt.Sprintf(stmt, a...))
-}
-
- // In indents the output one tab stop.
-func (g *Generator) In() { g.indent += "\t" }
-
-// Out unindents the output one tab stop.
-func (g *Generator) Out() {
- if len(g.indent) > 0 {
- g.indent = g.indent[1:]
- }
-}
-
-// GenerateAllFiles generates the output for all the files we're outputting.
-func (g *Generator) GenerateAllFiles() {
- // Initialize the plugins
- for _, p := range plugins {
- p.Init(g)
- }
- // Generate the output. The generator runs for every file, even the files
- // that we don't generate output for, so that we can collate the full list
- // of exported symbols to support public imports.
- genFileMap := make(map[*FileDescriptor]bool, len(g.genFiles))
- for _, file := range g.genFiles {
- genFileMap[file] = true
- }
- for _, file := range g.allFiles {
- g.Reset()
- g.annotations = nil
- g.writeOutput = genFileMap[file]
- g.generate(file)
- if !g.writeOutput {
- continue
- }
- fname := file.goFileName(g.pathType)
- g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{
- Name: proto.String(fname),
- Content: proto.String(g.String()),
- })
- if g.annotateCode {
- // Store the generated code annotations in text, as the protoc plugin protocol requires that
- // strings contain valid UTF-8.
- g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{
- Name: proto.String(file.goFileName(g.pathType) + ".meta"),
- Content: proto.String(proto.CompactTextString(&descriptor.GeneratedCodeInfo{Annotation: g.annotations})),
- })
- }
- }
-}
-
-// Run all the plugins associated with the file.
-func (g *Generator) runPlugins(file *FileDescriptor) {
- for _, p := range plugins {
- p.Generate(file)
- }
-}
-
-// Fill the response protocol buffer with the generated output for all the files we're
-// supposed to generate.
-func (g *Generator) generate(file *FileDescriptor) {
- g.file = file
- g.usedPackages = make(map[GoImportPath]bool)
- g.packageNames = make(map[GoImportPath]GoPackageName)
- g.usedPackageNames = make(map[GoPackageName]bool)
- g.addedImports = make(map[GoImportPath]bool)
- for name := range globalPackageNames {
- g.usedPackageNames[name] = true
- }
-
- g.P("// This is a compile-time assertion to ensure that this generated file")
- g.P("// is compatible with the proto package it is being compiled against.")
- g.P("// A compilation error at this line likely means your copy of the")
- g.P("// proto package needs to be updated.")
- g.P("const _ = ", g.Pkg["proto"], ".ProtoPackageIsVersion", generatedCodeVersion, " // please upgrade the proto package")
- g.P()
-
- for _, td := range g.file.imp {
- g.generateImported(td)
- }
- for _, enum := range g.file.enum {
- g.generateEnum(enum)
- }
- for _, desc := range g.file.desc {
- // Don't generate virtual messages for maps.
- if desc.GetOptions().GetMapEntry() {
- continue
- }
- g.generateMessage(desc)
- }
- for _, ext := range g.file.ext {
- g.generateExtension(ext)
- }
- g.generateInitFunction()
- g.generateFileDescriptor(file)
-
- // Run the plugins before the imports so we know which imports are necessary.
- g.runPlugins(file)
-
- // Generate header and imports last, though they appear first in the output.
- rem := g.Buffer
- remAnno := g.annotations
- g.Buffer = new(bytes.Buffer)
- g.annotations = nil
- g.generateHeader()
- g.generateImports()
- if !g.writeOutput {
- return
- }
- // Adjust the offsets for annotations displaced by the header and imports.
- for _, anno := range remAnno {
- *anno.Begin += int32(g.Len())
- *anno.End += int32(g.Len())
- g.annotations = append(g.annotations, anno)
- }
- g.Write(rem.Bytes())
-
- // Reformat generated code and patch annotation locations.
- fset := token.NewFileSet()
- original := g.Bytes()
- if g.annotateCode {
- // make a copy independent of g; we'll need it after Reset.
- original = append([]byte(nil), original...)
- }
- fileAST, err := parser.ParseFile(fset, "", original, parser.ParseComments)
- if err != nil {
- // Print out the bad code with line numbers.
- // This should never happen in practice, but it can while changing generated code,
- // so consider this a debugging aid.
- var src bytes.Buffer
- s := bufio.NewScanner(bytes.NewReader(original))
- for line := 1; s.Scan(); line++ {
- fmt.Fprintf(&src, "%5d\t%s\n", line, s.Bytes())
- }
- g.Fail("bad Go source code was generated:", err.Error(), "\n"+src.String())
- }
- ast.SortImports(fset, fileAST)
- g.Reset()
- err = (&printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}).Fprint(g, fset, fileAST)
- if err != nil {
- g.Fail("generated Go source code could not be reformatted:", err.Error())
- }
- if g.annotateCode {
- m, err := remap.Compute(original, g.Bytes())
- if err != nil {
- g.Fail("formatted generated Go source code could not be mapped back to the original code:", err.Error())
- }
- for _, anno := range g.annotations {
- new, ok := m.Find(int(*anno.Begin), int(*anno.End))
- if !ok {
- g.Fail("span in formatted generated Go source code could not be mapped back to the original code")
- }
- *anno.Begin = int32(new.Pos)
- *anno.End = int32(new.End)
- }
- }
-}
-
-// Generate the header, including package definition
-func (g *Generator) generateHeader() {
- g.P("// Code generated by protoc-gen-go. DO NOT EDIT.")
- if g.file.GetOptions().GetDeprecated() {
- g.P("// ", g.file.Name, " is a deprecated file.")
- } else {
- g.P("// source: ", g.file.Name)
- }
- g.P()
- g.PrintComments(strconv.Itoa(packagePath))
- g.P()
- g.P("package ", g.file.packageName)
- g.P()
-}
-
-// deprecationComment is the standard comment added to deprecated
-// messages, fields, enums, and enum values.
-var deprecationComment = "// Deprecated: Do not use."
-
-// PrintComments prints any comments from the source .proto file.
-// The path is a comma-separated list of integers.
-// It returns an indication of whether any comments were printed.
-// See descriptor.proto for its format.
-func (g *Generator) PrintComments(path string) bool {
- if !g.writeOutput {
- return false
- }
- if c, ok := g.makeComments(path); ok {
- g.P(c)
- return true
- }
- return false
-}
-
-// makeComments generates the comment string for the field, no "\n" at the end
-func (g *Generator) makeComments(path string) (string, bool) {
- loc, ok := g.file.comments[path]
- if !ok {
- return "", false
- }
- w := new(bytes.Buffer)
- nl := ""
- for _, line := range strings.Split(strings.TrimSuffix(loc.GetLeadingComments(), "\n"), "\n") {
- fmt.Fprintf(w, "%s//%s", nl, line)
- nl = "\n"
- }
- return w.String(), true
-}
-
-func (g *Generator) fileByName(filename string) *FileDescriptor {
- return g.allFilesByName[filename]
-}
-
-// weak returns whether the ith import of the current file is a weak import.
-func (g *Generator) weak(i int32) bool {
- for _, j := range g.file.WeakDependency {
- if j == i {
- return true
- }
- }
- return false
-}
-
-// Generate the imports
-func (g *Generator) generateImports() {
- imports := make(map[GoImportPath]GoPackageName)
- for i, s := range g.file.Dependency {
- fd := g.fileByName(s)
- importPath := fd.importPath
- // Do not import our own package.
- if importPath == g.file.importPath {
- continue
- }
- // Do not import weak imports.
- if g.weak(int32(i)) {
- continue
- }
- // Do not import a package twice.
- if _, ok := imports[importPath]; ok {
- continue
- }
- // We need to import all the dependencies, even if we don't reference them,
- // because other code and tools depend on having the full transitive closure
- // of protocol buffer types in the binary.
- packageName := g.GoPackageName(importPath)
- if _, ok := g.usedPackages[importPath]; !ok {
- packageName = "_"
- }
- imports[importPath] = packageName
- }
- for importPath := range g.addedImports {
- imports[importPath] = g.GoPackageName(importPath)
- }
- // We almost always need a proto import. Rather than computing when we
- // do, which is tricky when there's a plugin, just import it and
- // reference it later. The same argument applies to the fmt and math packages.
- g.P("import (")
- g.P(g.Pkg["fmt"] + ` "fmt"`)
- g.P(g.Pkg["math"] + ` "math"`)
- g.P(g.Pkg["proto"]+" ", GoImportPath(g.ImportPrefix)+"github.com/golang/protobuf/proto")
- for importPath, packageName := range imports {
- g.P(packageName, " ", GoImportPath(g.ImportPrefix)+importPath)
- }
- g.P(")")
- g.P()
- // TODO: may need to worry about uniqueness across plugins
- for _, p := range plugins {
- p.GenerateImports(g.file)
- g.P()
- }
- g.P("// Reference imports to suppress errors if they are not otherwise used.")
- g.P("var _ = ", g.Pkg["proto"], ".Marshal")
- g.P("var _ = ", g.Pkg["fmt"], ".Errorf")
- g.P("var _ = ", g.Pkg["math"], ".Inf")
- g.P()
-}
-
-func (g *Generator) generateImported(id *ImportedDescriptor) {
- df := id.o.File()
- filename := *df.Name
- if df.importPath == g.file.importPath {
- // Don't generate type aliases for files in the same Go package as this one.
- return
- }
- if !supportTypeAliases {
- g.Fail(fmt.Sprintf("%s: public imports require at least go1.9", filename))
- }
- g.usedPackages[df.importPath] = true
-
- for _, sym := range df.exported[id.o] {
- sym.GenerateAlias(g, filename, g.GoPackageName(df.importPath))
- }
-
- g.P()
-}
-
-// Generate the enum definitions for this EnumDescriptor.
-func (g *Generator) generateEnum(enum *EnumDescriptor) {
- // The full type name
- typeName := enum.TypeName()
- // The full type name, CamelCased.
- ccTypeName := CamelCaseSlice(typeName)
- ccPrefix := enum.prefix()
-
- deprecatedEnum := ""
- if enum.GetOptions().GetDeprecated() {
- deprecatedEnum = deprecationComment
- }
- g.PrintComments(enum.path)
- g.P("type ", Annotate(enum.file, enum.path, ccTypeName), " int32", deprecatedEnum)
- g.file.addExport(enum, enumSymbol{ccTypeName, enum.proto3()})
- g.P("const (")
- for i, e := range enum.Value {
- etorPath := fmt.Sprintf("%s,%d,%d", enum.path, enumValuePath, i)
- g.PrintComments(etorPath)
-
- deprecatedValue := ""
- if e.GetOptions().GetDeprecated() {
- deprecatedValue = deprecationComment
- }
-
- name := ccPrefix + *e.Name
- g.P(Annotate(enum.file, etorPath, name), " ", ccTypeName, " = ", e.Number, " ", deprecatedValue)
- g.file.addExport(enum, constOrVarSymbol{name, "const", ccTypeName})
- }
- g.P(")")
- g.P()
- g.P("var ", ccTypeName, "_name = map[int32]string{")
- generated := make(map[int32]bool) // avoid duplicate values
- for _, e := range enum.Value {
- duplicate := ""
- if _, present := generated[*e.Number]; present {
- duplicate = "// Duplicate value: "
- }
- g.P(duplicate, e.Number, ": ", strconv.Quote(*e.Name), ",")
- generated[*e.Number] = true
- }
- g.P("}")
- g.P()
- g.P("var ", ccTypeName, "_value = map[string]int32{")
- for _, e := range enum.Value {
- g.P(strconv.Quote(*e.Name), ": ", e.Number, ",")
- }
- g.P("}")
- g.P()
-
- if !enum.proto3() {
- g.P("func (x ", ccTypeName, ") Enum() *", ccTypeName, " {")
- g.P("p := new(", ccTypeName, ")")
- g.P("*p = x")
- g.P("return p")
- g.P("}")
- g.P()
- }
-
- g.P("func (x ", ccTypeName, ") String() string {")
- g.P("return ", g.Pkg["proto"], ".EnumName(", ccTypeName, "_name, int32(x))")
- g.P("}")
- g.P()
-
- if !enum.proto3() {
- g.P("func (x *", ccTypeName, ") UnmarshalJSON(data []byte) error {")
- g.P("value, err := ", g.Pkg["proto"], ".UnmarshalJSONEnum(", ccTypeName, `_value, data, "`, ccTypeName, `")`)
- g.P("if err != nil {")
- g.P("return err")
- g.P("}")
- g.P("*x = ", ccTypeName, "(value)")
- g.P("return nil")
- g.P("}")
- g.P()
- }
-
- var indexes []string
- for m := enum.parent; m != nil; m = m.parent {
- // XXX: skip groups?
- indexes = append([]string{strconv.Itoa(m.index)}, indexes...)
- }
- indexes = append(indexes, strconv.Itoa(enum.index))
- g.P("func (", ccTypeName, ") EnumDescriptor() ([]byte, []int) {")
- g.P("return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "}")
- g.P("}")
- g.P()
- if enum.file.GetPackage() == "google.protobuf" && enum.GetName() == "NullValue" {
- g.P("func (", ccTypeName, `) XXX_WellKnownType() string { return "`, enum.GetName(), `" }`)
- g.P()
- }
-
- g.generateEnumRegistration(enum)
-}
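To make the shape of generateEnum's output concrete, here is a minimal hand-written sketch of what it emits for a hypothetical top-level proto3 enum `Status` with values `UNKNOWN = 0` and `ACTIVE = 1`. The names are illustrative, and the real generated String method calls proto.EnumName rather than the dependency-free lookup used here.

```go
package enumexample

import "strconv"

// Status mirrors the generated `type <Enum> int32` plus the name/value maps
// and prefixed constants that generateEnum writes for each enum.
type Status int32

const (
	Status_UNKNOWN Status = 0
	Status_ACTIVE  Status = 1
)

var Status_name = map[int32]string{
	0: "UNKNOWN",
	1: "ACTIVE",
}

var Status_value = map[string]int32{
	"UNKNOWN": 0,
	"ACTIVE":  1,
}

// The generated String method delegates to proto.EnumName; strconv keeps
// this sketch self-contained.
func (x Status) String() string {
	if s, ok := Status_name[int32(x)]; ok {
		return s
	}
	return strconv.Itoa(int(x))
}
```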
-
-// The tag is a string like "varint,2,opt,name=fieldname,def=7" that
-// identifies details of the field for the protocol buffer marshaling and unmarshaling
-// code. The fields are:
-// wire encoding
-// protocol tag number
-// opt,req,rep for optional, required, or repeated
-// packed whether the encoding is "packed" (optional; repeated primitives only)
-// name= the original declared name
-// enum= the name of the enum type if it is an enum-typed field.
-// proto3 if this field is in a proto3 message
-// def= string representation of the default value, if any.
-// The default value must be in a representation that can be used at run-time
-// to generate the default value. Thus bools become 0 and 1, for instance.
-func (g *Generator) goTag(message *Descriptor, field *descriptor.FieldDescriptorProto, wiretype string) string {
- optrepreq := ""
- switch {
- case isOptional(field):
- optrepreq = "opt"
- case isRequired(field):
- optrepreq = "req"
- case isRepeated(field):
- optrepreq = "rep"
- }
- var defaultValue string
- if dv := field.DefaultValue; dv != nil { // set means an explicit default
- defaultValue = *dv
- // Some types need tweaking.
- switch *field.Type {
- case descriptor.FieldDescriptorProto_TYPE_BOOL:
- if defaultValue == "true" {
- defaultValue = "1"
- } else {
- defaultValue = "0"
- }
- case descriptor.FieldDescriptorProto_TYPE_STRING,
- descriptor.FieldDescriptorProto_TYPE_BYTES:
- // Nothing to do. Quoting is done for the whole tag.
- case descriptor.FieldDescriptorProto_TYPE_ENUM:
- // For enums we need to provide the integer constant.
- obj := g.ObjectNamed(field.GetTypeName())
- if id, ok := obj.(*ImportedDescriptor); ok {
- // It is an enum that was publicly imported.
- // We need the underlying type.
- obj = id.o
- }
- enum, ok := obj.(*EnumDescriptor)
- if !ok {
- log.Printf("obj is a %T", obj)
- if id, ok := obj.(*ImportedDescriptor); ok {
- log.Printf("id.o is a %T", id.o)
- }
- g.Fail("unknown enum type", CamelCaseSlice(obj.TypeName()))
- }
- defaultValue = enum.integerValueAsString(defaultValue)
- case descriptor.FieldDescriptorProto_TYPE_FLOAT:
- if def := defaultValue; def != "inf" && def != "-inf" && def != "nan" {
- if f, err := strconv.ParseFloat(defaultValue, 32); err == nil {
- defaultValue = fmt.Sprint(float32(f))
- }
- }
- case descriptor.FieldDescriptorProto_TYPE_DOUBLE:
- if def := defaultValue; def != "inf" && def != "-inf" && def != "nan" {
- if f, err := strconv.ParseFloat(defaultValue, 64); err == nil {
- defaultValue = fmt.Sprint(f)
- }
- }
- }
- defaultValue = ",def=" + defaultValue
- }
- enum := ""
- if *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM {
- // We avoid using obj.GoPackageName(), because we want to use the
- // original (proto-world) package name.
- obj := g.ObjectNamed(field.GetTypeName())
- if id, ok := obj.(*ImportedDescriptor); ok {
- obj = id.o
- }
- enum = ",enum="
- if pkg := obj.File().GetPackage(); pkg != "" {
- enum += pkg + "."
- }
- enum += CamelCaseSlice(obj.TypeName())
- }
- packed := ""
- if (field.Options != nil && field.Options.GetPacked()) ||
- // Per https://developers.google.com/protocol-buffers/docs/proto3#simple:
- // "In proto3, repeated fields of scalar numeric types use packed encoding by default."
- (message.proto3() && (field.Options == nil || field.Options.Packed == nil) &&
- isRepeated(field) && isScalar(field)) {
- packed = ",packed"
- }
- fieldName := field.GetName()
- name := fieldName
- if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP {
- // We must use the type name for groups instead of
- // the field name to preserve capitalization.
- // type_name in FieldDescriptorProto is fully-qualified,
- // but we only want the local part.
- name = *field.TypeName
- if i := strings.LastIndex(name, "."); i >= 0 {
- name = name[i+1:]
- }
- }
- if json := field.GetJsonName(); field.Extendee == nil && json != "" && json != name {
- // TODO: escaping might be needed, in which case
- // perhaps this should be in its own "json" tag.
- name += ",json=" + json
- }
- name = ",name=" + name
- if message.proto3() {
- name += ",proto3"
- }
- oneof := ""
- if field.OneofIndex != nil {
- oneof = ",oneof"
- }
- return strconv.Quote(fmt.Sprintf("%s,%d,%s%s%s%s%s%s",
- wiretype,
- field.GetNumber(),
- optrepreq,
- packed,
- name,
- enum,
- oneof,
- defaultValue))
-}
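As a worked example of the tag layout documented above, a hypothetical proto2 optional string field `region = 1` with default `us-east-1` would yield the quoted tag below. This sketch only concatenates the pieces in the same order goTag uses; it is not the generator's code.

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Pieces in goTag's order: wire encoding, field number, label, then the
	// comma-prefixed options (name=, enum=, oneof, def=) that apply.
	wiretype := "bytes"     // strings use the "bytes" wire encoding
	number := 1             // proto field number
	label := "opt"          // optional field
	name := ",name=region"  // original declared name
	def := ",def=us-east-1" // explicit default from the .proto file

	tag := strconv.Quote(fmt.Sprintf("%s,%d,%s%s%s", wiretype, number, label, name, def))
	fmt.Println(tag) // "bytes,1,opt,name=region,def=us-east-1"
}
```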
-
-func needsStar(typ descriptor.FieldDescriptorProto_Type) bool {
- switch typ {
- case descriptor.FieldDescriptorProto_TYPE_GROUP:
- return false
- case descriptor.FieldDescriptorProto_TYPE_MESSAGE:
- return false
- case descriptor.FieldDescriptorProto_TYPE_BYTES:
- return false
- }
- return true
-}
-
-// TypeName is the printed name appropriate for an item. If the object is in the current file,
-// TypeName drops the package name and underscores the rest.
- // Otherwise the object is from another package, and the result is the underscored
-// package name followed by the item name.
-// The result always has an initial capital.
-func (g *Generator) TypeName(obj Object) string {
- return g.DefaultPackageName(obj) + CamelCaseSlice(obj.TypeName())
-}
-
-// GoType returns a string representing the type name, and the wire type
-func (g *Generator) GoType(message *Descriptor, field *descriptor.FieldDescriptorProto) (typ string, wire string) {
- // TODO: Options.
- switch *field.Type {
- case descriptor.FieldDescriptorProto_TYPE_DOUBLE:
- typ, wire = "float64", "fixed64"
- case descriptor.FieldDescriptorProto_TYPE_FLOAT:
- typ, wire = "float32", "fixed32"
- case descriptor.FieldDescriptorProto_TYPE_INT64:
- typ, wire = "int64", "varint"
- case descriptor.FieldDescriptorProto_TYPE_UINT64:
- typ, wire = "uint64", "varint"
- case descriptor.FieldDescriptorProto_TYPE_INT32:
- typ, wire = "int32", "varint"
- case descriptor.FieldDescriptorProto_TYPE_UINT32:
- typ, wire = "uint32", "varint"
- case descriptor.FieldDescriptorProto_TYPE_FIXED64:
- typ, wire = "uint64", "fixed64"
- case descriptor.FieldDescriptorProto_TYPE_FIXED32:
- typ, wire = "uint32", "fixed32"
- case descriptor.FieldDescriptorProto_TYPE_BOOL:
- typ, wire = "bool", "varint"
- case descriptor.FieldDescriptorProto_TYPE_STRING:
- typ, wire = "string", "bytes"
- case descriptor.FieldDescriptorProto_TYPE_GROUP:
- desc := g.ObjectNamed(field.GetTypeName())
- typ, wire = "*"+g.TypeName(desc), "group"
- case descriptor.FieldDescriptorProto_TYPE_MESSAGE:
- desc := g.ObjectNamed(field.GetTypeName())
- typ, wire = "*"+g.TypeName(desc), "bytes"
- case descriptor.FieldDescriptorProto_TYPE_BYTES:
- typ, wire = "[]byte", "bytes"
- case descriptor.FieldDescriptorProto_TYPE_ENUM:
- desc := g.ObjectNamed(field.GetTypeName())
- typ, wire = g.TypeName(desc), "varint"
- case descriptor.FieldDescriptorProto_TYPE_SFIXED32:
- typ, wire = "int32", "fixed32"
- case descriptor.FieldDescriptorProto_TYPE_SFIXED64:
- typ, wire = "int64", "fixed64"
- case descriptor.FieldDescriptorProto_TYPE_SINT32:
- typ, wire = "int32", "zigzag32"
- case descriptor.FieldDescriptorProto_TYPE_SINT64:
- typ, wire = "int64", "zigzag64"
- default:
- g.Fail("unknown type for", field.GetName())
- }
- if isRepeated(field) {
- typ = "[]" + typ
- } else if message != nil && message.proto3() {
- return
- } else if field.OneofIndex != nil && message != nil {
- return
- } else if needsStar(*field.Type) {
- typ = "*" + typ
- }
- return
-}
-
-func (g *Generator) RecordTypeUse(t string) {
- if _, ok := g.typeNameToObject[t]; !ok {
- return
- }
- importPath := g.ObjectNamed(t).GoImportPath()
- if importPath == g.outputImportPath {
- // Don't record use of objects in our package.
- return
- }
- g.AddImport(importPath)
- g.usedPackages[importPath] = true
-}
-
-// Method names that may be generated. Fields with these names get an
-// underscore appended. Any change to this set is a potential incompatible
-// API change because it changes generated field names.
-var methodNames = [...]string{
- "Reset",
- "String",
- "ProtoMessage",
- "Marshal",
- "Unmarshal",
- "ExtensionRangeArray",
- "ExtensionMap",
- "Descriptor",
-}
-
-// Names of messages in the `google.protobuf` package for which
-// we will generate XXX_WellKnownType methods.
-var wellKnownTypes = map[string]bool{
- "Any": true,
- "Duration": true,
- "Empty": true,
- "Struct": true,
- "Timestamp": true,
-
- "Value": true,
- "ListValue": true,
- "DoubleValue": true,
- "FloatValue": true,
- "Int64Value": true,
- "UInt64Value": true,
- "Int32Value": true,
- "UInt32Value": true,
- "BoolValue": true,
- "StringValue": true,
- "BytesValue": true,
-}
-
-// getterDefault finds the default value for the field to return from a getter,
- // regardless of whether it's a built-in default or explicitly stated in the source. Returns e.g. "nil", `""`, "Default_MessageType_FieldName"
-func (g *Generator) getterDefault(field *descriptor.FieldDescriptorProto, goMessageType string) string {
- if isRepeated(field) {
- return "nil"
- }
- if def := field.GetDefaultValue(); def != "" {
- defaultConstant := g.defaultConstantName(goMessageType, field.GetName())
- if *field.Type != descriptor.FieldDescriptorProto_TYPE_BYTES {
- return defaultConstant
- }
- return "append([]byte(nil), " + defaultConstant + "...)"
- }
- switch *field.Type {
- case descriptor.FieldDescriptorProto_TYPE_BOOL:
- return "false"
- case descriptor.FieldDescriptorProto_TYPE_STRING:
- return `""`
- case descriptor.FieldDescriptorProto_TYPE_GROUP, descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_BYTES:
- return "nil"
- case descriptor.FieldDescriptorProto_TYPE_ENUM:
- obj := g.ObjectNamed(field.GetTypeName())
- var enum *EnumDescriptor
- if id, ok := obj.(*ImportedDescriptor); ok {
- // The enum type has been publicly imported.
- enum, _ = id.o.(*EnumDescriptor)
- } else {
- enum, _ = obj.(*EnumDescriptor)
- }
- if enum == nil {
- log.Printf("don't know how to generate getter for %s", field.GetName())
- return "nil"
- }
- if len(enum.Value) == 0 {
- return "0 // empty enum"
- }
- first := enum.Value[0].GetName()
- return g.DefaultPackageName(obj) + enum.prefix() + first
- default:
- return "0"
- }
-}
-
-// defaultConstantName builds the name of the default constant from the message
-// type name and the untouched field name, e.g. "Default_MessageType_FieldName"
-func (g *Generator) defaultConstantName(goMessageType, protoFieldName string) string {
- return "Default_" + goMessageType + "_" + CamelCase(protoFieldName)
-}
-
-// The different types of fields in a message and how to actually print them
-// Most of the logic for generateMessage is in the methods of these types.
-//
- // Note that the content of the field is irrelevant: a simpleField can contain
-// anything from a scalar to a group (which is just a message).
-//
-// Extension fields (and message sets) are however handled separately.
-//
- // simpleField - a field that is neither weak nor oneof, possibly repeated
-// oneofField - field containing list of subfields:
-// - oneofSubField - a field within the oneof
-
-// msgCtx contains the context for the generator functions.
-type msgCtx struct {
- goName string // Go struct name of the message, e.g. MessageName
- message *Descriptor // The descriptor for the message
-}
-
-// fieldCommon contains data common to all types of fields.
-type fieldCommon struct {
- goName string // Go name of field, e.g. "FieldName" or "Descriptor_"
- protoName string // Name of field in proto language, e.g. "field_name" or "descriptor"
- getterName string // Name of the getter, e.g. "GetFieldName" or "GetDescriptor_"
- goType string // The Go type as a string, e.g. "*int32" or "*OtherMessage"
- tags string // The tag string/annotation for the type, e.g. `protobuf:"varint,8,opt,name=region_id,json=regionId"`
- fullPath string // The full path of the field as used by Annotate etc, e.g. "4,0,2,0"
-}
-
-// getProtoName gets the proto name of a field, e.g. "field_name" or "descriptor".
-func (f *fieldCommon) getProtoName() string {
- return f.protoName
-}
-
-// getGoType returns the go type of the field as a string, e.g. "*int32".
-func (f *fieldCommon) getGoType() string {
- return f.goType
-}
-
-// simpleField is not weak, not a oneof, not an extension. Can be required, optional or repeated.
-type simpleField struct {
- fieldCommon
- protoTypeName string // Proto type name, empty if primitive, e.g. ".google.protobuf.Duration"
- protoType descriptor.FieldDescriptorProto_Type // Actual type enum value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64
- deprecated string // Deprecation comment, if any, e.g. "// Deprecated: Do not use."
- getterDef string // Default for getters, e.g. "nil", `""` or "Default_MessageType_FieldName"
- protoDef string // Default value as defined in the proto file, e.g "yoshi" or "5"
- comment string // The full comment for the field, e.g. "// Useful information"
-}
-
-// decl prints the declaration of the field in the struct (if any).
-func (f *simpleField) decl(g *Generator, mc *msgCtx) {
- g.P(f.comment, Annotate(mc.message.file, f.fullPath, f.goName), "\t", f.goType, "\t`", f.tags, "`", f.deprecated)
-}
-
-// getter prints the getter for the field.
-func (f *simpleField) getter(g *Generator, mc *msgCtx) {
- star := ""
- tname := f.goType
- if needsStar(f.protoType) && tname[0] == '*' {
- tname = tname[1:]
- star = "*"
- }
- if f.deprecated != "" {
- g.P(f.deprecated)
- }
- g.P("func (m *", mc.goName, ") ", Annotate(mc.message.file, f.fullPath, f.getterName), "() "+tname+" {")
- if f.getterDef == "nil" { // Simpler getter
- g.P("if m != nil {")
- g.P("return m." + f.goName)
- g.P("}")
- g.P("return nil")
- g.P("}")
- g.P()
- return
- }
- if mc.message.proto3() {
- g.P("if m != nil {")
- } else {
- g.P("if m != nil && m." + f.goName + " != nil {")
- }
- g.P("return " + star + "m." + f.goName)
- g.P("}")
- g.P("return ", f.getterDef)
- g.P("}")
- g.P()
-}
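Putting the default handling and the getter together, the following sketch shows the shape of what the code above emits for a hypothetical proto2 message `Profile` with an optional string field `region` whose default is `us-east-1`; the names are invented for illustration.

```go
package main

import "fmt"

// Profile stands in for a generated proto2 message with one optional field.
type Profile struct {
	Region *string
}

// Default constant in the Default_<Message>_<Field> form produced by
// generateDefaultConstants.
const Default_Profile_Region string = "us-east-1"

// Getter in the shape simpleField.getter emits for a proto2 field with an
// explicit default: nil-safe, dereferencing the pointer only when set.
func (m *Profile) GetRegion() string {
	if m != nil && m.Region != nil {
		return *m.Region
	}
	return Default_Profile_Region
}

func main() {
	var p *Profile
	fmt.Println(p.GetRegion()) // falls back to the default: us-east-1
}
```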
-
-// setter prints the setter method of the field.
-func (f *simpleField) setter(g *Generator, mc *msgCtx) {
- // No setter for regular fields yet
-}
-
-// getProtoDef returns the default value explicitly stated in the proto file, e.g "yoshi" or "5".
-func (f *simpleField) getProtoDef() string {
- return f.protoDef
-}
-
-// getProtoTypeName returns the protobuf type name for the field as returned by field.GetTypeName(), e.g. ".google.protobuf.Duration".
-func (f *simpleField) getProtoTypeName() string {
- return f.protoTypeName
-}
-
-// getProtoType returns the *field.Type value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64.
-func (f *simpleField) getProtoType() descriptor.FieldDescriptorProto_Type {
- return f.protoType
-}
-
- // oneofSubFields are kept in a slice held by each oneofField. They do not appear in the top-level slice of fields for the message.
-type oneofSubField struct {
- fieldCommon
- protoTypeName string // Proto type name, empty if primitive, e.g. ".google.protobuf.Duration"
- protoType descriptor.FieldDescriptorProto_Type // Actual type enum value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64
- oneofTypeName string // Type name of the enclosing struct, e.g. "MessageName_FieldName"
- fieldNumber int // Actual field number, as defined in proto, e.g. 12
- getterDef string // Default for getters, e.g. "nil", `""` or "Default_MessageType_FieldName"
- protoDef string // Default value as defined in the proto file, e.g "yoshi" or "5"
- deprecated string // Deprecation comment, if any.
-}
-
- // typedNil prints a nil cast to the pointer to this field.
-// - for XXX_OneofWrappers
-func (f *oneofSubField) typedNil(g *Generator) {
- g.P("(*", f.oneofTypeName, ")(nil),")
-}
-
-// getProtoDef returns the default value explicitly stated in the proto file, e.g "yoshi" or "5".
-func (f *oneofSubField) getProtoDef() string {
- return f.protoDef
-}
-
-// getProtoTypeName returns the protobuf type name for the field as returned by field.GetTypeName(), e.g. ".google.protobuf.Duration".
-func (f *oneofSubField) getProtoTypeName() string {
- return f.protoTypeName
-}
-
-// getProtoType returns the *field.Type value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64.
-func (f *oneofSubField) getProtoType() descriptor.FieldDescriptorProto_Type {
- return f.protoType
-}
-
-// oneofField represents the oneof on top level.
-// The alternative fields within the oneof are represented by oneofSubField.
-type oneofField struct {
- fieldCommon
- subFields []*oneofSubField // All the possible oneof fields
- comment string // The full comment for the field, e.g. "// Types that are valid to be assigned to MyOneof:\n\\"
-}
-
-// decl prints the declaration of the field in the struct (if any).
-func (f *oneofField) decl(g *Generator, mc *msgCtx) {
- comment := f.comment
- for _, sf := range f.subFields {
- comment += "//\t*" + sf.oneofTypeName + "\n"
- }
- g.P(comment, Annotate(mc.message.file, f.fullPath, f.goName), " ", f.goType, " `", f.tags, "`")
-}
-
-// getter for a oneof field will print additional discriminators and interfaces for the oneof,
-// also it prints all the getters for the sub fields.
-func (f *oneofField) getter(g *Generator, mc *msgCtx) {
- // The discriminator type
- g.P("type ", f.goType, " interface {")
- g.P(f.goType, "()")
- g.P("}")
- g.P()
- // The subField types, fulfilling the discriminator type contract
- for _, sf := range f.subFields {
- g.P("type ", Annotate(mc.message.file, sf.fullPath, sf.oneofTypeName), " struct {")
- g.P(Annotate(mc.message.file, sf.fullPath, sf.goName), " ", sf.goType, " `", sf.tags, "`")
- g.P("}")
- g.P()
- }
- for _, sf := range f.subFields {
- g.P("func (*", sf.oneofTypeName, ") ", f.goType, "() {}")
- g.P()
- }
- // Getter for the oneof field
- g.P("func (m *", mc.goName, ") ", Annotate(mc.message.file, f.fullPath, f.getterName), "() ", f.goType, " {")
- g.P("if m != nil { return m.", f.goName, " }")
- g.P("return nil")
- g.P("}")
- g.P()
- // Getters for each oneof
- for _, sf := range f.subFields {
- if sf.deprecated != "" {
- g.P(sf.deprecated)
- }
- g.P("func (m *", mc.goName, ") ", Annotate(mc.message.file, sf.fullPath, sf.getterName), "() "+sf.goType+" {")
- g.P("if x, ok := m.", f.getterName, "().(*", sf.oneofTypeName, "); ok {")
- g.P("return x.", sf.goName)
- g.P("}")
- g.P("return ", sf.getterDef)
- g.P("}")
- g.P()
- }
-}
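The oneof machinery above is easier to see from its output. For a hypothetical message `Event` with `oneof payload { string text = 1; int64 code = 2; }`, the generated code has roughly the following shape (type and field names here are illustrative):

```go
package oneofexample

// Discriminator interface named is<Message>_<Oneof>, implemented only by the
// wrapper types below.
type isEvent_Payload interface {
	isEvent_Payload()
}

// One wrapper struct per oneof alternative, named <Message>_<Field>.
type Event_Text struct {
	Text string `protobuf:"bytes,1,opt,name=text,proto3,oneof"`
}

type Event_Code struct {
	Code int64 `protobuf:"varint,2,opt,name=code,proto3,oneof"`
}

func (*Event_Text) isEvent_Payload() {}
func (*Event_Code) isEvent_Payload() {}

// The enclosing message holds the oneof as a single interface-typed field.
type Event struct {
	// Types that are valid to be assigned to Payload:
	//	*Event_Text
	//	*Event_Code
	Payload isEvent_Payload `protobuf_oneof:"payload"`
}

func (m *Event) GetPayload() isEvent_Payload {
	if m != nil {
		return m.Payload
	}
	return nil
}

// Per-alternative getters type-assert the discriminator and fall back to the
// zero value when a different alternative (or nothing) is set.
func (m *Event) GetText() string {
	if x, ok := m.GetPayload().(*Event_Text); ok {
		return x.Text
	}
	return ""
}

func (m *Event) GetCode() int64 {
	if x, ok := m.GetPayload().(*Event_Code); ok {
		return x.Code
	}
	return 0
}
```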
-
-// setter prints the setter method of the field.
-func (f *oneofField) setter(g *Generator, mc *msgCtx) {
- // No setters for oneof yet
-}
-
-// topLevelField interface implemented by all types of fields on the top level (not oneofSubField).
-type topLevelField interface {
- decl(g *Generator, mc *msgCtx) // print declaration within the struct
- getter(g *Generator, mc *msgCtx) // print getter
- setter(g *Generator, mc *msgCtx) // print setter if applicable
-}
-
-// defField interface implemented by all types of fields that can have defaults (not oneofField, but instead oneofSubField).
-type defField interface {
- getProtoDef() string // default value explicitly stated in the proto file, e.g "yoshi" or "5"
- getProtoName() string // proto name of a field, e.g. "field_name" or "descriptor"
- getGoType() string // go type of the field as a string, e.g. "*int32"
- getProtoTypeName() string // protobuf type name for the field, e.g. ".google.protobuf.Duration"
- getProtoType() descriptor.FieldDescriptorProto_Type // *field.Type value, e.g. descriptor.FieldDescriptorProto_TYPE_FIXED64
-}
-
- // generateDefaultConstants adds constants for default values if needed, which is only if the default value is
- // explicitly stated in the proto.
-func (g *Generator) generateDefaultConstants(mc *msgCtx, topLevelFields []topLevelField) {
- // Collect fields that can have defaults
- dFields := []defField{}
- for _, pf := range topLevelFields {
- if f, ok := pf.(*oneofField); ok {
- for _, osf := range f.subFields {
- dFields = append(dFields, osf)
- }
- continue
- }
- dFields = append(dFields, pf.(defField))
- }
- for _, df := range dFields {
- def := df.getProtoDef()
- if def == "" {
- continue
- }
- fieldname := g.defaultConstantName(mc.goName, df.getProtoName())
- typename := df.getGoType()
- if typename[0] == '*' {
- typename = typename[1:]
- }
- kind := "const "
- switch {
- case typename == "bool":
- case typename == "string":
- def = strconv.Quote(def)
- case typename == "[]byte":
- def = "[]byte(" + strconv.Quote(unescape(def)) + ")"
- kind = "var "
- case def == "inf", def == "-inf", def == "nan":
- // These names are known to, and defined by, the protocol language.
- switch def {
- case "inf":
- def = "math.Inf(1)"
- case "-inf":
- def = "math.Inf(-1)"
- case "nan":
- def = "math.NaN()"
- }
- if df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_FLOAT {
- def = "float32(" + def + ")"
- }
- kind = "var "
- case df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_FLOAT:
- if f, err := strconv.ParseFloat(def, 32); err == nil {
- def = fmt.Sprint(float32(f))
- }
- case df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_DOUBLE:
- if f, err := strconv.ParseFloat(def, 64); err == nil {
- def = fmt.Sprint(f)
- }
- case df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_ENUM:
- // Must be an enum. Need to construct the prefixed name.
- obj := g.ObjectNamed(df.getProtoTypeName())
- var enum *EnumDescriptor
- if id, ok := obj.(*ImportedDescriptor); ok {
- // The enum type has been publicly imported.
- enum, _ = id.o.(*EnumDescriptor)
- } else {
- enum, _ = obj.(*EnumDescriptor)
- }
- if enum == nil {
- log.Printf("don't know how to generate constant for %s", fieldname)
- continue
- }
- def = g.DefaultPackageName(obj) + enum.prefix() + def
- }
- g.P(kind, fieldname, " ", typename, " = ", def)
- g.file.addExport(mc.message, constOrVarSymbol{fieldname, kind, ""})
- }
- g.P()
-}
-
-// generateInternalStructFields just adds the XXX_ fields to the message struct.
-func (g *Generator) generateInternalStructFields(mc *msgCtx, topLevelFields []topLevelField) {
- g.P("XXX_NoUnkeyedLiteral\tstruct{} `json:\"-\"`") // prevent unkeyed struct literals
- if len(mc.message.ExtensionRange) > 0 {
- messageset := ""
- if opts := mc.message.Options; opts != nil && opts.GetMessageSetWireFormat() {
- messageset = "protobuf_messageset:\"1\" "
- }
- g.P(g.Pkg["proto"], ".XXX_InternalExtensions `", messageset, "json:\"-\"`")
- }
- g.P("XXX_unrecognized\t[]byte `json:\"-\"`")
- g.P("XXX_sizecache\tint32 `json:\"-\"`")
-
-}
-
- // generateOneofFuncs emits the XXX_OneofWrappers method for messages that contain oneof fields.
-func (g *Generator) generateOneofFuncs(mc *msgCtx, topLevelFields []topLevelField) {
- ofields := []*oneofField{}
- for _, f := range topLevelFields {
- if o, ok := f.(*oneofField); ok {
- ofields = append(ofields, o)
- }
- }
- if len(ofields) == 0 {
- return
- }
-
- // OneofFuncs
- g.P("// XXX_OneofWrappers is for the internal use of the proto package.")
- g.P("func (*", mc.goName, ") XXX_OneofWrappers() []interface{} {")
- g.P("return []interface{}{")
- for _, of := range ofields {
- for _, sf := range of.subFields {
- sf.typedNil(g)
- }
- }
- g.P("}")
- g.P("}")
- g.P()
-}
-
- // generateMessageStruct adds the actual struct with its members (but not methods) to the output.
-func (g *Generator) generateMessageStruct(mc *msgCtx, topLevelFields []topLevelField) {
- comments := g.PrintComments(mc.message.path)
-
- // Guarantee deprecation comments appear after user-provided comments.
- if mc.message.GetOptions().GetDeprecated() {
- if comments {
- // Convention: Separate deprecation comments from original
- // comments with an empty line.
- g.P("//")
- }
- g.P(deprecationComment)
- }
-
- g.P("type ", Annotate(mc.message.file, mc.message.path, mc.goName), " struct {")
- for _, pf := range topLevelFields {
- pf.decl(g, mc)
- }
- g.generateInternalStructFields(mc, topLevelFields)
- g.P("}")
-}
-
-// generateGetters adds getters for all fields, including oneofs and weak fields when applicable.
-func (g *Generator) generateGetters(mc *msgCtx, topLevelFields []topLevelField) {
- for _, pf := range topLevelFields {
- pf.getter(g, mc)
- }
-}
-
- // generateSetters adds setters for all fields, including oneofs and weak fields when applicable.
-func (g *Generator) generateSetters(mc *msgCtx, topLevelFields []topLevelField) {
- for _, pf := range topLevelFields {
- pf.setter(g, mc)
- }
-}
-
-// generateCommonMethods adds methods to the message that are not on a per field basis.
-func (g *Generator) generateCommonMethods(mc *msgCtx) {
- // Reset, String and ProtoMessage methods.
- g.P("func (m *", mc.goName, ") Reset() { *m = ", mc.goName, "{} }")
- g.P("func (m *", mc.goName, ") String() string { return ", g.Pkg["proto"], ".CompactTextString(m) }")
- g.P("func (*", mc.goName, ") ProtoMessage() {}")
- var indexes []string
- for m := mc.message; m != nil; m = m.parent {
- indexes = append([]string{strconv.Itoa(m.index)}, indexes...)
- }
- g.P("func (*", mc.goName, ") Descriptor() ([]byte, []int) {")
- g.P("return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "}")
- g.P("}")
- g.P()
- // TODO: Revisit the decision to use a XXX_WellKnownType method
- // if we change proto.MessageName to work with multiple equivalents.
- if mc.message.file.GetPackage() == "google.protobuf" && wellKnownTypes[mc.message.GetName()] {
- g.P("func (*", mc.goName, `) XXX_WellKnownType() string { return "`, mc.message.GetName(), `" }`)
- g.P()
- }
-
- // Extension support methods
- if len(mc.message.ExtensionRange) > 0 {
- g.P()
- g.P("var extRange_", mc.goName, " = []", g.Pkg["proto"], ".ExtensionRange{")
- for _, r := range mc.message.ExtensionRange {
- end := fmt.Sprint(*r.End - 1) // make range inclusive on both ends
- g.P("{Start: ", r.Start, ", End: ", end, "},")
- }
- g.P("}")
- g.P("func (*", mc.goName, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange {")
- g.P("return extRange_", mc.goName)
- g.P("}")
- g.P()
- }
-
- // TODO: It does not scale to keep adding another method for every
- // operation on protos that we want to switch over to using the
- // table-driven approach. Instead, we should only add a single method
- // that allows getting access to the *InternalMessageInfo struct and then
- // calling Unmarshal, Marshal, Merge, Size, and Discard directly on that.
-
- // Wrapper for table-driven marshaling and unmarshaling.
- g.P("func (m *", mc.goName, ") XXX_Unmarshal(b []byte) error {")
- g.P("return xxx_messageInfo_", mc.goName, ".Unmarshal(m, b)")
- g.P("}")
-
- g.P("func (m *", mc.goName, ") XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {")
- g.P("return xxx_messageInfo_", mc.goName, ".Marshal(b, m, deterministic)")
- g.P("}")
-
- g.P("func (m *", mc.goName, ") XXX_Merge(src ", g.Pkg["proto"], ".Message) {")
- g.P("xxx_messageInfo_", mc.goName, ".Merge(m, src)")
- g.P("}")
-
- g.P("func (m *", mc.goName, ") XXX_Size() int {") // avoid name clash with "Size" field in some message
- g.P("return xxx_messageInfo_", mc.goName, ".Size(m)")
- g.P("}")
-
- g.P("func (m *", mc.goName, ") XXX_DiscardUnknown() {")
- g.P("xxx_messageInfo_", mc.goName, ".DiscardUnknown(m)")
- g.P("}")
-
- g.P("var xxx_messageInfo_", mc.goName, " ", g.Pkg["proto"], ".InternalMessageInfo")
- g.P()
-}
-
-// Generate the type, methods and default constant definitions for this Descriptor.
-func (g *Generator) generateMessage(message *Descriptor) {
- topLevelFields := []topLevelField{}
- oFields := make(map[int32]*oneofField)
- // The full type name
- typeName := message.TypeName()
- // The full type name, CamelCased.
- goTypeName := CamelCaseSlice(typeName)
-
- usedNames := make(map[string]bool)
- for _, n := range methodNames {
- usedNames[n] = true
- }
-
- // allocNames finds a conflict-free variation of the given strings,
- // consistently mutating their suffixes.
- // It returns the same number of strings.
- allocNames := func(ns ...string) []string {
- Loop:
- for {
- for _, n := range ns {
- if usedNames[n] {
- for i := range ns {
- ns[i] += "_"
- }
- continue Loop
- }
- }
- for _, n := range ns {
- usedNames[n] = true
- }
- return ns
- }
- }
-
- mapFieldTypes := make(map[*descriptor.FieldDescriptorProto]string) // keep track of the map fields to be added later
-
- // Build a structure more suitable for generating the text in one pass
- for i, field := range message.Field {
- // Allocate the getter and the field at the same time so name
- // collisions create field/method consistent names.
- // TODO: This allocation occurs based on the order of the fields
- // in the proto file, meaning that a change in the field
- // ordering can change generated Method/Field names.
- base := CamelCase(*field.Name)
- ns := allocNames(base, "Get"+base)
- fieldName, fieldGetterName := ns[0], ns[1]
- typename, wiretype := g.GoType(message, field)
- jsonName := *field.Name
- tag := fmt.Sprintf("protobuf:%s json:%q", g.goTag(message, field, wiretype), jsonName+",omitempty")
-
- oneof := field.OneofIndex != nil
- if oneof && oFields[*field.OneofIndex] == nil {
- odp := message.OneofDecl[int(*field.OneofIndex)]
- base := CamelCase(odp.GetName())
- fname := allocNames(base)[0]
-
- // This is the first field of a oneof we haven't seen before.
- // Generate the union field.
- oneofFullPath := fmt.Sprintf("%s,%d,%d", message.path, messageOneofPath, *field.OneofIndex)
- c, ok := g.makeComments(oneofFullPath)
- if ok {
- c += "\n//\n"
- }
- c += "// Types that are valid to be assigned to " + fname + ":\n"
- // Generate the rest of this comment later,
- // when we've computed any disambiguation.
-
- dname := "is" + goTypeName + "_" + fname
- tag := `protobuf_oneof:"` + odp.GetName() + `"`
- of := oneofField{
- fieldCommon: fieldCommon{
- goName: fname,
- getterName: "Get"+fname,
- goType: dname,
- tags: tag,
- protoName: odp.GetName(),
- fullPath: oneofFullPath,
- },
- comment: c,
- }
- topLevelFields = append(topLevelFields, &of)
- oFields[*field.OneofIndex] = &of
- }
-
- if *field.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE {
- desc := g.ObjectNamed(field.GetTypeName())
- if d, ok := desc.(*Descriptor); ok && d.GetOptions().GetMapEntry() {
- // Figure out the Go types and tags for the key and value types.
- keyField, valField := d.Field[0], d.Field[1]
- keyType, keyWire := g.GoType(d, keyField)
- valType, valWire := g.GoType(d, valField)
- keyTag, valTag := g.goTag(d, keyField, keyWire), g.goTag(d, valField, valWire)
-
- // We don't use stars, except for message-typed values.
- // Message and enum types are the only two possibly foreign types used in maps,
- // so record their use. They are not permitted as map keys.
- keyType = strings.TrimPrefix(keyType, "*")
- switch *valField.Type {
- case descriptor.FieldDescriptorProto_TYPE_ENUM:
- valType = strings.TrimPrefix(valType, "*")
- g.RecordTypeUse(valField.GetTypeName())
- case descriptor.FieldDescriptorProto_TYPE_MESSAGE:
- g.RecordTypeUse(valField.GetTypeName())
- default:
- valType = strings.TrimPrefix(valType, "*")
- }
-
- typename = fmt.Sprintf("map[%s]%s", keyType, valType)
- mapFieldTypes[field] = typename // record for the getter generation
-
- tag += fmt.Sprintf(" protobuf_key:%s protobuf_val:%s", keyTag, valTag)
- }
- }
-
- fieldDeprecated := ""
- if field.GetOptions().GetDeprecated() {
- fieldDeprecated = deprecationComment
- }
-
- dvalue := g.getterDefault(field, goTypeName)
- if oneof {
- tname := goTypeName + "_" + fieldName
- // It is possible for this to collide with a message or enum
- // nested in this message. Check for collisions.
- for {
- ok := true
- for _, desc := range message.nested {
- if CamelCaseSlice(desc.TypeName()) == tname {
- ok = false
- break
- }
- }
- for _, enum := range message.enums {
- if CamelCaseSlice(enum.TypeName()) == tname {
- ok = false
- break
- }
- }
- if !ok {
- tname += "_"
- continue
- }
- break
- }
-
- oneofField := oFields[*field.OneofIndex]
- tag := "protobuf:" + g.goTag(message, field, wiretype)
- sf := oneofSubField{
- fieldCommon: fieldCommon{
- goName: fieldName,
- getterName: fieldGetterName,
- goType: typename,
- tags: tag,
- protoName: field.GetName(),
- fullPath: fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i),
- },
- protoTypeName: field.GetTypeName(),
- fieldNumber: int(*field.Number),
- protoType: *field.Type,
- getterDef: dvalue,
- protoDef: field.GetDefaultValue(),
- oneofTypeName: tname,
- deprecated: fieldDeprecated,
- }
- oneofField.subFields = append(oneofField.subFields, &sf)
- g.RecordTypeUse(field.GetTypeName())
- continue
- }
-
- fieldFullPath := fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i)
- c, ok := g.makeComments(fieldFullPath)
- if ok {
- c += "\n"
- }
- rf := simpleField{
- fieldCommon: fieldCommon{
- goName: fieldName,
- getterName: fieldGetterName,
- goType: typename,
- tags: tag,
- protoName: field.GetName(),
- fullPath: fieldFullPath,
- },
- protoTypeName: field.GetTypeName(),
- protoType: *field.Type,
- deprecated: fieldDeprecated,
- getterDef: dvalue,
- protoDef: field.GetDefaultValue(),
- comment: c,
- }
- var pf topLevelField = &rf
-
- topLevelFields = append(topLevelFields, pf)
- g.RecordTypeUse(field.GetTypeName())
- }
-
- mc := &msgCtx{
- goName: goTypeName,
- message: message,
- }
-
- g.generateMessageStruct(mc, topLevelFields)
- g.P()
- g.generateCommonMethods(mc)
- g.P()
- g.generateDefaultConstants(mc, topLevelFields)
- g.P()
- g.generateGetters(mc, topLevelFields)
- g.P()
- g.generateSetters(mc, topLevelFields)
- g.P()
- g.generateOneofFuncs(mc, topLevelFields)
- g.P()
-
- var oneofTypes []string
- for _, f := range topLevelFields {
- if of, ok := f.(*oneofField); ok {
- for _, osf := range of.subFields {
- oneofTypes = append(oneofTypes, osf.oneofTypeName)
- }
- }
- }
-
- opts := message.Options
- ms := &messageSymbol{
- sym: goTypeName,
- hasExtensions: len(message.ExtensionRange) > 0,
- isMessageSet: opts != nil && opts.GetMessageSetWireFormat(),
- oneofTypes: oneofTypes,
- }
- g.file.addExport(message, ms)
-
- for _, ext := range message.ext {
- g.generateExtension(ext)
- }
-
- fullName := strings.Join(message.TypeName(), ".")
- if g.file.Package != nil {
- fullName = *g.file.Package + "." + fullName
- }
-
- g.addInitf("%s.RegisterType((*%s)(nil), %q)", g.Pkg["proto"], goTypeName, fullName)
- // Register types for native map types.
- for _, k := range mapFieldKeys(mapFieldTypes) {
- fullName := strings.TrimPrefix(*k.TypeName, ".")
- g.addInitf("%s.RegisterMapType((%s)(nil), %q)", g.Pkg["proto"], mapFieldTypes[k], fullName)
- }
-
-}
-
-type byTypeName []*descriptor.FieldDescriptorProto
-
-func (a byTypeName) Len() int { return len(a) }
-func (a byTypeName) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
-func (a byTypeName) Less(i, j int) bool { return *a[i].TypeName < *a[j].TypeName }
-
-// mapFieldKeys returns the keys of m in a consistent order.
-func mapFieldKeys(m map[*descriptor.FieldDescriptorProto]string) []*descriptor.FieldDescriptorProto {
- keys := make([]*descriptor.FieldDescriptorProto, 0, len(m))
- for k := range m {
- keys = append(keys, k)
- }
- sort.Sort(byTypeName(keys))
- return keys
-}
-
-var escapeChars = [256]byte{
- 'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t', 'v': '\v', '\\': '\\', '"': '"', '\'': '\'', '?': '?',
-}
-
-// unescape reverses the "C" escaping that protoc does for default values of bytes fields.
-// It is best effort in that it effectively ignores malformed input. Seemingly invalid escape
-// sequences are conveyed, unmodified, into the decoded result.
-func unescape(s string) string {
- // NB: Sadly, we can't use strconv.Unquote because protoc will escape both
- // single and double quotes, but strconv.Unquote only allows one or the
- // other (based on actual surrounding quotes of its input argument).
-
- var out []byte
- for len(s) > 0 {
- // regular character, or too short to be valid escape
- if s[0] != '\\' || len(s) < 2 {
- out = append(out, s[0])
- s = s[1:]
- } else if c := escapeChars[s[1]]; c != 0 {
- // escape sequence
- out = append(out, c)
- s = s[2:]
- } else if s[1] == 'x' || s[1] == 'X' {
- // hex escape, e.g. "\x80"
- if len(s) < 4 {
- // too short to be valid
- out = append(out, s[:2]...)
- s = s[2:]
- continue
- }
- v, err := strconv.ParseUint(s[2:4], 16, 8)
- if err != nil {
- out = append(out, s[:4]...)
- } else {
- out = append(out, byte(v))
- }
- s = s[4:]
- } else if '0' <= s[1] && s[1] <= '7' {
- // octal escape, can vary from 1 to 3 octal digits; e.g., "\0" "\40" or "\164"
- // so consume up to 2 more bytes or up to end-of-string
- n := len(s[1:]) - len(strings.TrimLeft(s[1:], "01234567"))
- if n > 3 {
- n = 3
- }
- v, err := strconv.ParseUint(s[1:1+n], 8, 8)
- if err != nil {
- out = append(out, s[:1+n]...)
- } else {
- out = append(out, byte(v))
- }
- s = s[1+n:]
- } else {
- // bad escape, just propagate the slash as-is
- out = append(out, s[0])
- s = s[1:]
- }
- }
-
- return string(out)
-}
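A few concrete input/output pairs make the escaping rules above easier to check; the helper is unexported, so these are stated as expected results rather than computed by calling it.

```go
package unescapeexample

// Expected decodings under the rules implemented above. Keys are the escaped
// text exactly as protoc emits it (raw string literals keep the backslashes).
var expected = map[string]string{
	`\x41`: "A",    // hex escape
	`\101`: "A",    // octal escape, one to three digits
	`\n\t`: "\n\t", // single-character escapes from escapeChars
	`\q`:   `\q`,   // unrecognized escapes pass through unchanged
}
```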
-
-func (g *Generator) generateExtension(ext *ExtensionDescriptor) {
- ccTypeName := ext.DescName()
-
- extObj := g.ObjectNamed(*ext.Extendee)
- var extDesc *Descriptor
- if id, ok := extObj.(*ImportedDescriptor); ok {
- // This is extending a publicly imported message.
- // We need the underlying type for goTag.
- extDesc = id.o.(*Descriptor)
- } else {
- extDesc = extObj.(*Descriptor)
- }
- extendedType := "*" + g.TypeName(extObj) // always use the original
- field := ext.FieldDescriptorProto
- fieldType, wireType := g.GoType(ext.parent, field)
- tag := g.goTag(extDesc, field, wireType)
- g.RecordTypeUse(*ext.Extendee)
- if n := ext.FieldDescriptorProto.TypeName; n != nil {
- // foreign extension type
- g.RecordTypeUse(*n)
- }
-
- typeName := ext.TypeName()
-
- // Special case for proto2 message sets: If this extension is extending
- // proto2.bridge.MessageSet, and its final name component is "message_set_extension",
- // then drop that last component.
- //
- // TODO: This should be implemented in the text formatter rather than the generator.
- // In addition, the situation for when to apply this special case is implemented
- // differently in other languages:
- // https://github.com/google/protobuf/blob/aff10976/src/google/protobuf/text_format.cc#L1560
- if extDesc.GetOptions().GetMessageSetWireFormat() && typeName[len(typeName)-1] == "message_set_extension" {
- typeName = typeName[:len(typeName)-1]
- }
-
- // For text formatting, the package must be exactly what the .proto file declares,
- // ignoring overrides such as the go_package option, and with no dot/underscore mapping.
- extName := strings.Join(typeName, ".")
- if g.file.Package != nil {
- extName = *g.file.Package + "." + extName
- }
-
- g.P("var ", ccTypeName, " = &", g.Pkg["proto"], ".ExtensionDesc{")
- g.P("ExtendedType: (", extendedType, ")(nil),")
- g.P("ExtensionType: (", fieldType, ")(nil),")
- g.P("Field: ", field.Number, ",")
- g.P(`Name: "`, extName, `",`)
- g.P("Tag: ", tag, ",")
- g.P(`Filename: "`, g.file.GetName(), `",`)
-
- g.P("}")
- g.P()
-
- g.addInitf("%s.RegisterExtension(%s)", g.Pkg["proto"], ext.DescName())
-
- g.file.addExport(ext, constOrVarSymbol{ccTypeName, "var", ""})
-}
-
-func (g *Generator) generateInitFunction() {
- if len(g.init) == 0 {
- return
- }
- g.P("func init() {")
- for _, l := range g.init {
- g.P(l)
- }
- g.P("}")
- g.init = nil
-}
-
-func (g *Generator) generateFileDescriptor(file *FileDescriptor) {
- // Make a copy and trim source_code_info data.
- // TODO: Trim this more when we know exactly what we need.
- pb := proto.Clone(file.FileDescriptorProto).(*descriptor.FileDescriptorProto)
- pb.SourceCodeInfo = nil
-
- b, err := proto.Marshal(pb)
- if err != nil {
- g.Fail(err.Error())
- }
-
- var buf bytes.Buffer
- w, _ := gzip.NewWriterLevel(&buf, gzip.BestCompression)
- w.Write(b)
- w.Close()
- b = buf.Bytes()
-
- v := file.VarName()
- g.P()
- g.P("func init() { ", g.Pkg["proto"], ".RegisterFile(", strconv.Quote(*file.Name), ", ", v, ") }")
- g.P("var ", v, " = []byte{")
- g.P("// ", len(b), " bytes of a gzipped FileDescriptorProto")
- for len(b) > 0 {
- n := 16
- if n > len(b) {
- n = len(b)
- }
-
- s := ""
- for _, c := range b[:n] {
- s += fmt.Sprintf("0x%02x,", c)
- }
- g.P(s)
-
- b = b[n:]
- }
- g.P("}")
-}
-
-func (g *Generator) generateEnumRegistration(enum *EnumDescriptor) {
- // We always print the full (proto-world) package name here.
- pkg := enum.File().GetPackage()
- if pkg != "" {
- pkg += "."
- }
- // The full type name
- typeName := enum.TypeName()
- // The full type name, CamelCased.
- ccTypeName := CamelCaseSlice(typeName)
- g.addInitf("%s.RegisterEnum(%q, %[3]s_name, %[3]s_value)", g.Pkg["proto"], pkg+ccTypeName, ccTypeName)
-}
-
-// And now lots of helper functions.
-
-// Is c an ASCII lower-case letter?
-func isASCIILower(c byte) bool {
- return 'a' <= c && c <= 'z'
-}
-
-// Is c an ASCII digit?
-func isASCIIDigit(c byte) bool {
- return '0' <= c && c <= '9'
-}
-
-// CamelCase returns the CamelCased name.
-// If there is an interior underscore followed by a lower case letter,
-// drop the underscore and convert the letter to upper case.
-// There is a remote possibility of this rewrite causing a name collision,
-// but it's so remote we're prepared to pretend it's nonexistent - since the
-// C++ generator lowercases names, it's extremely unlikely to have two fields
-// with different capitalizations.
-// In short, _my_field_name_2 becomes XMyFieldName_2.
-func CamelCase(s string) string {
- if s == "" {
- return ""
- }
- t := make([]byte, 0, 32)
- i := 0
- if s[0] == '_' {
- // Need a capital letter; drop the '_'.
- t = append(t, 'X')
- i++
- }
- // Invariant: if the next letter is lower case, it must be converted
- // to upper case.
- // That is, we process a word at a time, where words are marked by _ or
- // upper case letter. Digits are treated as words.
- for ; i < len(s); i++ {
- c := s[i]
- if c == '_' && i+1 < len(s) && isASCIILower(s[i+1]) {
- continue // Skip the underscore in s.
- }
- if isASCIIDigit(c) {
- t = append(t, c)
- continue
- }
- // Assume we have a letter now - if not, it's a bogus identifier.
- // The next word is a sequence of characters that must start upper case.
- if isASCIILower(c) {
- c ^= ' ' // Make it a capital letter.
- }
- t = append(t, c) // Guaranteed not lower case.
- // Accept lower case sequence that follows.
- for i+1 < len(s) && isASCIILower(s[i+1]) {
- i++
- t = append(t, s[i])
- }
- }
- return string(t)
-}
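A few conversions implied by the rules documented above, for reference (the first two are common cases; the last is the example from the comment):

```go
package camelcaseexample

// Expected CamelCase conversions. The function is unexported, so these pairs
// are stated rather than computed by calling it.
var expected = map[string]string{
	"foo_bar":          "FooBar",         // underscore + lower-case letter is folded
	"foo_3_bar":        "Foo_3Bar",       // digits keep a preceding underscore
	"_my_field_name_2": "XMyFieldName_2", // leading underscore becomes "X"
}
```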
-
-// CamelCaseSlice is like CamelCase, but the argument is a slice of strings to
-// be joined with "_".
-func CamelCaseSlice(elem []string) string { return CamelCase(strings.Join(elem, "_")) }
-
-// dottedSlice turns a sliced name into a dotted name.
-func dottedSlice(elem []string) string { return strings.Join(elem, ".") }
-
-// Is this field optional?
-func isOptional(field *descriptor.FieldDescriptorProto) bool {
- return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_OPTIONAL
-}
-
-// Is this field required?
-func isRequired(field *descriptor.FieldDescriptorProto) bool {
- return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_REQUIRED
-}
-
-// Is this field repeated?
-func isRepeated(field *descriptor.FieldDescriptorProto) bool {
- return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED
-}
-
-// Is this field a scalar numeric type?
-func isScalar(field *descriptor.FieldDescriptorProto) bool {
- if field.Type == nil {
- return false
- }
- switch *field.Type {
- case descriptor.FieldDescriptorProto_TYPE_DOUBLE,
- descriptor.FieldDescriptorProto_TYPE_FLOAT,
- descriptor.FieldDescriptorProto_TYPE_INT64,
- descriptor.FieldDescriptorProto_TYPE_UINT64,
- descriptor.FieldDescriptorProto_TYPE_INT32,
- descriptor.FieldDescriptorProto_TYPE_FIXED64,
- descriptor.FieldDescriptorProto_TYPE_FIXED32,
- descriptor.FieldDescriptorProto_TYPE_BOOL,
- descriptor.FieldDescriptorProto_TYPE_UINT32,
- descriptor.FieldDescriptorProto_TYPE_ENUM,
- descriptor.FieldDescriptorProto_TYPE_SFIXED32,
- descriptor.FieldDescriptorProto_TYPE_SFIXED64,
- descriptor.FieldDescriptorProto_TYPE_SINT32,
- descriptor.FieldDescriptorProto_TYPE_SINT64:
- return true
- default:
- return false
- }
-}
-
-// badToUnderscore is the mapping function used to generate Go names from package names,
-// which can be dotted in the input .proto file. It replaces non-identifier characters such as
-// dot or dash with underscore.
-func badToUnderscore(r rune) rune {
- if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' {
- return r
- }
- return '_'
-}
-
-// baseName returns the last path element of the name, with the last dotted suffix removed.
-func baseName(name string) string {
- // First, find the last element
- if i := strings.LastIndex(name, "/"); i >= 0 {
- name = name[i+1:]
- }
- // Now drop the suffix
- if i := strings.LastIndex(name, "."); i >= 0 {
- name = name[0:i]
- }
- return name
-}
-
-// The SourceCodeInfo message describes the location of elements of a parsed
-// .proto file by way of a "path", which is a sequence of integers that
-// describe the route from a FileDescriptorProto to the relevant submessage.
-// The path alternates between a field number of a repeated field, and an index
-// into that repeated field. The constants below define the field numbers that
-// are used.
-//
-// See descriptor.proto for more information about this.
-const (
- // tag numbers in FileDescriptorProto
- packagePath = 2 // package
- messagePath = 4 // message_type
- enumPath = 5 // enum_type
- // tag numbers in DescriptorProto
- messageFieldPath = 2 // field
- messageMessagePath = 3 // nested_type
- messageEnumPath = 4 // enum_type
- messageOneofPath = 8 // oneof_decl
- // tag numbers in EnumDescriptorProto
- enumValuePath = 2 // value
-)
-
-var supportTypeAliases bool
-
-func init() {
- for _, tag := range build.Default.ReleaseTags {
- if tag == "go1.9" {
- supportTypeAliases = true
- return
- }
- }
-}
diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap.go
deleted file mode 100644
index a9b61036c..000000000
--- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/internal/remap/remap.go
+++ /dev/null
@@ -1,117 +0,0 @@
-// Go support for Protocol Buffers - Google's data interchange format
-//
-// Copyright 2017 The Go Authors. All rights reserved.
-// https://github.com/golang/protobuf
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-/*
-Package remap handles tracking the locations of Go tokens in a source text
-across a rewrite by the Go formatter.
-*/
-package remap
-
-import (
- "fmt"
- "go/scanner"
- "go/token"
-)
-
-// A Location represents a span of byte offsets in the source text.
-type Location struct {
- Pos, End int // End is exclusive
-}
-
-// A Map represents a mapping between token locations in an input source text
-// and locations in the corresponding output text.
-type Map map[Location]Location
-
-// Find reports whether the specified span is recorded by m, and if so returns
-// the new location it was mapped to. If the input span was not found, the
-// returned location is the same as the input.
-func (m Map) Find(pos, end int) (Location, bool) {
- key := Location{
- Pos: pos,
- End: end,
- }
- if loc, ok := m[key]; ok {
- return loc, true
- }
- return key, false
-}
-
-func (m Map) add(opos, oend, npos, nend int) {
- m[Location{Pos: opos, End: oend}] = Location{Pos: npos, End: nend}
-}
-
-// Compute constructs a location mapping from input to output. An error is
-// reported if any of the tokens of output cannot be mapped.
-func Compute(input, output []byte) (Map, error) {
- itok := tokenize(input)
- otok := tokenize(output)
- if len(itok) != len(otok) {
- return nil, fmt.Errorf("wrong number of tokens, %d ≠ %d", len(itok), len(otok))
- }
- m := make(Map)
- for i, ti := range itok {
- to := otok[i]
- if ti.Token != to.Token {
- return nil, fmt.Errorf("token %d type mismatch: %s ≠ %s", i+1, ti, to)
- }
- m.add(ti.pos, ti.end, to.pos, to.end)
- }
- return m, nil
-}
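A minimal sketch of how a caller inside protoc-gen-go might use Compute: format a buffer with go/format and map token spans across the rewrite. The import path assumes the caller sits under protoc-gen-go, since this package is internal.

```go
package main

import (
	"fmt"
	"go/format"

	"github.com/golang/protobuf/protoc-gen-go/generator/internal/remap"
)

func main() {
	src := []byte("package p\nfunc  f( )  {}\n")

	out, err := format.Source(src) // gofmt-style rewrite of the source
	if err != nil {
		panic(err)
	}

	// Compute succeeds because formatting preserves the token stream
	// (auto-inserted semicolons are skipped by tokenize on both sides).
	m, err := remap.Compute(src, out)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%d token spans mapped across the rewrite\n", len(m))
}
```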
-
-// tokinfo records the span and type of a source token.
-type tokinfo struct {
- pos, end int
- token.Token
-}
-
-func tokenize(src []byte) []tokinfo {
- fs := token.NewFileSet()
- var s scanner.Scanner
- s.Init(fs.AddFile("src", fs.Base(), len(src)), src, nil, scanner.ScanComments)
- var info []tokinfo
- for {
- pos, next, lit := s.Scan()
- switch next {
- case token.SEMICOLON:
- continue
- }
- info = append(info, tokinfo{
- pos: int(pos - 1),
- end: int(pos + token.Pos(len(lit)) - 1),
- Token: next,
- })
- if next == token.EOF {
- break
- }
- }
- return info
-}
diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go
deleted file mode 100644
index 61bfc10e0..000000000
--- a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go
+++ /dev/null
@@ -1,369 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: google/protobuf/compiler/plugin.proto
-
-/*
-Package plugin_go is a generated protocol buffer package.
-
-It is generated from these files:
- google/protobuf/compiler/plugin.proto
-
-It has these top-level messages:
- Version
- CodeGeneratorRequest
- CodeGeneratorResponse
-*/
-package plugin_go
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
-
-// The version number of protocol compiler.
-type Version struct {
- Major *int32 `protobuf:"varint,1,opt,name=major" json:"major,omitempty"`
- Minor *int32 `protobuf:"varint,2,opt,name=minor" json:"minor,omitempty"`
- Patch *int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"`
- // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
- // be empty for mainline stable releases.
- Suffix *string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Version) Reset() { *m = Version{} }
-func (m *Version) String() string { return proto.CompactTextString(m) }
-func (*Version) ProtoMessage() {}
-func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
-func (m *Version) Unmarshal(b []byte) error {
- return xxx_messageInfo_Version.Unmarshal(m, b)
-}
-func (m *Version) Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Version.Marshal(b, m, deterministic)
-}
-func (dst *Version) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Version.Merge(dst, src)
-}
-func (m *Version) XXX_Size() int {
- return xxx_messageInfo_Version.Size(m)
-}
-func (m *Version) XXX_DiscardUnknown() {
- xxx_messageInfo_Version.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Version proto.InternalMessageInfo
-
-func (m *Version) GetMajor() int32 {
- if m != nil && m.Major != nil {
- return *m.Major
- }
- return 0
-}
-
-func (m *Version) GetMinor() int32 {
- if m != nil && m.Minor != nil {
- return *m.Minor
- }
- return 0
-}
-
-func (m *Version) GetPatch() int32 {
- if m != nil && m.Patch != nil {
- return *m.Patch
- }
- return 0
-}
-
-func (m *Version) GetSuffix() string {
- if m != nil && m.Suffix != nil {
- return *m.Suffix
- }
- return ""
-}
-
-// An encoded CodeGeneratorRequest is written to the plugin's stdin.
-type CodeGeneratorRequest struct {
- // The .proto files that were explicitly listed on the command-line. The
- // code generator should generate code only for these files. Each file's
- // descriptor will be included in proto_file, below.
- FileToGenerate []string `protobuf:"bytes,1,rep,name=file_to_generate,json=fileToGenerate" json:"file_to_generate,omitempty"`
- // The generator parameter passed on the command-line.
- Parameter *string `protobuf:"bytes,2,opt,name=parameter" json:"parameter,omitempty"`
- // FileDescriptorProtos for all files in files_to_generate and everything
- // they import. The files will appear in topological order, so each file
- // appears before any file that imports it.
- //
- // protoc guarantees that all proto_files will be written after
- // the fields above, even though this is not technically guaranteed by the
- // protobuf wire format. This theoretically could allow a plugin to stream
- // in the FileDescriptorProtos and handle them one by one rather than read
- // the entire set into memory at once. However, as of this writing, this
- // is not similarly optimized on protoc's end -- it will store all fields in
- // memory at once before sending them to the plugin.
- //
- // Type names of fields and extensions in the FileDescriptorProto are always
- // fully qualified.
- ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file,json=protoFile" json:"proto_file,omitempty"`
- // The version number of protocol compiler.
- CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *CodeGeneratorRequest) Reset() { *m = CodeGeneratorRequest{} }
-func (m *CodeGeneratorRequest) String() string { return proto.CompactTextString(m) }
-func (*CodeGeneratorRequest) ProtoMessage() {}
-func (*CodeGeneratorRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
-func (m *CodeGeneratorRequest) Unmarshal(b []byte) error {
- return xxx_messageInfo_CodeGeneratorRequest.Unmarshal(m, b)
-}
-func (m *CodeGeneratorRequest) Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_CodeGeneratorRequest.Marshal(b, m, deterministic)
-}
-func (dst *CodeGeneratorRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_CodeGeneratorRequest.Merge(dst, src)
-}
-func (m *CodeGeneratorRequest) XXX_Size() int {
- return xxx_messageInfo_CodeGeneratorRequest.Size(m)
-}
-func (m *CodeGeneratorRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_CodeGeneratorRequest.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_CodeGeneratorRequest proto.InternalMessageInfo
-
-func (m *CodeGeneratorRequest) GetFileToGenerate() []string {
- if m != nil {
- return m.FileToGenerate
- }
- return nil
-}
-
-func (m *CodeGeneratorRequest) GetParameter() string {
- if m != nil && m.Parameter != nil {
- return *m.Parameter
- }
- return ""
-}
-
-func (m *CodeGeneratorRequest) GetProtoFile() []*google_protobuf.FileDescriptorProto {
- if m != nil {
- return m.ProtoFile
- }
- return nil
-}
-
-func (m *CodeGeneratorRequest) GetCompilerVersion() *Version {
- if m != nil {
- return m.CompilerVersion
- }
- return nil
-}
-
-// The plugin writes an encoded CodeGeneratorResponse to stdout.
-type CodeGeneratorResponse struct {
- // Error message. If non-empty, code generation failed. The plugin process
- // should exit with status code zero even if it reports an error in this way.
- //
- // This should be used to indicate errors in .proto files which prevent the
- // code generator from generating correct code. Errors which indicate a
- // problem in protoc itself -- such as the input CodeGeneratorRequest being
- // unparseable -- should be reported by writing a message to stderr and
- // exiting with a non-zero status code.
- Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"`
- File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *CodeGeneratorResponse) Reset() { *m = CodeGeneratorResponse{} }
-func (m *CodeGeneratorResponse) String() string { return proto.CompactTextString(m) }
-func (*CodeGeneratorResponse) ProtoMessage() {}
-func (*CodeGeneratorResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
-func (m *CodeGeneratorResponse) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_CodeGeneratorResponse.Unmarshal(m, b)
-}
-func (m *CodeGeneratorResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_CodeGeneratorResponse.Marshal(b, m, deterministic)
-}
-func (dst *CodeGeneratorResponse) XXX_Merge(src proto.Message) {
- xxx_messageInfo_CodeGeneratorResponse.Merge(dst, src)
-}
-func (m *CodeGeneratorResponse) XXX_Size() int {
- return xxx_messageInfo_CodeGeneratorResponse.Size(m)
-}
-func (m *CodeGeneratorResponse) XXX_DiscardUnknown() {
- xxx_messageInfo_CodeGeneratorResponse.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_CodeGeneratorResponse proto.InternalMessageInfo
-
-func (m *CodeGeneratorResponse) GetError() string {
- if m != nil && m.Error != nil {
- return *m.Error
- }
- return ""
-}
-
-func (m *CodeGeneratorResponse) GetFile() []*CodeGeneratorResponse_File {
- if m != nil {
- return m.File
- }
- return nil
-}
-
-// Represents a single generated file.
-type CodeGeneratorResponse_File struct {
- // The file name, relative to the output directory. The name must not
- // contain "." or ".." components and must be relative, not be absolute (so,
- // the file cannot lie outside the output directory). "/" must be used as
- // the path separator, not "\".
- //
- // If the name is omitted, the content will be appended to the previous
- // file. This allows the generator to break large files into small chunks,
- // and allows the generated text to be streamed back to protoc so that large
- // files need not reside completely in memory at one time. Note that as of
- // this writing protoc does not optimize for this -- it will read the entire
- // CodeGeneratorResponse before writing files to disk.
- Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
- // If non-empty, indicates that the named file should already exist, and the
- // content here is to be inserted into that file at a defined insertion
- // point. This feature allows a code generator to extend the output
- // produced by another code generator. The original generator may provide
- // insertion points by placing special annotations in the file that look
- // like:
- // @@protoc_insertion_point(NAME)
- // The annotation can have arbitrary text before and after it on the line,
- // which allows it to be placed in a comment. NAME should be replaced with
- // an identifier naming the point -- this is what other generators will use
- // as the insertion_point. Code inserted at this point will be placed
- // immediately above the line containing the insertion point (thus multiple
- // insertions to the same point will come out in the order they were added).
- // The double-@ is intended to make it unlikely that the generated code
- // could contain things that look like insertion points by accident.
- //
- // For example, the C++ code generator places the following line in the
- // .pb.h files that it generates:
- // // @@protoc_insertion_point(namespace_scope)
- // This line appears within the scope of the file's package namespace, but
- // outside of any particular class. Another plugin can then specify the
- // insertion_point "namespace_scope" to generate additional classes or
- // other declarations that should be placed in this scope.
- //
- // Note that if the line containing the insertion point begins with
- // whitespace, the same whitespace will be added to every line of the
- // inserted text. This is useful for languages like Python, where
- // indentation matters. In these languages, the insertion point comment
- // should be indented the same amount as any inserted code will need to be
- // in order to work correctly in that context.
- //
- // The code generator that generates the initial file and the one which
- // inserts into it must both run as part of a single invocation of protoc.
- // Code generators are executed in the order in which they appear on the
- // command line.
- //
- // If |insertion_point| is present, |name| must also be present.
- InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point,json=insertionPoint" json:"insertion_point,omitempty"`
- // The file contents.
- Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *CodeGeneratorResponse_File) Reset() { *m = CodeGeneratorResponse_File{} }
-func (m *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(m) }
-func (*CodeGeneratorResponse_File) ProtoMessage() {}
-func (*CodeGeneratorResponse_File) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} }
-func (m *CodeGeneratorResponse_File) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_CodeGeneratorResponse_File.Unmarshal(m, b)
-}
-func (m *CodeGeneratorResponse_File) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_CodeGeneratorResponse_File.Marshal(b, m, deterministic)
-}
-func (dst *CodeGeneratorResponse_File) XXX_Merge(src proto.Message) {
- xxx_messageInfo_CodeGeneratorResponse_File.Merge(dst, src)
-}
-func (m *CodeGeneratorResponse_File) XXX_Size() int {
- return xxx_messageInfo_CodeGeneratorResponse_File.Size(m)
-}
-func (m *CodeGeneratorResponse_File) XXX_DiscardUnknown() {
- xxx_messageInfo_CodeGeneratorResponse_File.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_CodeGeneratorResponse_File proto.InternalMessageInfo
-
-func (m *CodeGeneratorResponse_File) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *CodeGeneratorResponse_File) GetInsertionPoint() string {
- if m != nil && m.InsertionPoint != nil {
- return *m.InsertionPoint
- }
- return ""
-}
-
-func (m *CodeGeneratorResponse_File) GetContent() string {
- if m != nil && m.Content != nil {
- return *m.Content
- }
- return ""
-}
-
-func init() {
- proto.RegisterType((*Version)(nil), "google.protobuf.compiler.Version")
- proto.RegisterType((*CodeGeneratorRequest)(nil), "google.protobuf.compiler.CodeGeneratorRequest")
- proto.RegisterType((*CodeGeneratorResponse)(nil), "google.protobuf.compiler.CodeGeneratorResponse")
- proto.RegisterType((*CodeGeneratorResponse_File)(nil), "google.protobuf.compiler.CodeGeneratorResponse.File")
-}
-
-func init() { proto.RegisterFile("google/protobuf/compiler/plugin.proto", fileDescriptor0) }
-
-var fileDescriptor0 = []byte{
- // 417 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xcf, 0x6a, 0x14, 0x41,
- 0x10, 0xc6, 0x19, 0x77, 0x63, 0x98, 0x8a, 0x64, 0x43, 0x13, 0xa5, 0x09, 0x39, 0x8c, 0x8b, 0xe2,
- 0x5c, 0x32, 0x0b, 0xc1, 0x8b, 0x78, 0x4b, 0x44, 0x3d, 0x78, 0x58, 0x1a, 0xf1, 0x20, 0xc8, 0x30,
- 0x99, 0xd4, 0x74, 0x5a, 0x66, 0xba, 0xc6, 0xee, 0x1e, 0xf1, 0x49, 0x7d, 0x0f, 0xdf, 0x40, 0xfa,
- 0xcf, 0x24, 0xb2, 0xb8, 0xa7, 0xee, 0xef, 0x57, 0xd5, 0xd5, 0x55, 0x1f, 0x05, 0x2f, 0x25, 0x91,
- 0xec, 0x71, 0x33, 0x1a, 0x72, 0x74, 0x33, 0x75, 0x9b, 0x96, 0x86, 0x51, 0xf5, 0x68, 0x36, 0x63,
- 0x3f, 0x49, 0xa5, 0xab, 0x10, 0x60, 0x3c, 0xa6, 0x55, 0x73, 0x5a, 0x35, 0xa7, 0x9d, 0x15, 0xbb,
- 0x05, 0x6e, 0xd1, 0xb6, 0x46, 0x8d, 0x8e, 0x4c, 0xcc, 0x5e, 0xb7, 0x70, 0xf8, 0x05, 0x8d, 0x55,
- 0xa4, 0xd9, 0x29, 0x1c, 0x0c, 0xcd, 0x77, 0x32, 0x3c, 0x2b, 0xb2, 0xf2, 0x40, 0x44, 0x11, 0xa8,
- 0xd2, 0x64, 0xf8, 0xa3, 0x44, 0xbd, 0xf0, 0x74, 0x6c, 0x5c, 0x7b, 0xc7, 0x17, 0x91, 0x06, 0xc1,
- 0x9e, 0xc1, 0x63, 0x3b, 0x75, 0x9d, 0xfa, 0xc5, 0x97, 0x45, 0x56, 0xe6, 0x22, 0xa9, 0xf5, 0x9f,
- 0x0c, 0x4e, 0xaf, 0xe9, 0x16, 0x3f, 0xa0, 0x46, 0xd3, 0x38, 0x32, 0x02, 0x7f, 0x4c, 0x68, 0x1d,
- 0x2b, 0xe1, 0xa4, 0x53, 0x3d, 0xd6, 0x8e, 0x6a, 0x19, 0x63, 0xc8, 0xb3, 0x62, 0x51, 0xe6, 0xe2,
- 0xd8, 0xf3, 0xcf, 0x94, 0x5e, 0x20, 0x3b, 0x87, 0x7c, 0x6c, 0x4c, 0x33, 0xa0, 0xc3, 0xd8, 0x4a,
- 0x2e, 0x1e, 0x00, 0xbb, 0x06, 0x08, 0xe3, 0xd4, 0xfe, 0x15, 0x5f, 0x15, 0x8b, 0xf2, 0xe8, 0xf2,
- 0x45, 0xb5, 0x6b, 0xcb, 0x7b, 0xd5, 0xe3, 0xbb, 0x7b, 0x03, 0xb6, 0x1e, 0x8b, 0x3c, 0x44, 0x7d,
- 0x84, 0x7d, 0x82, 0x93, 0xd9, 0xb8, 0xfa, 0x67, 0xf4, 0x24, 0x8c, 0x77, 0x74, 0xf9, 0xbc, 0xda,
- 0xe7, 0x70, 0x95, 0xcc, 0x13, 0xab, 0x99, 0x24, 0xb0, 0xfe, 0x9d, 0xc1, 0xd3, 0x9d, 0x99, 0xed,
- 0x48, 0xda, 0xa2, 0xf7, 0x0e, 0x8d, 0x49, 0x3e, 0xe7, 0x22, 0x0a, 0xf6, 0x11, 0x96, 0xff, 0x34,
- 0xff, 0x7a, 0xff, 0x8f, 0xff, 0x2d, 0x1a, 0x66, 0x13, 0xa1, 0xc2, 0xd9, 0x37, 0x58, 0x86, 0x79,
- 0x18, 0x2c, 0x75, 0x33, 0x60, 0xfa, 0x26, 0xdc, 0xd9, 0x2b, 0x58, 0x29, 0x6d, 0xd1, 0x38, 0x45,
- 0xba, 0x1e, 0x49, 0x69, 0x97, 0xcc, 0x3c, 0xbe, 0xc7, 0x5b, 0x4f, 0x19, 0x87, 0xc3, 0x96, 0xb4,
- 0x43, 0xed, 0xf8, 0x2a, 0x24, 0xcc, 0xf2, 0x4a, 0xc2, 0x79, 0x4b, 0xc3, 0xde, 0xfe, 0xae, 0x9e,
- 0x6c, 0xc3, 0x6e, 0x06, 0x7b, 0xed, 0xd7, 0x37, 0x52, 0xb9, 0xbb, 0xe9, 0xc6, 0x87, 0x37, 0x92,
- 0xfa, 0x46, 0xcb, 0x87, 0x65, 0x0c, 0x97, 0xf6, 0x42, 0xa2, 0xbe, 0x90, 0x94, 0x56, 0xfa, 0x6d,
- 0x3c, 0x6a, 0x49, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xf7, 0x15, 0x40, 0xc5, 0xfe, 0x02, 0x00,
- 0x00,
-}
diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.golden b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.golden
deleted file mode 100644
index 8953d0ff8..000000000
--- a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.golden
+++ /dev/null
@@ -1,83 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google/protobuf/compiler/plugin.proto
-// DO NOT EDIT!
-
-package google_protobuf_compiler
-
-import proto "github.com/golang/protobuf/proto"
-import "math"
-import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor"
-
-// Reference proto and math imports to suppress error if they are not otherwise used.
-var _ = proto.GetString
-var _ = math.Inf
-
-type CodeGeneratorRequest struct {
- FileToGenerate []string `protobuf:"bytes,1,rep,name=file_to_generate" json:"file_to_generate,omitempty"`
- Parameter *string `protobuf:"bytes,2,opt,name=parameter" json:"parameter,omitempty"`
- ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file" json:"proto_file,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (this *CodeGeneratorRequest) Reset() { *this = CodeGeneratorRequest{} }
-func (this *CodeGeneratorRequest) String() string { return proto.CompactTextString(this) }
-func (*CodeGeneratorRequest) ProtoMessage() {}
-
-func (this *CodeGeneratorRequest) GetParameter() string {
- if this != nil && this.Parameter != nil {
- return *this.Parameter
- }
- return ""
-}
-
-type CodeGeneratorResponse struct {
- Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"`
- File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (this *CodeGeneratorResponse) Reset() { *this = CodeGeneratorResponse{} }
-func (this *CodeGeneratorResponse) String() string { return proto.CompactTextString(this) }
-func (*CodeGeneratorResponse) ProtoMessage() {}
-
-func (this *CodeGeneratorResponse) GetError() string {
- if this != nil && this.Error != nil {
- return *this.Error
- }
- return ""
-}
-
-type CodeGeneratorResponse_File struct {
- Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
- InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point" json:"insertion_point,omitempty"`
- Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (this *CodeGeneratorResponse_File) Reset() { *this = CodeGeneratorResponse_File{} }
-func (this *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(this) }
-func (*CodeGeneratorResponse_File) ProtoMessage() {}
-
-func (this *CodeGeneratorResponse_File) GetName() string {
- if this != nil && this.Name != nil {
- return *this.Name
- }
- return ""
-}
-
-func (this *CodeGeneratorResponse_File) GetInsertionPoint() string {
- if this != nil && this.InsertionPoint != nil {
- return *this.InsertionPoint
- }
- return ""
-}
-
-func (this *CodeGeneratorResponse_File) GetContent() string {
- if this != nil && this.Content != nil {
- return *this.Content
- }
- return ""
-}
-
-func init() {
-}
diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto
deleted file mode 100644
index 5b5574529..000000000
--- a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto
+++ /dev/null
@@ -1,167 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: kenton@google.com (Kenton Varda)
-//
-// WARNING: The plugin interface is currently EXPERIMENTAL and is subject to
-// change.
-//
-// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is
-// just a program that reads a CodeGeneratorRequest from stdin and writes a
-// CodeGeneratorResponse to stdout.
-//
-// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead
-// of dealing with the raw protocol defined here.
-//
-// A plugin executable needs only to be placed somewhere in the path. The
-// plugin should be named "protoc-gen-$NAME", and will then be used when the
-// flag "--${NAME}_out" is passed to protoc.
-
-syntax = "proto2";
-package google.protobuf.compiler;
-option java_package = "com.google.protobuf.compiler";
-option java_outer_classname = "PluginProtos";
-
-option go_package = "github.com/golang/protobuf/protoc-gen-go/plugin;plugin_go";
-
-import "google/protobuf/descriptor.proto";
-
-// The version number of protocol compiler.
-message Version {
- optional int32 major = 1;
- optional int32 minor = 2;
- optional int32 patch = 3;
- // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
- // be empty for mainline stable releases.
- optional string suffix = 4;
-}
-
-// An encoded CodeGeneratorRequest is written to the plugin's stdin.
-message CodeGeneratorRequest {
- // The .proto files that were explicitly listed on the command-line. The
- // code generator should generate code only for these files. Each file's
- // descriptor will be included in proto_file, below.
- repeated string file_to_generate = 1;
-
- // The generator parameter passed on the command-line.
- optional string parameter = 2;
-
- // FileDescriptorProtos for all files in files_to_generate and everything
- // they import. The files will appear in topological order, so each file
- // appears before any file that imports it.
- //
- // protoc guarantees that all proto_files will be written after
- // the fields above, even though this is not technically guaranteed by the
- // protobuf wire format. This theoretically could allow a plugin to stream
- // in the FileDescriptorProtos and handle them one by one rather than read
- // the entire set into memory at once. However, as of this writing, this
- // is not similarly optimized on protoc's end -- it will store all fields in
- // memory at once before sending them to the plugin.
- //
- // Type names of fields and extensions in the FileDescriptorProto are always
- // fully qualified.
- repeated FileDescriptorProto proto_file = 15;
-
- // The version number of protocol compiler.
- optional Version compiler_version = 3;
-
-}
-
-// The plugin writes an encoded CodeGeneratorResponse to stdout.
-message CodeGeneratorResponse {
- // Error message. If non-empty, code generation failed. The plugin process
- // should exit with status code zero even if it reports an error in this way.
- //
- // This should be used to indicate errors in .proto files which prevent the
- // code generator from generating correct code. Errors which indicate a
- // problem in protoc itself -- such as the input CodeGeneratorRequest being
- // unparseable -- should be reported by writing a message to stderr and
- // exiting with a non-zero status code.
- optional string error = 1;
-
- // Represents a single generated file.
- message File {
- // The file name, relative to the output directory. The name must not
- // contain "." or ".." components and must be relative, not be absolute (so,
- // the file cannot lie outside the output directory). "/" must be used as
- // the path separator, not "\".
- //
- // If the name is omitted, the content will be appended to the previous
- // file. This allows the generator to break large files into small chunks,
- // and allows the generated text to be streamed back to protoc so that large
- // files need not reside completely in memory at one time. Note that as of
- // this writing protoc does not optimize for this -- it will read the entire
- // CodeGeneratorResponse before writing files to disk.
- optional string name = 1;
-
- // If non-empty, indicates that the named file should already exist, and the
- // content here is to be inserted into that file at a defined insertion
- // point. This feature allows a code generator to extend the output
- // produced by another code generator. The original generator may provide
- // insertion points by placing special annotations in the file that look
- // like:
- // @@protoc_insertion_point(NAME)
- // The annotation can have arbitrary text before and after it on the line,
- // which allows it to be placed in a comment. NAME should be replaced with
- // an identifier naming the point -- this is what other generators will use
- // as the insertion_point. Code inserted at this point will be placed
- // immediately above the line containing the insertion point (thus multiple
- // insertions to the same point will come out in the order they were added).
- // The double-@ is intended to make it unlikely that the generated code
- // could contain things that look like insertion points by accident.
- //
- // For example, the C++ code generator places the following line in the
- // .pb.h files that it generates:
- // // @@protoc_insertion_point(namespace_scope)
- // This line appears within the scope of the file's package namespace, but
- // outside of any particular class. Another plugin can then specify the
- // insertion_point "namespace_scope" to generate additional classes or
- // other declarations that should be placed in this scope.
- //
- // Note that if the line containing the insertion point begins with
- // whitespace, the same whitespace will be added to every line of the
- // inserted text. This is useful for languages like Python, where
- // indentation matters. In these languages, the insertion point comment
- // should be indented the same amount as any inserted code will need to be
- // in order to work correctly in that context.
- //
- // The code generator that generates the initial file and the one which
- // inserts into it must both run as part of a single invocation of protoc.
- // Code generators are executed in the order in which they appear on the
- // command line.
- //
- // If |insertion_point| is present, |name| must also be present.
- optional string insertion_point = 2;
-
- // The file contents.
- optional string content = 15;
- }
- repeated File file = 15;
-}
diff --git a/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go b/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go
deleted file mode 100644
index 33daa73dd..000000000
--- a/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go
+++ /dev/null
@@ -1,336 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: google/protobuf/struct.proto
-
-package structpb
-
-import (
- fmt "fmt"
- proto "github.com/golang/protobuf/proto"
- math "math"
-)
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
-
-// `NullValue` is a singleton enumeration to represent the null value for the
-// `Value` type union.
-//
-// The JSON representation for `NullValue` is JSON `null`.
-type NullValue int32
-
-const (
- // Null value.
- NullValue_NULL_VALUE NullValue = 0
-)
-
-var NullValue_name = map[int32]string{
- 0: "NULL_VALUE",
-}
-
-var NullValue_value = map[string]int32{
- "NULL_VALUE": 0,
-}
-
-func (x NullValue) String() string {
- return proto.EnumName(NullValue_name, int32(x))
-}
-
-func (NullValue) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_df322afd6c9fb402, []int{0}
-}
-
-func (NullValue) XXX_WellKnownType() string { return "NullValue" }
-
-// `Struct` represents a structured data value, consisting of fields
-// which map to dynamically typed values. In some languages, `Struct`
-// might be supported by a native representation. For example, in
-// scripting languages like JS a struct is represented as an
-// object. The details of that representation are described together
-// with the proto support for the language.
-//
-// The JSON representation for `Struct` is JSON object.
-type Struct struct {
- // Unordered map of dynamically typed values.
- Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Struct) Reset() { *m = Struct{} }
-func (m *Struct) String() string { return proto.CompactTextString(m) }
-func (*Struct) ProtoMessage() {}
-func (*Struct) Descriptor() ([]byte, []int) {
- return fileDescriptor_df322afd6c9fb402, []int{0}
-}
-
-func (*Struct) XXX_WellKnownType() string { return "Struct" }
-
-func (m *Struct) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Struct.Unmarshal(m, b)
-}
-func (m *Struct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Struct.Marshal(b, m, deterministic)
-}
-func (m *Struct) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Struct.Merge(m, src)
-}
-func (m *Struct) XXX_Size() int {
- return xxx_messageInfo_Struct.Size(m)
-}
-func (m *Struct) XXX_DiscardUnknown() {
- xxx_messageInfo_Struct.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Struct proto.InternalMessageInfo
-
-func (m *Struct) GetFields() map[string]*Value {
- if m != nil {
- return m.Fields
- }
- return nil
-}
-
-// `Value` represents a dynamically typed value which can be either
-// null, a number, a string, a boolean, a recursive struct value, or a
-// list of values. A producer of value is expected to set one of that
-// variants, absence of any variant indicates an error.
-//
-// The JSON representation for `Value` is JSON value.
-type Value struct {
- // The kind of value.
- //
- // Types that are valid to be assigned to Kind:
- // *Value_NullValue
- // *Value_NumberValue
- // *Value_StringValue
- // *Value_BoolValue
- // *Value_StructValue
- // *Value_ListValue
- Kind isValue_Kind `protobuf_oneof:"kind"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Value) Reset() { *m = Value{} }
-func (m *Value) String() string { return proto.CompactTextString(m) }
-func (*Value) ProtoMessage() {}
-func (*Value) Descriptor() ([]byte, []int) {
- return fileDescriptor_df322afd6c9fb402, []int{1}
-}
-
-func (*Value) XXX_WellKnownType() string { return "Value" }
-
-func (m *Value) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Value.Unmarshal(m, b)
-}
-func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Value.Marshal(b, m, deterministic)
-}
-func (m *Value) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Value.Merge(m, src)
-}
-func (m *Value) XXX_Size() int {
- return xxx_messageInfo_Value.Size(m)
-}
-func (m *Value) XXX_DiscardUnknown() {
- xxx_messageInfo_Value.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Value proto.InternalMessageInfo
-
-type isValue_Kind interface {
- isValue_Kind()
-}
-
-type Value_NullValue struct {
- NullValue NullValue `protobuf:"varint,1,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"`
-}
-
-type Value_NumberValue struct {
- NumberValue float64 `protobuf:"fixed64,2,opt,name=number_value,json=numberValue,proto3,oneof"`
-}
-
-type Value_StringValue struct {
- StringValue string `protobuf:"bytes,3,opt,name=string_value,json=stringValue,proto3,oneof"`
-}
-
-type Value_BoolValue struct {
- BoolValue bool `protobuf:"varint,4,opt,name=bool_value,json=boolValue,proto3,oneof"`
-}
-
-type Value_StructValue struct {
- StructValue *Struct `protobuf:"bytes,5,opt,name=struct_value,json=structValue,proto3,oneof"`
-}
-
-type Value_ListValue struct {
- ListValue *ListValue `protobuf:"bytes,6,opt,name=list_value,json=listValue,proto3,oneof"`
-}
-
-func (*Value_NullValue) isValue_Kind() {}
-
-func (*Value_NumberValue) isValue_Kind() {}
-
-func (*Value_StringValue) isValue_Kind() {}
-
-func (*Value_BoolValue) isValue_Kind() {}
-
-func (*Value_StructValue) isValue_Kind() {}
-
-func (*Value_ListValue) isValue_Kind() {}
-
-func (m *Value) GetKind() isValue_Kind {
- if m != nil {
- return m.Kind
- }
- return nil
-}
-
-func (m *Value) GetNullValue() NullValue {
- if x, ok := m.GetKind().(*Value_NullValue); ok {
- return x.NullValue
- }
- return NullValue_NULL_VALUE
-}
-
-func (m *Value) GetNumberValue() float64 {
- if x, ok := m.GetKind().(*Value_NumberValue); ok {
- return x.NumberValue
- }
- return 0
-}
-
-func (m *Value) GetStringValue() string {
- if x, ok := m.GetKind().(*Value_StringValue); ok {
- return x.StringValue
- }
- return ""
-}
-
-func (m *Value) GetBoolValue() bool {
- if x, ok := m.GetKind().(*Value_BoolValue); ok {
- return x.BoolValue
- }
- return false
-}
-
-func (m *Value) GetStructValue() *Struct {
- if x, ok := m.GetKind().(*Value_StructValue); ok {
- return x.StructValue
- }
- return nil
-}
-
-func (m *Value) GetListValue() *ListValue {
- if x, ok := m.GetKind().(*Value_ListValue); ok {
- return x.ListValue
- }
- return nil
-}
-
-// XXX_OneofWrappers is for the internal use of the proto package.
-func (*Value) XXX_OneofWrappers() []interface{} {
- return []interface{}{
- (*Value_NullValue)(nil),
- (*Value_NumberValue)(nil),
- (*Value_StringValue)(nil),
- (*Value_BoolValue)(nil),
- (*Value_StructValue)(nil),
- (*Value_ListValue)(nil),
- }
-}
-
-// `ListValue` is a wrapper around a repeated field of values.
-//
-// The JSON representation for `ListValue` is JSON array.
-type ListValue struct {
- // Repeated field of dynamically typed values.
- Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *ListValue) Reset() { *m = ListValue{} }
-func (m *ListValue) String() string { return proto.CompactTextString(m) }
-func (*ListValue) ProtoMessage() {}
-func (*ListValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_df322afd6c9fb402, []int{2}
-}
-
-func (*ListValue) XXX_WellKnownType() string { return "ListValue" }
-
-func (m *ListValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ListValue.Unmarshal(m, b)
-}
-func (m *ListValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ListValue.Marshal(b, m, deterministic)
-}
-func (m *ListValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ListValue.Merge(m, src)
-}
-func (m *ListValue) XXX_Size() int {
- return xxx_messageInfo_ListValue.Size(m)
-}
-func (m *ListValue) XXX_DiscardUnknown() {
- xxx_messageInfo_ListValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_ListValue proto.InternalMessageInfo
-
-func (m *ListValue) GetValues() []*Value {
- if m != nil {
- return m.Values
- }
- return nil
-}
-
-func init() {
- proto.RegisterEnum("google.protobuf.NullValue", NullValue_name, NullValue_value)
- proto.RegisterType((*Struct)(nil), "google.protobuf.Struct")
- proto.RegisterMapType((map[string]*Value)(nil), "google.protobuf.Struct.FieldsEntry")
- proto.RegisterType((*Value)(nil), "google.protobuf.Value")
- proto.RegisterType((*ListValue)(nil), "google.protobuf.ListValue")
-}
-
-func init() { proto.RegisterFile("google/protobuf/struct.proto", fileDescriptor_df322afd6c9fb402) }
-
-var fileDescriptor_df322afd6c9fb402 = []byte{
- // 417 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0x41, 0x8b, 0xd3, 0x40,
- 0x14, 0xc7, 0x3b, 0xc9, 0x36, 0x98, 0x17, 0x59, 0x97, 0x11, 0xb4, 0xac, 0xa2, 0xa1, 0x7b, 0x09,
- 0x22, 0x29, 0xd6, 0x8b, 0x18, 0x2f, 0x06, 0xd6, 0x5d, 0x30, 0x2c, 0x31, 0xba, 0x15, 0xbc, 0x94,
- 0x26, 0x4d, 0x63, 0xe8, 0x74, 0x26, 0x24, 0x33, 0x4a, 0x8f, 0x7e, 0x0b, 0xcf, 0x1e, 0x3d, 0xfa,
- 0xe9, 0x3c, 0xca, 0xcc, 0x24, 0xa9, 0xb4, 0xf4, 0x94, 0xbc, 0xf7, 0x7e, 0xef, 0x3f, 0xef, 0xff,
- 0x66, 0xe0, 0x71, 0xc1, 0x58, 0x41, 0xf2, 0x49, 0x55, 0x33, 0xce, 0x52, 0xb1, 0x9a, 0x34, 0xbc,
- 0x16, 0x19, 0xf7, 0x55, 0x8c, 0xef, 0xe9, 0xaa, 0xdf, 0x55, 0xc7, 0x3f, 0x11, 0x58, 0x1f, 0x15,
- 0x81, 0x03, 0xb0, 0x56, 0x65, 0x4e, 0x96, 0xcd, 0x08, 0xb9, 0xa6, 0xe7, 0x4c, 0x2f, 0xfc, 0x3d,
- 0xd8, 0xd7, 0xa0, 0xff, 0x4e, 0x51, 0x97, 0x94, 0xd7, 0xdb, 0xa4, 0x6d, 0x39, 0xff, 0x00, 0xce,
- 0x7f, 0x69, 0x7c, 0x06, 0xe6, 0x3a, 0xdf, 0x8e, 0x90, 0x8b, 0x3c, 0x3b, 0x91, 0xbf, 0xf8, 0x39,
- 0x0c, 0xbf, 0x2d, 0x88, 0xc8, 0x47, 0x86, 0x8b, 0x3c, 0x67, 0xfa, 0xe0, 0x40, 0x7c, 0x26, 0xab,
- 0x89, 0x86, 0x5e, 0x1b, 0xaf, 0xd0, 0xf8, 0x8f, 0x01, 0x43, 0x95, 0xc4, 0x01, 0x00, 0x15, 0x84,
- 0xcc, 0xb5, 0x80, 0x14, 0x3d, 0x9d, 0x9e, 0x1f, 0x08, 0xdc, 0x08, 0x42, 0x14, 0x7f, 0x3d, 0x48,
- 0x6c, 0xda, 0x05, 0xf8, 0x02, 0xee, 0x52, 0xb1, 0x49, 0xf3, 0x7a, 0xbe, 0x3b, 0x1f, 0x5d, 0x0f,
- 0x12, 0x47, 0x67, 0x7b, 0xa8, 0xe1, 0x75, 0x49, 0x8b, 0x16, 0x32, 0xe5, 0xe0, 0x12, 0xd2, 0x59,
- 0x0d, 0x3d, 0x05, 0x48, 0x19, 0xeb, 0xc6, 0x38, 0x71, 0x91, 0x77, 0x47, 0x1e, 0x25, 0x73, 0x1a,
- 0x78, 0xa3, 0x54, 0x44, 0xc6, 0x5b, 0x64, 0xa8, 0xac, 0x3e, 0x3c, 0xb2, 0xc7, 0x56, 0x5e, 0x64,
- 0xbc, 0x77, 0x49, 0xca, 0xa6, 0xeb, 0xb5, 0x54, 0xef, 0xa1, 0xcb, 0xa8, 0x6c, 0x78, 0xef, 0x92,
- 0x74, 0x41, 0x68, 0xc1, 0xc9, 0xba, 0xa4, 0xcb, 0x71, 0x00, 0x76, 0x4f, 0x60, 0x1f, 0x2c, 0x25,
- 0xd6, 0xdd, 0xe8, 0xb1, 0xa5, 0xb7, 0xd4, 0xb3, 0x47, 0x60, 0xf7, 0x4b, 0xc4, 0xa7, 0x00, 0x37,
- 0xb7, 0x51, 0x34, 0x9f, 0xbd, 0x8d, 0x6e, 0x2f, 0xcf, 0x06, 0xe1, 0x0f, 0x04, 0xf7, 0x33, 0xb6,
- 0xd9, 0x97, 0x08, 0x1d, 0xed, 0x26, 0x96, 0x71, 0x8c, 0xbe, 0xbc, 0x28, 0x4a, 0xfe, 0x55, 0xa4,
- 0x7e, 0xc6, 0x36, 0x93, 0x82, 0x91, 0x05, 0x2d, 0x76, 0x4f, 0xb1, 0xe2, 0xdb, 0x2a, 0x6f, 0xda,
- 0x17, 0x19, 0xe8, 0x4f, 0x95, 0xfe, 0x45, 0xe8, 0x97, 0x61, 0x5e, 0xc5, 0xe1, 0x6f, 0xe3, 0xc9,
- 0x95, 0x16, 0x8f, 0xbb, 0xf9, 0x3e, 0xe7, 0x84, 0xbc, 0xa7, 0xec, 0x3b, 0xfd, 0x24, 0x3b, 0x53,
- 0x4b, 0x49, 0xbd, 0xfc, 0x17, 0x00, 0x00, 0xff, 0xff, 0xe8, 0x1b, 0x59, 0xf8, 0xe5, 0x02, 0x00,
- 0x00,
-}
diff --git a/vendor/github.com/golang/protobuf/ptypes/struct/struct.proto b/vendor/github.com/golang/protobuf/ptypes/struct/struct.proto
deleted file mode 100644
index 7d7808e7f..000000000
--- a/vendor/github.com/golang/protobuf/ptypes/struct/struct.proto
+++ /dev/null
@@ -1,96 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-syntax = "proto3";
-
-package google.protobuf;
-
-option csharp_namespace = "Google.Protobuf.WellKnownTypes";
-option cc_enable_arenas = true;
-option go_package = "github.com/golang/protobuf/ptypes/struct;structpb";
-option java_package = "com.google.protobuf";
-option java_outer_classname = "StructProto";
-option java_multiple_files = true;
-option objc_class_prefix = "GPB";
-
-
-// `Struct` represents a structured data value, consisting of fields
-// which map to dynamically typed values. In some languages, `Struct`
-// might be supported by a native representation. For example, in
-// scripting languages like JS a struct is represented as an
-// object. The details of that representation are described together
-// with the proto support for the language.
-//
-// The JSON representation for `Struct` is JSON object.
-message Struct {
- // Unordered map of dynamically typed values.
-  map<string, Value> fields = 1;
-}
-
-// `Value` represents a dynamically typed value which can be either
-// null, a number, a string, a boolean, a recursive struct value, or a
-// list of values. A producer of value is expected to set one of that
-// variants, absence of any variant indicates an error.
-//
-// The JSON representation for `Value` is JSON value.
-message Value {
- // The kind of value.
- oneof kind {
- // Represents a null value.
- NullValue null_value = 1;
- // Represents a double value.
- double number_value = 2;
- // Represents a string value.
- string string_value = 3;
- // Represents a boolean value.
- bool bool_value = 4;
- // Represents a structured value.
- Struct struct_value = 5;
- // Represents a repeated `Value`.
- ListValue list_value = 6;
- }
-}
-
-// `NullValue` is a singleton enumeration to represent the null value for the
-// `Value` type union.
-//
-// The JSON representation for `NullValue` is JSON `null`.
-enum NullValue {
- // Null value.
- NULL_VALUE = 0;
-}
-
-// `ListValue` is a wrapper around a repeated field of values.
-//
-// The JSON representation for `ListValue` is JSON array.
-message ListValue {
- // Repeated field of dynamically typed values.
- repeated Value values = 1;
-}
diff --git a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go
deleted file mode 100644
index add19a1ad..000000000
--- a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go
+++ /dev/null
@@ -1,461 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: google/protobuf/wrappers.proto
-
-package wrappers
-
-import (
- fmt "fmt"
- proto "github.com/golang/protobuf/proto"
- math "math"
-)
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
-
-// Wrapper message for `double`.
-//
-// The JSON representation for `DoubleValue` is JSON number.
-type DoubleValue struct {
- // The double value.
- Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *DoubleValue) Reset() { *m = DoubleValue{} }
-func (m *DoubleValue) String() string { return proto.CompactTextString(m) }
-func (*DoubleValue) ProtoMessage() {}
-func (*DoubleValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_5377b62bda767935, []int{0}
-}
-
-func (*DoubleValue) XXX_WellKnownType() string { return "DoubleValue" }
-
-func (m *DoubleValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_DoubleValue.Unmarshal(m, b)
-}
-func (m *DoubleValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_DoubleValue.Marshal(b, m, deterministic)
-}
-func (m *DoubleValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_DoubleValue.Merge(m, src)
-}
-func (m *DoubleValue) XXX_Size() int {
- return xxx_messageInfo_DoubleValue.Size(m)
-}
-func (m *DoubleValue) XXX_DiscardUnknown() {
- xxx_messageInfo_DoubleValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_DoubleValue proto.InternalMessageInfo
-
-func (m *DoubleValue) GetValue() float64 {
- if m != nil {
- return m.Value
- }
- return 0
-}
-
-// Wrapper message for `float`.
-//
-// The JSON representation for `FloatValue` is JSON number.
-type FloatValue struct {
- // The float value.
- Value float32 `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *FloatValue) Reset() { *m = FloatValue{} }
-func (m *FloatValue) String() string { return proto.CompactTextString(m) }
-func (*FloatValue) ProtoMessage() {}
-func (*FloatValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_5377b62bda767935, []int{1}
-}
-
-func (*FloatValue) XXX_WellKnownType() string { return "FloatValue" }
-
-func (m *FloatValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_FloatValue.Unmarshal(m, b)
-}
-func (m *FloatValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_FloatValue.Marshal(b, m, deterministic)
-}
-func (m *FloatValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_FloatValue.Merge(m, src)
-}
-func (m *FloatValue) XXX_Size() int {
- return xxx_messageInfo_FloatValue.Size(m)
-}
-func (m *FloatValue) XXX_DiscardUnknown() {
- xxx_messageInfo_FloatValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_FloatValue proto.InternalMessageInfo
-
-func (m *FloatValue) GetValue() float32 {
- if m != nil {
- return m.Value
- }
- return 0
-}
-
-// Wrapper message for `int64`.
-//
-// The JSON representation for `Int64Value` is JSON string.
-type Int64Value struct {
- // The int64 value.
- Value int64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Int64Value) Reset() { *m = Int64Value{} }
-func (m *Int64Value) String() string { return proto.CompactTextString(m) }
-func (*Int64Value) ProtoMessage() {}
-func (*Int64Value) Descriptor() ([]byte, []int) {
- return fileDescriptor_5377b62bda767935, []int{2}
-}
-
-func (*Int64Value) XXX_WellKnownType() string { return "Int64Value" }
-
-func (m *Int64Value) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Int64Value.Unmarshal(m, b)
-}
-func (m *Int64Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Int64Value.Marshal(b, m, deterministic)
-}
-func (m *Int64Value) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Int64Value.Merge(m, src)
-}
-func (m *Int64Value) XXX_Size() int {
- return xxx_messageInfo_Int64Value.Size(m)
-}
-func (m *Int64Value) XXX_DiscardUnknown() {
- xxx_messageInfo_Int64Value.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Int64Value proto.InternalMessageInfo
-
-func (m *Int64Value) GetValue() int64 {
- if m != nil {
- return m.Value
- }
- return 0
-}
-
-// Wrapper message for `uint64`.
-//
-// The JSON representation for `UInt64Value` is JSON string.
-type UInt64Value struct {
- // The uint64 value.
- Value uint64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *UInt64Value) Reset() { *m = UInt64Value{} }
-func (m *UInt64Value) String() string { return proto.CompactTextString(m) }
-func (*UInt64Value) ProtoMessage() {}
-func (*UInt64Value) Descriptor() ([]byte, []int) {
- return fileDescriptor_5377b62bda767935, []int{3}
-}
-
-func (*UInt64Value) XXX_WellKnownType() string { return "UInt64Value" }
-
-func (m *UInt64Value) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_UInt64Value.Unmarshal(m, b)
-}
-func (m *UInt64Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_UInt64Value.Marshal(b, m, deterministic)
-}
-func (m *UInt64Value) XXX_Merge(src proto.Message) {
- xxx_messageInfo_UInt64Value.Merge(m, src)
-}
-func (m *UInt64Value) XXX_Size() int {
- return xxx_messageInfo_UInt64Value.Size(m)
-}
-func (m *UInt64Value) XXX_DiscardUnknown() {
- xxx_messageInfo_UInt64Value.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_UInt64Value proto.InternalMessageInfo
-
-func (m *UInt64Value) GetValue() uint64 {
- if m != nil {
- return m.Value
- }
- return 0
-}
-
-// Wrapper message for `int32`.
-//
-// The JSON representation for `Int32Value` is JSON number.
-type Int32Value struct {
- // The int32 value.
- Value int32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Int32Value) Reset() { *m = Int32Value{} }
-func (m *Int32Value) String() string { return proto.CompactTextString(m) }
-func (*Int32Value) ProtoMessage() {}
-func (*Int32Value) Descriptor() ([]byte, []int) {
- return fileDescriptor_5377b62bda767935, []int{4}
-}
-
-func (*Int32Value) XXX_WellKnownType() string { return "Int32Value" }
-
-func (m *Int32Value) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Int32Value.Unmarshal(m, b)
-}
-func (m *Int32Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Int32Value.Marshal(b, m, deterministic)
-}
-func (m *Int32Value) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Int32Value.Merge(m, src)
-}
-func (m *Int32Value) XXX_Size() int {
- return xxx_messageInfo_Int32Value.Size(m)
-}
-func (m *Int32Value) XXX_DiscardUnknown() {
- xxx_messageInfo_Int32Value.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Int32Value proto.InternalMessageInfo
-
-func (m *Int32Value) GetValue() int32 {
- if m != nil {
- return m.Value
- }
- return 0
-}
-
-// Wrapper message for `uint32`.
-//
-// The JSON representation for `UInt32Value` is JSON number.
-type UInt32Value struct {
- // The uint32 value.
- Value uint32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *UInt32Value) Reset() { *m = UInt32Value{} }
-func (m *UInt32Value) String() string { return proto.CompactTextString(m) }
-func (*UInt32Value) ProtoMessage() {}
-func (*UInt32Value) Descriptor() ([]byte, []int) {
- return fileDescriptor_5377b62bda767935, []int{5}
-}
-
-func (*UInt32Value) XXX_WellKnownType() string { return "UInt32Value" }
-
-func (m *UInt32Value) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_UInt32Value.Unmarshal(m, b)
-}
-func (m *UInt32Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_UInt32Value.Marshal(b, m, deterministic)
-}
-func (m *UInt32Value) XXX_Merge(src proto.Message) {
- xxx_messageInfo_UInt32Value.Merge(m, src)
-}
-func (m *UInt32Value) XXX_Size() int {
- return xxx_messageInfo_UInt32Value.Size(m)
-}
-func (m *UInt32Value) XXX_DiscardUnknown() {
- xxx_messageInfo_UInt32Value.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_UInt32Value proto.InternalMessageInfo
-
-func (m *UInt32Value) GetValue() uint32 {
- if m != nil {
- return m.Value
- }
- return 0
-}
-
-// Wrapper message for `bool`.
-//
-// The JSON representation for `BoolValue` is JSON `true` and `false`.
-type BoolValue struct {
- // The bool value.
- Value bool `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *BoolValue) Reset() { *m = BoolValue{} }
-func (m *BoolValue) String() string { return proto.CompactTextString(m) }
-func (*BoolValue) ProtoMessage() {}
-func (*BoolValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_5377b62bda767935, []int{6}
-}
-
-func (*BoolValue) XXX_WellKnownType() string { return "BoolValue" }
-
-func (m *BoolValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_BoolValue.Unmarshal(m, b)
-}
-func (m *BoolValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_BoolValue.Marshal(b, m, deterministic)
-}
-func (m *BoolValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_BoolValue.Merge(m, src)
-}
-func (m *BoolValue) XXX_Size() int {
- return xxx_messageInfo_BoolValue.Size(m)
-}
-func (m *BoolValue) XXX_DiscardUnknown() {
- xxx_messageInfo_BoolValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_BoolValue proto.InternalMessageInfo
-
-func (m *BoolValue) GetValue() bool {
- if m != nil {
- return m.Value
- }
- return false
-}
-
-// Wrapper message for `string`.
-//
-// The JSON representation for `StringValue` is JSON string.
-type StringValue struct {
- // The string value.
- Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *StringValue) Reset() { *m = StringValue{} }
-func (m *StringValue) String() string { return proto.CompactTextString(m) }
-func (*StringValue) ProtoMessage() {}
-func (*StringValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_5377b62bda767935, []int{7}
-}
-
-func (*StringValue) XXX_WellKnownType() string { return "StringValue" }
-
-func (m *StringValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_StringValue.Unmarshal(m, b)
-}
-func (m *StringValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_StringValue.Marshal(b, m, deterministic)
-}
-func (m *StringValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_StringValue.Merge(m, src)
-}
-func (m *StringValue) XXX_Size() int {
- return xxx_messageInfo_StringValue.Size(m)
-}
-func (m *StringValue) XXX_DiscardUnknown() {
- xxx_messageInfo_StringValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_StringValue proto.InternalMessageInfo
-
-func (m *StringValue) GetValue() string {
- if m != nil {
- return m.Value
- }
- return ""
-}
-
-// Wrapper message for `bytes`.
-//
-// The JSON representation for `BytesValue` is JSON string.
-type BytesValue struct {
- // The bytes value.
- Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *BytesValue) Reset() { *m = BytesValue{} }
-func (m *BytesValue) String() string { return proto.CompactTextString(m) }
-func (*BytesValue) ProtoMessage() {}
-func (*BytesValue) Descriptor() ([]byte, []int) {
- return fileDescriptor_5377b62bda767935, []int{8}
-}
-
-func (*BytesValue) XXX_WellKnownType() string { return "BytesValue" }
-
-func (m *BytesValue) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_BytesValue.Unmarshal(m, b)
-}
-func (m *BytesValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_BytesValue.Marshal(b, m, deterministic)
-}
-func (m *BytesValue) XXX_Merge(src proto.Message) {
- xxx_messageInfo_BytesValue.Merge(m, src)
-}
-func (m *BytesValue) XXX_Size() int {
- return xxx_messageInfo_BytesValue.Size(m)
-}
-func (m *BytesValue) XXX_DiscardUnknown() {
- xxx_messageInfo_BytesValue.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_BytesValue proto.InternalMessageInfo
-
-func (m *BytesValue) GetValue() []byte {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-func init() {
- proto.RegisterType((*DoubleValue)(nil), "google.protobuf.DoubleValue")
- proto.RegisterType((*FloatValue)(nil), "google.protobuf.FloatValue")
- proto.RegisterType((*Int64Value)(nil), "google.protobuf.Int64Value")
- proto.RegisterType((*UInt64Value)(nil), "google.protobuf.UInt64Value")
- proto.RegisterType((*Int32Value)(nil), "google.protobuf.Int32Value")
- proto.RegisterType((*UInt32Value)(nil), "google.protobuf.UInt32Value")
- proto.RegisterType((*BoolValue)(nil), "google.protobuf.BoolValue")
- proto.RegisterType((*StringValue)(nil), "google.protobuf.StringValue")
- proto.RegisterType((*BytesValue)(nil), "google.protobuf.BytesValue")
-}
-
-func init() { proto.RegisterFile("google/protobuf/wrappers.proto", fileDescriptor_5377b62bda767935) }
-
-var fileDescriptor_5377b62bda767935 = []byte{
- // 259 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f,
- 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x2f, 0x4a, 0x2c,
- 0x28, 0x48, 0x2d, 0x2a, 0xd6, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0xca,
- 0x5c, 0xdc, 0x2e, 0xf9, 0xa5, 0x49, 0x39, 0xa9, 0x61, 0x89, 0x39, 0xa5, 0xa9, 0x42, 0x22, 0x5c,
- 0xac, 0x65, 0x20, 0x86, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x63, 0x10, 0x84, 0xa3, 0xa4, 0xc4, 0xc5,
- 0xe5, 0x96, 0x93, 0x9f, 0x58, 0x82, 0x45, 0x0d, 0x13, 0x92, 0x1a, 0xcf, 0xbc, 0x12, 0x33, 0x13,
- 0x2c, 0x6a, 0x98, 0x61, 0x6a, 0x94, 0xb9, 0xb8, 0x43, 0x71, 0x29, 0x62, 0x41, 0x35, 0xc8, 0xd8,
- 0x08, 0x8b, 0x1a, 0x56, 0x34, 0x83, 0xb0, 0x2a, 0xe2, 0x85, 0x29, 0x52, 0xe4, 0xe2, 0x74, 0xca,
- 0xcf, 0xcf, 0xc1, 0xa2, 0x84, 0x03, 0xc9, 0x9c, 0xe0, 0x92, 0xa2, 0xcc, 0xbc, 0x74, 0x2c, 0x8a,
- 0x38, 0x91, 0x1c, 0xe4, 0x54, 0x59, 0x92, 0x5a, 0x8c, 0x45, 0x0d, 0x0f, 0x54, 0x8d, 0x53, 0x0d,
- 0x97, 0x70, 0x72, 0x7e, 0xae, 0x1e, 0x5a, 0xe8, 0x3a, 0xf1, 0x86, 0x43, 0x83, 0x3f, 0x00, 0x24,
- 0x12, 0xc0, 0x18, 0xa5, 0x95, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x9f,
- 0x9e, 0x9f, 0x93, 0x98, 0x97, 0x8e, 0x88, 0xaa, 0x82, 0x92, 0xca, 0x82, 0xd4, 0x62, 0x78, 0x8c,
- 0xfd, 0x60, 0x64, 0x5c, 0xc4, 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x6e,
- 0x00, 0x54, 0xa9, 0x5e, 0x78, 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x4b,
- 0x12, 0x1b, 0xd8, 0x0c, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x19, 0x6c, 0xb9, 0xb8, 0xfe,
- 0x01, 0x00, 0x00,
-}
diff --git a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.proto b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.proto
deleted file mode 100644
index 01947639a..000000000
--- a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.proto
+++ /dev/null
@@ -1,118 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Wrappers for primitive (non-message) types. These types are useful
-// for embedding primitives in the `google.protobuf.Any` type and for places
-// where we need to distinguish between the absence of a primitive
-// typed field and its default value.
-
-syntax = "proto3";
-
-package google.protobuf;
-
-option csharp_namespace = "Google.Protobuf.WellKnownTypes";
-option cc_enable_arenas = true;
-option go_package = "github.com/golang/protobuf/ptypes/wrappers";
-option java_package = "com.google.protobuf";
-option java_outer_classname = "WrappersProto";
-option java_multiple_files = true;
-option objc_class_prefix = "GPB";
-
-// Wrapper message for `double`.
-//
-// The JSON representation for `DoubleValue` is JSON number.
-message DoubleValue {
- // The double value.
- double value = 1;
-}
-
-// Wrapper message for `float`.
-//
-// The JSON representation for `FloatValue` is JSON number.
-message FloatValue {
- // The float value.
- float value = 1;
-}
-
-// Wrapper message for `int64`.
-//
-// The JSON representation for `Int64Value` is JSON string.
-message Int64Value {
- // The int64 value.
- int64 value = 1;
-}
-
-// Wrapper message for `uint64`.
-//
-// The JSON representation for `UInt64Value` is JSON string.
-message UInt64Value {
- // The uint64 value.
- uint64 value = 1;
-}
-
-// Wrapper message for `int32`.
-//
-// The JSON representation for `Int32Value` is JSON number.
-message Int32Value {
- // The int32 value.
- int32 value = 1;
-}
-
-// Wrapper message for `uint32`.
-//
-// The JSON representation for `UInt32Value` is JSON number.
-message UInt32Value {
- // The uint32 value.
- uint32 value = 1;
-}
-
-// Wrapper message for `bool`.
-//
-// The JSON representation for `BoolValue` is JSON `true` and `false`.
-message BoolValue {
- // The bool value.
- bool value = 1;
-}
-
-// Wrapper message for `string`.
-//
-// The JSON representation for `StringValue` is JSON string.
-message StringValue {
- // The string value.
- string value = 1;
-}
-
-// Wrapper message for `bytes`.
-//
-// The JSON representation for `BytesValue` is JSON string.
-message BytesValue {
- // The bytes value.
- bytes value = 1;
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/LICENSE.txt b/vendor/github.com/grpc-ecosystem/grpc-gateway/LICENSE.txt
deleted file mode 100644
index 364516251..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/LICENSE.txt
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2015, Gengo, Inc.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
- * Redistributions of source code must retain the above copyright notice,
- this list of conditions and the following disclaimer.
-
- * Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
-
- * Neither the name of Gengo, Inc. nor the names of its
- contributors may be used to endorse or promote products derived from this
- software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/internal/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/internal/BUILD.bazel
deleted file mode 100644
index 76cafe6ec..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/internal/BUILD.bazel
+++ /dev/null
@@ -1,22 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
-
-package(default_visibility = ["//visibility:public"])
-
-proto_library(
- name = "internal_proto",
- srcs = ["stream_chunk.proto"],
- deps = ["@com_google_protobuf//:any_proto"],
-)
-
-go_proto_library(
- name = "internal_go_proto",
- importpath = "github.com/grpc-ecosystem/grpc-gateway/internal",
- proto = ":internal_proto",
-)
-
-go_library(
- name = "go_default_library",
- embed = [":internal_go_proto"],
- importpath = "github.com/grpc-ecosystem/grpc-gateway/internal",
-)
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/internal/stream_chunk.pb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/internal/stream_chunk.pb.go
deleted file mode 100644
index 8858f0690..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/internal/stream_chunk.pb.go
+++ /dev/null
@@ -1,118 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: internal/stream_chunk.proto
-
-package internal
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-import any "github.com/golang/protobuf/ptypes/any"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
-
-// StreamError is a response type which is returned when
-// streaming rpc returns an error.
-type StreamError struct {
- GrpcCode int32 `protobuf:"varint,1,opt,name=grpc_code,json=grpcCode,proto3" json:"grpc_code,omitempty"`
- HttpCode int32 `protobuf:"varint,2,opt,name=http_code,json=httpCode,proto3" json:"http_code,omitempty"`
- Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"`
- HttpStatus string `protobuf:"bytes,4,opt,name=http_status,json=httpStatus,proto3" json:"http_status,omitempty"`
- Details []*any.Any `protobuf:"bytes,5,rep,name=details,proto3" json:"details,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *StreamError) Reset() { *m = StreamError{} }
-func (m *StreamError) String() string { return proto.CompactTextString(m) }
-func (*StreamError) ProtoMessage() {}
-func (*StreamError) Descriptor() ([]byte, []int) {
- return fileDescriptor_stream_chunk_a2afb657504565d7, []int{0}
-}
-func (m *StreamError) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_StreamError.Unmarshal(m, b)
-}
-func (m *StreamError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_StreamError.Marshal(b, m, deterministic)
-}
-func (dst *StreamError) XXX_Merge(src proto.Message) {
- xxx_messageInfo_StreamError.Merge(dst, src)
-}
-func (m *StreamError) XXX_Size() int {
- return xxx_messageInfo_StreamError.Size(m)
-}
-func (m *StreamError) XXX_DiscardUnknown() {
- xxx_messageInfo_StreamError.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_StreamError proto.InternalMessageInfo
-
-func (m *StreamError) GetGrpcCode() int32 {
- if m != nil {
- return m.GrpcCode
- }
- return 0
-}
-
-func (m *StreamError) GetHttpCode() int32 {
- if m != nil {
- return m.HttpCode
- }
- return 0
-}
-
-func (m *StreamError) GetMessage() string {
- if m != nil {
- return m.Message
- }
- return ""
-}
-
-func (m *StreamError) GetHttpStatus() string {
- if m != nil {
- return m.HttpStatus
- }
- return ""
-}
-
-func (m *StreamError) GetDetails() []*any.Any {
- if m != nil {
- return m.Details
- }
- return nil
-}
-
-func init() {
- proto.RegisterType((*StreamError)(nil), "grpc.gateway.runtime.StreamError")
-}
-
-func init() {
- proto.RegisterFile("internal/stream_chunk.proto", fileDescriptor_stream_chunk_a2afb657504565d7)
-}
-
-var fileDescriptor_stream_chunk_a2afb657504565d7 = []byte{
- // 223 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x34, 0x90, 0x41, 0x4e, 0xc3, 0x30,
- 0x10, 0x45, 0x15, 0x4a, 0x69, 0x3b, 0xd9, 0x45, 0x5d, 0x18, 0xba, 0x20, 0x62, 0x95, 0x95, 0x23,
- 0xc1, 0x09, 0x00, 0x71, 0x81, 0x74, 0xc7, 0xa6, 0x9a, 0x26, 0x83, 0x13, 0x91, 0xd8, 0xd1, 0x78,
- 0x22, 0x94, 0x6b, 0x71, 0xc2, 0xca, 0x8e, 0xb2, 0xf4, 0x7b, 0x7f, 0xbe, 0xbe, 0x0c, 0xa7, 0xce,
- 0x0a, 0xb1, 0xc5, 0xbe, 0xf4, 0xc2, 0x84, 0xc3, 0xa5, 0x6e, 0x27, 0xfb, 0xab, 0x47, 0x76, 0xe2,
- 0xb2, 0xa3, 0xe1, 0xb1, 0xd6, 0x06, 0x85, 0xfe, 0x70, 0xd6, 0x3c, 0x59, 0xe9, 0x06, 0x7a, 0x7a,
- 0x34, 0xce, 0x99, 0x9e, 0xca, 0x98, 0xb9, 0x4e, 0x3f, 0x25, 0xda, 0x79, 0x39, 0x78, 0xf9, 0x4f,
- 0x20, 0x3d, 0xc7, 0x9e, 0x2f, 0x66, 0xc7, 0xd9, 0x09, 0x0e, 0xa1, 0xe2, 0x52, 0xbb, 0x86, 0x54,
- 0x92, 0x27, 0xc5, 0xb6, 0xda, 0x07, 0xf0, 0xe9, 0x1a, 0x0a, 0xb2, 0x15, 0x19, 0x17, 0x79, 0xb7,
- 0xc8, 0x00, 0xa2, 0x54, 0xb0, 0x1b, 0xc8, 0x7b, 0x34, 0xa4, 0x36, 0x79, 0x52, 0x1c, 0xaa, 0xf5,
- 0x99, 0x3d, 0x43, 0x1a, 0xcf, 0xbc, 0xa0, 0x4c, 0x5e, 0xdd, 0x47, 0x0b, 0x01, 0x9d, 0x23, 0xc9,
- 0x34, 0xec, 0x1a, 0x12, 0xec, 0x7a, 0xaf, 0xb6, 0xf9, 0xa6, 0x48, 0x5f, 0x8f, 0x7a, 0x59, 0xac,
- 0xd7, 0xc5, 0xfa, 0xdd, 0xce, 0xd5, 0x1a, 0xfa, 0x80, 0xef, 0xfd, 0xfa, 0x09, 0xd7, 0x87, 0x18,
- 0x79, 0xbb, 0x05, 0x00, 0x00, 0xff, 0xff, 0x0d, 0x7d, 0xa5, 0x18, 0x17, 0x01, 0x00, 0x00,
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/internal/stream_chunk.proto b/vendor/github.com/grpc-ecosystem/grpc-gateway/internal/stream_chunk.proto
deleted file mode 100644
index 55f42ce63..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/internal/stream_chunk.proto
+++ /dev/null
@@ -1,15 +0,0 @@
-syntax = "proto3";
-package grpc.gateway.runtime;
-option go_package = "internal";
-
-import "google/protobuf/any.proto";
-
-// StreamError is a response type which is returned when
-// streaming rpc returns an error.
-message StreamError {
- int32 grpc_code = 1;
- int32 http_code = 2;
- string message = 3;
- string http_status = 4;
- repeated google.protobuf.Any details = 5;
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/BUILD.bazel
deleted file mode 100644
index c99f83e58..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/BUILD.bazel
+++ /dev/null
@@ -1,80 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(default_visibility = ["//visibility:public"])
-
-go_library(
- name = "go_default_library",
- srcs = [
- "context.go",
- "convert.go",
- "doc.go",
- "errors.go",
- "fieldmask.go",
- "handler.go",
- "marshal_json.go",
- "marshal_jsonpb.go",
- "marshal_proto.go",
- "marshaler.go",
- "marshaler_registry.go",
- "mux.go",
- "pattern.go",
- "proto2_convert.go",
- "proto_errors.go",
- "query.go",
- ],
- importpath = "github.com/grpc-ecosystem/grpc-gateway/runtime",
- deps = [
- "//internal:go_default_library",
- "//utilities:go_default_library",
- "@com_github_golang_protobuf//jsonpb:go_default_library_gen",
- "@com_github_golang_protobuf//proto:go_default_library",
- "@com_github_golang_protobuf//protoc-gen-go/generator:go_default_library_gen",
- "@io_bazel_rules_go//proto/wkt:any_go_proto",
- "@io_bazel_rules_go//proto/wkt:duration_go_proto",
- "@io_bazel_rules_go//proto/wkt:field_mask_go_proto",
- "@io_bazel_rules_go//proto/wkt:timestamp_go_proto",
- "@io_bazel_rules_go//proto/wkt:wrappers_go_proto",
- "@org_golang_google_grpc//codes:go_default_library",
- "@org_golang_google_grpc//grpclog:go_default_library",
- "@org_golang_google_grpc//metadata:go_default_library",
- "@org_golang_google_grpc//status:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- size = "small",
- srcs = [
- "context_test.go",
- "errors_test.go",
- "fieldmask_test.go",
- "handler_test.go",
- "marshal_json_test.go",
- "marshal_jsonpb_test.go",
- "marshal_proto_test.go",
- "marshaler_registry_test.go",
- "mux_test.go",
- "pattern_test.go",
- "query_test.go",
- ],
- embed = [":go_default_library"],
- deps = [
- "//examples/proto/examplepb:go_default_library",
- "//internal:go_default_library",
- "//utilities:go_default_library",
- "@com_github_golang_protobuf//jsonpb:go_default_library_gen",
- "@com_github_golang_protobuf//proto:go_default_library",
- "@com_github_golang_protobuf//ptypes:go_default_library_gen",
- "@go_googleapis//google/rpc:errdetails_go_proto",
- "@io_bazel_rules_go//proto/wkt:duration_go_proto",
- "@io_bazel_rules_go//proto/wkt:empty_go_proto",
- "@io_bazel_rules_go//proto/wkt:field_mask_go_proto",
- "@io_bazel_rules_go//proto/wkt:struct_go_proto",
- "@io_bazel_rules_go//proto/wkt:timestamp_go_proto",
- "@io_bazel_rules_go//proto/wkt:wrappers_go_proto",
- "@org_golang_google_grpc//:go_default_library",
- "@org_golang_google_grpc//codes:go_default_library",
- "@org_golang_google_grpc//metadata:go_default_library",
- "@org_golang_google_grpc//status:go_default_library",
- ],
-)
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context.go
deleted file mode 100644
index 896057e1e..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/context.go
+++ /dev/null
@@ -1,210 +0,0 @@
-package runtime
-
-import (
- "context"
- "encoding/base64"
- "fmt"
- "net"
- "net/http"
- "net/textproto"
- "strconv"
- "strings"
- "time"
-
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/grpclog"
- "google.golang.org/grpc/metadata"
- "google.golang.org/grpc/status"
-)
-
-// MetadataHeaderPrefix is the http prefix that represents custom metadata
-// parameters to or from a gRPC call.
-const MetadataHeaderPrefix = "Grpc-Metadata-"
-
-// MetadataPrefix is prepended to permanent HTTP header keys (as specified
-// by the IANA) when added to the gRPC context.
-const MetadataPrefix = "grpcgateway-"
-
-// MetadataTrailerPrefix is prepended to gRPC metadata as it is converted to
-// HTTP headers in a response handled by grpc-gateway
-const MetadataTrailerPrefix = "Grpc-Trailer-"
-
-const metadataGrpcTimeout = "Grpc-Timeout"
-const metadataHeaderBinarySuffix = "-Bin"
-
-const xForwardedFor = "X-Forwarded-For"
-const xForwardedHost = "X-Forwarded-Host"
-
-var (
- // DefaultContextTimeout is used for gRPC call context.WithTimeout whenever a Grpc-Timeout inbound
- // header isn't present. If the value is 0 the sent `context` will not have a timeout.
- DefaultContextTimeout = 0 * time.Second
-)
-
-func decodeBinHeader(v string) ([]byte, error) {
- if len(v)%4 == 0 {
- // Input was padded, or padding was not necessary.
- return base64.StdEncoding.DecodeString(v)
- }
- return base64.RawStdEncoding.DecodeString(v)
-}
-
-/*
-AnnotateContext adds context information such as metadata from the request.
-
-At a minimum, the RemoteAddr is included in the fashion of "X-Forwarded-For",
-except that the forwarded destination is not another HTTP service but rather
-a gRPC service.
-*/
-func AnnotateContext(ctx context.Context, mux *ServeMux, req *http.Request) (context.Context, error) {
- var pairs []string
- timeout := DefaultContextTimeout
- if tm := req.Header.Get(metadataGrpcTimeout); tm != "" {
- var err error
- timeout, err = timeoutDecode(tm)
- if err != nil {
- return nil, status.Errorf(codes.InvalidArgument, "invalid grpc-timeout: %s", tm)
- }
- }
-
- for key, vals := range req.Header {
- for _, val := range vals {
- key = textproto.CanonicalMIMEHeaderKey(key)
- // For backwards-compatibility, pass through 'authorization' header with no prefix.
- if key == "Authorization" {
- pairs = append(pairs, "authorization", val)
- }
- if h, ok := mux.incomingHeaderMatcher(key); ok {
- // Handles "-bin" metadata in grpc, since grpc will do another base64
- // encode before sending to server, we need to decode it first.
- if strings.HasSuffix(key, metadataHeaderBinarySuffix) {
- b, err := decodeBinHeader(val)
- if err != nil {
- return nil, status.Errorf(codes.InvalidArgument, "invalid binary header %s: %s", key, err)
- }
-
- val = string(b)
- }
- pairs = append(pairs, h, val)
- }
- }
- }
- if host := req.Header.Get(xForwardedHost); host != "" {
- pairs = append(pairs, strings.ToLower(xForwardedHost), host)
- } else if req.Host != "" {
- pairs = append(pairs, strings.ToLower(xForwardedHost), req.Host)
- }
-
- if addr := req.RemoteAddr; addr != "" {
- if remoteIP, _, err := net.SplitHostPort(addr); err == nil {
- if fwd := req.Header.Get(xForwardedFor); fwd == "" {
- pairs = append(pairs, strings.ToLower(xForwardedFor), remoteIP)
- } else {
- pairs = append(pairs, strings.ToLower(xForwardedFor), fmt.Sprintf("%s, %s", fwd, remoteIP))
- }
- } else {
- grpclog.Infof("invalid remote addr: %s", addr)
- }
- }
-
- if timeout != 0 {
- ctx, _ = context.WithTimeout(ctx, timeout)
- }
- if len(pairs) == 0 {
- return ctx, nil
- }
- md := metadata.Pairs(pairs...)
- for _, mda := range mux.metadataAnnotators {
- md = metadata.Join(md, mda(ctx, req))
- }
- return metadata.NewOutgoingContext(ctx, md), nil
-}
-
-// ServerMetadata consists of metadata sent from gRPC server.
-type ServerMetadata struct {
- HeaderMD metadata.MD
- TrailerMD metadata.MD
-}
-
-type serverMetadataKey struct{}
-
-// NewServerMetadataContext creates a new context with ServerMetadata
-func NewServerMetadataContext(ctx context.Context, md ServerMetadata) context.Context {
- return context.WithValue(ctx, serverMetadataKey{}, md)
-}
-
-// ServerMetadataFromContext returns the ServerMetadata in ctx
-func ServerMetadataFromContext(ctx context.Context) (md ServerMetadata, ok bool) {
- md, ok = ctx.Value(serverMetadataKey{}).(ServerMetadata)
- return
-}
-
-func timeoutDecode(s string) (time.Duration, error) {
- size := len(s)
- if size < 2 {
- return 0, fmt.Errorf("timeout string is too short: %q", s)
- }
- d, ok := timeoutUnitToDuration(s[size-1])
- if !ok {
- return 0, fmt.Errorf("timeout unit is not recognized: %q", s)
- }
- t, err := strconv.ParseInt(s[:size-1], 10, 64)
- if err != nil {
- return 0, err
- }
- return d * time.Duration(t), nil
-}
-
-func timeoutUnitToDuration(u uint8) (d time.Duration, ok bool) {
- switch u {
- case 'H':
- return time.Hour, true
- case 'M':
- return time.Minute, true
- case 'S':
- return time.Second, true
- case 'm':
- return time.Millisecond, true
- case 'u':
- return time.Microsecond, true
- case 'n':
- return time.Nanosecond, true
- default:
- }
- return
-}
-
-// isPermanentHTTPHeader checks whether hdr belongs to the list of
-// permenant request headers maintained by IANA.
-// http://www.iana.org/assignments/message-headers/message-headers.xml
-func isPermanentHTTPHeader(hdr string) bool {
- switch hdr {
- case
- "Accept",
- "Accept-Charset",
- "Accept-Language",
- "Accept-Ranges",
- "Authorization",
- "Cache-Control",
- "Content-Type",
- "Cookie",
- "Date",
- "Expect",
- "From",
- "Host",
- "If-Match",
- "If-Modified-Since",
- "If-None-Match",
- "If-Schedule-Tag-Match",
- "If-Unmodified-Since",
- "Max-Forwards",
- "Origin",
- "Pragma",
- "Referer",
- "User-Agent",
- "Via",
- "Warning":
- return true
- }
- return false
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/convert.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/convert.go
deleted file mode 100644
index a5b3bd6a7..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/convert.go
+++ /dev/null
@@ -1,312 +0,0 @@
-package runtime
-
-import (
- "encoding/base64"
- "fmt"
- "strconv"
- "strings"
-
- "github.com/golang/protobuf/jsonpb"
- "github.com/golang/protobuf/ptypes/duration"
- "github.com/golang/protobuf/ptypes/timestamp"
- "github.com/golang/protobuf/ptypes/wrappers"
-)
-
-// String just returns the given string.
-// It is just for compatibility to other types.
-func String(val string) (string, error) {
- return val, nil
-}
-
-// StringSlice converts 'val' where individual strings are separated by
-// 'sep' into a string slice.
-func StringSlice(val, sep string) ([]string, error) {
- return strings.Split(val, sep), nil
-}
-
-// Bool converts the given string representation of a boolean value into bool.
-func Bool(val string) (bool, error) {
- return strconv.ParseBool(val)
-}
-
-// BoolSlice converts 'val' where individual booleans are separated by
-// 'sep' into a bool slice.
-func BoolSlice(val, sep string) ([]bool, error) {
- s := strings.Split(val, sep)
- values := make([]bool, len(s))
- for i, v := range s {
- value, err := Bool(v)
- if err != nil {
- return values, err
- }
- values[i] = value
- }
- return values, nil
-}
-
-// Float64 converts the given string representation into representation of a floating point number into float64.
-func Float64(val string) (float64, error) {
- return strconv.ParseFloat(val, 64)
-}
-
-// Float64Slice converts 'val' where individual floating point numbers are separated by
-// 'sep' into a float64 slice.
-func Float64Slice(val, sep string) ([]float64, error) {
- s := strings.Split(val, sep)
- values := make([]float64, len(s))
- for i, v := range s {
- value, err := Float64(v)
- if err != nil {
- return values, err
- }
- values[i] = value
- }
- return values, nil
-}
-
-// Float32 converts the given string representation of a floating point number into float32.
-func Float32(val string) (float32, error) {
- f, err := strconv.ParseFloat(val, 32)
- if err != nil {
- return 0, err
- }
- return float32(f), nil
-}
-
-// Float32Slice converts 'val' where individual floating point numbers are separated by
-// 'sep' into a float32 slice.
-func Float32Slice(val, sep string) ([]float32, error) {
- s := strings.Split(val, sep)
- values := make([]float32, len(s))
- for i, v := range s {
- value, err := Float32(v)
- if err != nil {
- return values, err
- }
- values[i] = value
- }
- return values, nil
-}
-
-// Int64 converts the given string representation of an integer into int64.
-func Int64(val string) (int64, error) {
- return strconv.ParseInt(val, 0, 64)
-}
-
-// Int64Slice converts 'val' where individual integers are separated by
-// 'sep' into a int64 slice.
-func Int64Slice(val, sep string) ([]int64, error) {
- s := strings.Split(val, sep)
- values := make([]int64, len(s))
- for i, v := range s {
- value, err := Int64(v)
- if err != nil {
- return values, err
- }
- values[i] = value
- }
- return values, nil
-}
-
-// Int32 converts the given string representation of an integer into int32.
-func Int32(val string) (int32, error) {
- i, err := strconv.ParseInt(val, 0, 32)
- if err != nil {
- return 0, err
- }
- return int32(i), nil
-}
-
-// Int32Slice converts 'val' where individual integers are separated by
-// 'sep' into a int32 slice.
-func Int32Slice(val, sep string) ([]int32, error) {
- s := strings.Split(val, sep)
- values := make([]int32, len(s))
- for i, v := range s {
- value, err := Int32(v)
- if err != nil {
- return values, err
- }
- values[i] = value
- }
- return values, nil
-}
-
-// Uint64 converts the given string representation of an integer into uint64.
-func Uint64(val string) (uint64, error) {
- return strconv.ParseUint(val, 0, 64)
-}
-
-// Uint64Slice converts 'val' where individual integers are separated by
-// 'sep' into a uint64 slice.
-func Uint64Slice(val, sep string) ([]uint64, error) {
- s := strings.Split(val, sep)
- values := make([]uint64, len(s))
- for i, v := range s {
- value, err := Uint64(v)
- if err != nil {
- return values, err
- }
- values[i] = value
- }
- return values, nil
-}
-
-// Uint32 converts the given string representation of an integer into uint32.
-func Uint32(val string) (uint32, error) {
- i, err := strconv.ParseUint(val, 0, 32)
- if err != nil {
- return 0, err
- }
- return uint32(i), nil
-}
-
-// Uint32Slice converts 'val' where individual integers are separated by
-// 'sep' into a uint32 slice.
-func Uint32Slice(val, sep string) ([]uint32, error) {
- s := strings.Split(val, sep)
- values := make([]uint32, len(s))
- for i, v := range s {
- value, err := Uint32(v)
- if err != nil {
- return values, err
- }
- values[i] = value
- }
- return values, nil
-}
-
-// Bytes converts the given string representation of a byte sequence into a slice of bytes
-// A bytes sequence is encoded in URL-safe base64 without padding
-func Bytes(val string) ([]byte, error) {
- b, err := base64.StdEncoding.DecodeString(val)
- if err != nil {
- b, err = base64.URLEncoding.DecodeString(val)
- if err != nil {
- return nil, err
- }
- }
- return b, nil
-}
-
-// BytesSlice converts 'val' where individual bytes sequences, encoded in URL-safe
-// base64 without padding, are separated by 'sep' into a slice of bytes slices slice.
-func BytesSlice(val, sep string) ([][]byte, error) {
- s := strings.Split(val, sep)
- values := make([][]byte, len(s))
- for i, v := range s {
- value, err := Bytes(v)
- if err != nil {
- return values, err
- }
- values[i] = value
- }
- return values, nil
-}
-
-// Timestamp converts the given RFC3339 formatted string into a timestamp.Timestamp.
-func Timestamp(val string) (*timestamp.Timestamp, error) {
- var r *timestamp.Timestamp
- err := jsonpb.UnmarshalString(val, r)
- return r, err
-}
-
-// Duration converts the given string into a timestamp.Duration.
-func Duration(val string) (*duration.Duration, error) {
- var r *duration.Duration
- err := jsonpb.UnmarshalString(val, r)
- return r, err
-}
-
-// Enum converts the given string into an int32 that should be type casted into the
-// correct enum proto type.
-func Enum(val string, enumValMap map[string]int32) (int32, error) {
- e, ok := enumValMap[val]
- if ok {
- return e, nil
- }
-
- i, err := Int32(val)
- if err != nil {
- return 0, fmt.Errorf("%s is not valid", val)
- }
- for _, v := range enumValMap {
- if v == i {
- return i, nil
- }
- }
- return 0, fmt.Errorf("%s is not valid", val)
-}
-
-// EnumSlice converts 'val' where individual enums are separated by 'sep'
-// into a int32 slice. Each individual int32 should be type casted into the
-// correct enum proto type.
-func EnumSlice(val, sep string, enumValMap map[string]int32) ([]int32, error) {
- s := strings.Split(val, sep)
- values := make([]int32, len(s))
- for i, v := range s {
- value, err := Enum(v, enumValMap)
- if err != nil {
- return values, err
- }
- values[i] = value
- }
- return values, nil
-}
-
-/*
- Support fot google.protobuf.wrappers on top of primitive types
-*/
-
-// StringValue well-known type support as wrapper around string type
-func StringValue(val string) (*wrappers.StringValue, error) {
- return &wrappers.StringValue{Value: val}, nil
-}
-
-// FloatValue well-known type support as wrapper around float32 type
-func FloatValue(val string) (*wrappers.FloatValue, error) {
- parsedVal, err := Float32(val)
- return &wrappers.FloatValue{Value: parsedVal}, err
-}
-
-// DoubleValue well-known type support as wrapper around float64 type
-func DoubleValue(val string) (*wrappers.DoubleValue, error) {
- parsedVal, err := Float64(val)
- return &wrappers.DoubleValue{Value: parsedVal}, err
-}
-
-// BoolValue well-known type support as wrapper around bool type
-func BoolValue(val string) (*wrappers.BoolValue, error) {
- parsedVal, err := Bool(val)
- return &wrappers.BoolValue{Value: parsedVal}, err
-}
-
-// Int32Value well-known type support as wrapper around int32 type
-func Int32Value(val string) (*wrappers.Int32Value, error) {
- parsedVal, err := Int32(val)
- return &wrappers.Int32Value{Value: parsedVal}, err
-}
-
-// UInt32Value well-known type support as wrapper around uint32 type
-func UInt32Value(val string) (*wrappers.UInt32Value, error) {
- parsedVal, err := Uint32(val)
- return &wrappers.UInt32Value{Value: parsedVal}, err
-}
-
-// Int64Value well-known type support as wrapper around int64 type
-func Int64Value(val string) (*wrappers.Int64Value, error) {
- parsedVal, err := Int64(val)
- return &wrappers.Int64Value{Value: parsedVal}, err
-}
-
-// UInt64Value well-known type support as wrapper around uint64 type
-func UInt64Value(val string) (*wrappers.UInt64Value, error) {
- parsedVal, err := Uint64(val)
- return &wrappers.UInt64Value{Value: parsedVal}, err
-}
-
-// BytesValue well-known type support as wrapper around bytes[] type
-func BytesValue(val string) (*wrappers.BytesValue, error) {
- parsedVal, err := Bytes(val)
- return &wrappers.BytesValue{Value: parsedVal}, err
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/doc.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/doc.go
deleted file mode 100644
index b6e5ddf7a..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/doc.go
+++ /dev/null
@@ -1,5 +0,0 @@
-/*
-Package runtime contains runtime helper functions used by
-servers which protoc-gen-grpc-gateway generates.
-*/
-package runtime
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors.go
deleted file mode 100644
index 41d54ef91..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/errors.go
+++ /dev/null
@@ -1,145 +0,0 @@
-package runtime
-
-import (
- "context"
- "io"
- "net/http"
-
- "github.com/golang/protobuf/proto"
- "github.com/golang/protobuf/ptypes/any"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/grpclog"
- "google.golang.org/grpc/status"
-)
-
-// HTTPStatusFromCode converts a gRPC error code into the corresponding HTTP response status.
-// See: https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
-func HTTPStatusFromCode(code codes.Code) int {
- switch code {
- case codes.OK:
- return http.StatusOK
- case codes.Canceled:
- return http.StatusRequestTimeout
- case codes.Unknown:
- return http.StatusInternalServerError
- case codes.InvalidArgument:
- return http.StatusBadRequest
- case codes.DeadlineExceeded:
- return http.StatusGatewayTimeout
- case codes.NotFound:
- return http.StatusNotFound
- case codes.AlreadyExists:
- return http.StatusConflict
- case codes.PermissionDenied:
- return http.StatusForbidden
- case codes.Unauthenticated:
- return http.StatusUnauthorized
- case codes.ResourceExhausted:
- return http.StatusTooManyRequests
- case codes.FailedPrecondition:
- return http.StatusPreconditionFailed
- case codes.Aborted:
- return http.StatusConflict
- case codes.OutOfRange:
- return http.StatusBadRequest
- case codes.Unimplemented:
- return http.StatusNotImplemented
- case codes.Internal:
- return http.StatusInternalServerError
- case codes.Unavailable:
- return http.StatusServiceUnavailable
- case codes.DataLoss:
- return http.StatusInternalServerError
- }
-
- grpclog.Infof("Unknown gRPC error code: %v", code)
- return http.StatusInternalServerError
-}
-
-var (
- // HTTPError replies to the request with the error.
- // You can set a custom function to this variable to customize error format.
- HTTPError = DefaultHTTPError
- // OtherErrorHandler handles the following error used by the gateway: StatusMethodNotAllowed StatusNotFound and StatusBadRequest
- OtherErrorHandler = DefaultOtherErrorHandler
-)
-
-type errorBody struct {
- Error string `protobuf:"bytes,1,name=error" json:"error"`
- // This is to make the error more compatible with users that expect errors to be Status objects:
- // https://github.com/grpc/grpc/blob/master/src/proto/grpc/status/status.proto
- // It should be the exact same message as the Error field.
- Message string `protobuf:"bytes,1,name=message" json:"message"`
- Code int32 `protobuf:"varint,2,name=code" json:"code"`
- Details []*any.Any `protobuf:"bytes,3,rep,name=details" json:"details,omitempty"`
-}
-
-// Make this also conform to proto.Message for builtin JSONPb Marshaler
-func (e *errorBody) Reset() { *e = errorBody{} }
-func (e *errorBody) String() string { return proto.CompactTextString(e) }
-func (*errorBody) ProtoMessage() {}
-
-// DefaultHTTPError is the default implementation of HTTPError.
-// If "err" is an error from gRPC system, the function replies with the status code mapped by HTTPStatusFromCode.
-// If otherwise, it replies with http.StatusInternalServerError.
-//
-// The response body returned by this function is a JSON object,
-// which contains a member whose key is "error" and whose value is err.Error().
-func DefaultHTTPError(ctx context.Context, mux *ServeMux, marshaler Marshaler, w http.ResponseWriter, _ *http.Request, err error) {
- const fallback = `{"error": "failed to marshal error message"}`
-
- s, ok := status.FromError(err)
- if !ok {
- s = status.New(codes.Unknown, err.Error())
- }
-
- w.Header().Del("Trailer")
-
- contentType := marshaler.ContentType()
- // Check marshaler on run time in order to keep backwards compatability
- // An interface param needs to be added to the ContentType() function on
- // the Marshal interface to be able to remove this check
- if httpBodyMarshaler, ok := marshaler.(*HTTPBodyMarshaler); ok {
- pb := s.Proto()
- contentType = httpBodyMarshaler.ContentTypeFromMessage(pb)
- }
- w.Header().Set("Content-Type", contentType)
-
- body := &errorBody{
- Error: s.Message(),
- Message: s.Message(),
- Code: int32(s.Code()),
- Details: s.Proto().GetDetails(),
- }
-
- buf, merr := marshaler.Marshal(body)
- if merr != nil {
- grpclog.Infof("Failed to marshal error message %q: %v", body, merr)
- w.WriteHeader(http.StatusInternalServerError)
- if _, err := io.WriteString(w, fallback); err != nil {
- grpclog.Infof("Failed to write response: %v", err)
- }
- return
- }
-
- md, ok := ServerMetadataFromContext(ctx)
- if !ok {
- grpclog.Infof("Failed to extract ServerMetadata from context")
- }
-
- handleForwardResponseServerMetadata(w, mux, md)
- handleForwardResponseTrailerHeader(w, md)
- st := HTTPStatusFromCode(s.Code())
- w.WriteHeader(st)
- if _, err := w.Write(buf); err != nil {
- grpclog.Infof("Failed to write response: %v", err)
- }
-
- handleForwardResponseTrailer(w, md)
-}
-
-// DefaultOtherErrorHandler is the default implementation of OtherErrorHandler.
-// It simply writes a string representation of the given error into "w".
-func DefaultOtherErrorHandler(w http.ResponseWriter, _ *http.Request, msg string, code int) {
- http.Error(w, msg, code)
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/fieldmask.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/fieldmask.go
deleted file mode 100644
index e1cf7a914..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/fieldmask.go
+++ /dev/null
@@ -1,70 +0,0 @@
-package runtime
-
-import (
- "encoding/json"
- "io"
- "strings"
-
- "github.com/golang/protobuf/protoc-gen-go/generator"
- "google.golang.org/genproto/protobuf/field_mask"
-)
-
-// FieldMaskFromRequestBody creates a FieldMask printing all complete paths from the JSON body.
-func FieldMaskFromRequestBody(r io.Reader) (*field_mask.FieldMask, error) {
- fm := &field_mask.FieldMask{}
- var root interface{}
- if err := json.NewDecoder(r).Decode(&root); err != nil {
- if err == io.EOF {
- return fm, nil
- }
- return nil, err
- }
-
- queue := []fieldMaskPathItem{{node: root}}
- for len(queue) > 0 {
- // dequeue an item
- item := queue[0]
- queue = queue[1:]
-
- if m, ok := item.node.(map[string]interface{}); ok {
- // if the item is an object, then enqueue all of its children
- for k, v := range m {
- queue = append(queue, fieldMaskPathItem{path: append(item.path, generator.CamelCase(k)), node: v})
- }
- } else if len(item.path) > 0 {
- // otherwise, it's a leaf node so print its path
- fm.Paths = append(fm.Paths, strings.Join(item.path, "."))
- }
- }
-
- return fm, nil
-}
-
-// fieldMaskPathItem stores a in-progress deconstruction of a path for a fieldmask
-type fieldMaskPathItem struct {
- // the list of prior fields leading up to node
- path []string
-
- // a generic decoded json object the current item to inspect for further path extraction
- node interface{}
-}
-
-// CamelCaseFieldMask updates the given FieldMask by converting all of its paths to CamelCase, using the same heuristic
-// that's used for naming protobuf fields in Go.
-func CamelCaseFieldMask(mask *field_mask.FieldMask) {
- if mask == nil || mask.Paths == nil {
- return
- }
-
- var newPaths []string
- for _, path := range mask.Paths {
- lowerCasedParts := strings.Split(path, ".")
- var camelCasedParts []string
- for _, part := range lowerCasedParts {
- camelCasedParts = append(camelCasedParts, generator.CamelCase(part))
- }
- newPaths = append(newPaths, strings.Join(camelCasedParts, "."))
- }
-
- mask.Paths = newPaths
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler.go
deleted file mode 100644
index 1fc63f7f5..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/handler.go
+++ /dev/null
@@ -1,215 +0,0 @@
-package runtime
-
-import (
- "fmt"
- "io"
- "net/http"
- "net/textproto"
-
- "context"
- "github.com/golang/protobuf/proto"
- "github.com/golang/protobuf/ptypes/any"
- "github.com/grpc-ecosystem/grpc-gateway/internal"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/grpclog"
- "google.golang.org/grpc/status"
-)
-
-// ForwardResponseStream forwards the stream from gRPC server to REST client.
-func ForwardResponseStream(ctx context.Context, mux *ServeMux, marshaler Marshaler, w http.ResponseWriter, req *http.Request, recv func() (proto.Message, error), opts ...func(context.Context, http.ResponseWriter, proto.Message) error) {
- f, ok := w.(http.Flusher)
- if !ok {
- grpclog.Infof("Flush not supported in %T", w)
- http.Error(w, "unexpected type of web server", http.StatusInternalServerError)
- return
- }
-
- md, ok := ServerMetadataFromContext(ctx)
- if !ok {
- grpclog.Infof("Failed to extract ServerMetadata from context")
- http.Error(w, "unexpected error", http.StatusInternalServerError)
- return
- }
- handleForwardResponseServerMetadata(w, mux, md)
-
- w.Header().Set("Transfer-Encoding", "chunked")
- w.Header().Set("Content-Type", marshaler.ContentType())
- if err := handleForwardResponseOptions(ctx, w, nil, opts); err != nil {
- HTTPError(ctx, mux, marshaler, w, req, err)
- return
- }
-
- var delimiter []byte
- if d, ok := marshaler.(Delimited); ok {
- delimiter = d.Delimiter()
- } else {
- delimiter = []byte("\n")
- }
-
- var wroteHeader bool
- for {
- resp, err := recv()
- if err == io.EOF {
- return
- }
- if err != nil {
- handleForwardResponseStreamError(wroteHeader, marshaler, w, err)
- return
- }
- if err := handleForwardResponseOptions(ctx, w, resp, opts); err != nil {
- handleForwardResponseStreamError(wroteHeader, marshaler, w, err)
- return
- }
-
- buf, err := marshaler.Marshal(streamChunk(resp, nil))
- if err != nil {
- grpclog.Infof("Failed to marshal response chunk: %v", err)
- handleForwardResponseStreamError(wroteHeader, marshaler, w, err)
- return
- }
- if _, err = w.Write(buf); err != nil {
- grpclog.Infof("Failed to send response chunk: %v", err)
- return
- }
- wroteHeader = true
- if _, err = w.Write(delimiter); err != nil {
- grpclog.Infof("Failed to send delimiter chunk: %v", err)
- return
- }
- f.Flush()
- }
-}
-
-func handleForwardResponseServerMetadata(w http.ResponseWriter, mux *ServeMux, md ServerMetadata) {
- for k, vs := range md.HeaderMD {
- if h, ok := mux.outgoingHeaderMatcher(k); ok {
- for _, v := range vs {
- w.Header().Add(h, v)
- }
- }
- }
-}
-
-func handleForwardResponseTrailerHeader(w http.ResponseWriter, md ServerMetadata) {
- for k := range md.TrailerMD {
- tKey := textproto.CanonicalMIMEHeaderKey(fmt.Sprintf("%s%s", MetadataTrailerPrefix, k))
- w.Header().Add("Trailer", tKey)
- }
-}
-
-func handleForwardResponseTrailer(w http.ResponseWriter, md ServerMetadata) {
- for k, vs := range md.TrailerMD {
- tKey := fmt.Sprintf("%s%s", MetadataTrailerPrefix, k)
- for _, v := range vs {
- w.Header().Add(tKey, v)
- }
- }
-}
-
-// responseBody interface contains method for getting field for marshaling to the response body
-// this method is generated for response struct from the value of `response_body` in the `google.api.HttpRule`
-type responseBody interface {
- XXX_ResponseBody() interface{}
-}
-
-// ForwardResponseMessage forwards the message "resp" from gRPC server to REST client.
-func ForwardResponseMessage(ctx context.Context, mux *ServeMux, marshaler Marshaler, w http.ResponseWriter, req *http.Request, resp proto.Message, opts ...func(context.Context, http.ResponseWriter, proto.Message) error) {
- md, ok := ServerMetadataFromContext(ctx)
- if !ok {
- grpclog.Infof("Failed to extract ServerMetadata from context")
- }
-
- handleForwardResponseServerMetadata(w, mux, md)
- handleForwardResponseTrailerHeader(w, md)
-
- contentType := marshaler.ContentType()
- // Check marshaler on run time in order to keep backwards compatability
- // An interface param needs to be added to the ContentType() function on
- // the Marshal interface to be able to remove this check
- if httpBodyMarshaler, ok := marshaler.(*HTTPBodyMarshaler); ok {
- contentType = httpBodyMarshaler.ContentTypeFromMessage(resp)
- }
- w.Header().Set("Content-Type", contentType)
-
- if err := handleForwardResponseOptions(ctx, w, resp, opts); err != nil {
- HTTPError(ctx, mux, marshaler, w, req, err)
- return
- }
- var buf []byte
- var err error
- if rb, ok := resp.(responseBody); ok {
- buf, err = marshaler.Marshal(rb.XXX_ResponseBody())
- } else {
- buf, err = marshaler.Marshal(resp)
- }
- if err != nil {
- grpclog.Infof("Marshal error: %v", err)
- HTTPError(ctx, mux, marshaler, w, req, err)
- return
- }
-
- if _, err = w.Write(buf); err != nil {
- grpclog.Infof("Failed to write response: %v", err)
- }
-
- handleForwardResponseTrailer(w, md)
-}
-
-func handleForwardResponseOptions(ctx context.Context, w http.ResponseWriter, resp proto.Message, opts []func(context.Context, http.ResponseWriter, proto.Message) error) error {
- if len(opts) == 0 {
- return nil
- }
- for _, opt := range opts {
- if err := opt(ctx, w, resp); err != nil {
- grpclog.Infof("Error handling ForwardResponseOptions: %v", err)
- return err
- }
- }
- return nil
-}
-
-func handleForwardResponseStreamError(wroteHeader bool, marshaler Marshaler, w http.ResponseWriter, err error) {
- buf, merr := marshaler.Marshal(streamChunk(nil, err))
- if merr != nil {
- grpclog.Infof("Failed to marshal an error: %v", merr)
- return
- }
- if !wroteHeader {
- s, ok := status.FromError(err)
- if !ok {
- s = status.New(codes.Unknown, err.Error())
- }
- w.WriteHeader(HTTPStatusFromCode(s.Code()))
- }
- if _, werr := w.Write(buf); werr != nil {
- grpclog.Infof("Failed to notify error to client: %v", werr)
- return
- }
-}
-
-func streamChunk(result proto.Message, err error) map[string]proto.Message {
- if err != nil {
- grpcCode := codes.Unknown
- grpcMessage := err.Error()
- var grpcDetails []*any.Any
- if s, ok := status.FromError(err); ok {
- grpcCode = s.Code()
- grpcMessage = s.Message()
- grpcDetails = s.Proto().GetDetails()
- }
- httpCode := HTTPStatusFromCode(grpcCode)
- return map[string]proto.Message{
- "error": &internal.StreamError{
- GrpcCode: int32(grpcCode),
- HttpCode: int32(httpCode),
- Message: grpcMessage,
- HttpStatus: http.StatusText(httpCode),
- Details: grpcDetails,
- },
- }
- }
- if result == nil {
- return streamChunk(nil, fmt.Errorf("empty response"))
- }
- return map[string]proto.Message{"result": result}
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_httpbodyproto.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_httpbodyproto.go
deleted file mode 100644
index f55285b5d..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_httpbodyproto.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package runtime
-
-import (
- "google.golang.org/genproto/googleapis/api/httpbody"
-)
-
-// SetHTTPBodyMarshaler overwrite the default marshaler with the HTTPBodyMarshaler
-func SetHTTPBodyMarshaler(serveMux *ServeMux) {
- serveMux.marshalers.mimeMap[MIMEWildcard] = &HTTPBodyMarshaler{
- Marshaler: &JSONPb{OrigName: true},
- }
-}
-
-// HTTPBodyMarshaler is a Marshaler which supports marshaling of a
-// google.api.HttpBody message as the full response body if it is
-// the actual message used as the response. If not, then this will
-// simply fallback to the Marshaler specified as its default Marshaler.
-type HTTPBodyMarshaler struct {
- Marshaler
-}
-
-// ContentType implementation to keep backwards compatability with marshal interface
-func (h *HTTPBodyMarshaler) ContentType() string {
- return h.ContentTypeFromMessage(nil)
-}
-
-// ContentTypeFromMessage in case v is a google.api.HttpBody message it returns
-// its specified content type otherwise fall back to the default Marshaler.
-func (h *HTTPBodyMarshaler) ContentTypeFromMessage(v interface{}) string {
- if httpBody, ok := v.(*httpbody.HttpBody); ok {
- return httpBody.GetContentType()
- }
- return h.Marshaler.ContentType()
-}
-
-// Marshal marshals "v" by returning the body bytes if v is a
-// google.api.HttpBody message, otherwise it falls back to the default Marshaler.
-func (h *HTTPBodyMarshaler) Marshal(v interface{}) ([]byte, error) {
- if httpBody, ok := v.(*httpbody.HttpBody); ok {
- return httpBody.Data, nil
- }
- return h.Marshaler.Marshal(v)
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json.go
deleted file mode 100644
index f9d3a585a..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_json.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package runtime
-
-import (
- "encoding/json"
- "io"
-)
-
-// JSONBuiltin is a Marshaler which marshals/unmarshals into/from JSON
-// with the standard "encoding/json" package of Golang.
-// Although it is generally faster for simple proto messages than JSONPb,
-// it does not support advanced features of protobuf, e.g. map, oneof, ....
-//
-// The NewEncoder and NewDecoder types return *json.Encoder and
-// *json.Decoder respectively.
-type JSONBuiltin struct{}
-
-// ContentType always Returns "application/json".
-func (*JSONBuiltin) ContentType() string {
- return "application/json"
-}
-
-// Marshal marshals "v" into JSON
-func (j *JSONBuiltin) Marshal(v interface{}) ([]byte, error) {
- return json.Marshal(v)
-}
-
-// Unmarshal unmarshals JSON data into "v".
-func (j *JSONBuiltin) Unmarshal(data []byte, v interface{}) error {
- return json.Unmarshal(data, v)
-}
-
-// NewDecoder returns a Decoder which reads JSON stream from "r".
-func (j *JSONBuiltin) NewDecoder(r io.Reader) Decoder {
- return json.NewDecoder(r)
-}
-
-// NewEncoder returns an Encoder which writes JSON stream into "w".
-func (j *JSONBuiltin) NewEncoder(w io.Writer) Encoder {
- return json.NewEncoder(w)
-}
-
-// Delimiter for newline encoded JSON streams.
-func (j *JSONBuiltin) Delimiter() []byte {
- return []byte("\n")
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb.go
deleted file mode 100644
index 3530dddd0..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_jsonpb.go
+++ /dev/null
@@ -1,242 +0,0 @@
-package runtime
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
- "io"
- "reflect"
-
- "github.com/golang/protobuf/jsonpb"
- "github.com/golang/protobuf/proto"
-)
-
-// JSONPb is a Marshaler which marshals/unmarshals into/from JSON
-// with the "github.com/golang/protobuf/jsonpb".
-// It supports fully functionality of protobuf unlike JSONBuiltin.
-//
-// The NewDecoder method returns a DecoderWrapper, so the underlying
-// *json.Decoder methods can be used.
-type JSONPb jsonpb.Marshaler
-
-// ContentType always returns "application/json".
-func (*JSONPb) ContentType() string {
- return "application/json"
-}
-
-// Marshal marshals "v" into JSON.
-func (j *JSONPb) Marshal(v interface{}) ([]byte, error) {
- if _, ok := v.(proto.Message); !ok {
- return j.marshalNonProtoField(v)
- }
-
- var buf bytes.Buffer
- if err := j.marshalTo(&buf, v); err != nil {
- return nil, err
- }
- return buf.Bytes(), nil
-}
-
-func (j *JSONPb) marshalTo(w io.Writer, v interface{}) error {
- p, ok := v.(proto.Message)
- if !ok {
- buf, err := j.marshalNonProtoField(v)
- if err != nil {
- return err
- }
- _, err = w.Write(buf)
- return err
- }
- return (*jsonpb.Marshaler)(j).Marshal(w, p)
-}
-
-var (
- // protoMessageType is stored to prevent constant lookup of the same type at runtime.
- protoMessageType = reflect.TypeOf((*proto.Message)(nil)).Elem()
-)
-
-// marshalNonProto marshals a non-message field of a protobuf message.
-// This function does not correctly marshals arbitrary data structure into JSON,
-// but it is only capable of marshaling non-message field values of protobuf,
-// i.e. primitive types, enums; pointers to primitives or enums; maps from
-// integer/string types to primitives/enums/pointers to messages.
-func (j *JSONPb) marshalNonProtoField(v interface{}) ([]byte, error) {
- if v == nil {
- return []byte("null"), nil
- }
- rv := reflect.ValueOf(v)
- for rv.Kind() == reflect.Ptr {
- if rv.IsNil() {
- return []byte("null"), nil
- }
- rv = rv.Elem()
- }
-
- if rv.Kind() == reflect.Slice {
- if rv.IsNil() {
- if j.EmitDefaults {
- return []byte("[]"), nil
- }
- return []byte("null"), nil
- }
-
- if rv.Type().Elem().Implements(protoMessageType) {
- var buf bytes.Buffer
- err := buf.WriteByte('[')
- if err != nil {
- return nil, err
- }
- for i := 0; i < rv.Len(); i++ {
- if i != 0 {
- err = buf.WriteByte(',')
- if err != nil {
- return nil, err
- }
- }
- if err = (*jsonpb.Marshaler)(j).Marshal(&buf, rv.Index(i).Interface().(proto.Message)); err != nil {
- return nil, err
- }
- }
- err = buf.WriteByte(']')
- if err != nil {
- return nil, err
- }
-
- return buf.Bytes(), nil
- }
- }
-
- if rv.Kind() == reflect.Map {
- m := make(map[string]*json.RawMessage)
- for _, k := range rv.MapKeys() {
- buf, err := j.Marshal(rv.MapIndex(k).Interface())
- if err != nil {
- return nil, err
- }
- m[fmt.Sprintf("%v", k.Interface())] = (*json.RawMessage)(&buf)
- }
- if j.Indent != "" {
- return json.MarshalIndent(m, "", j.Indent)
- }
- return json.Marshal(m)
- }
- if enum, ok := rv.Interface().(protoEnum); ok && !j.EnumsAsInts {
- return json.Marshal(enum.String())
- }
- return json.Marshal(rv.Interface())
-}
-
-// Unmarshal unmarshals JSON "data" into "v"
-func (j *JSONPb) Unmarshal(data []byte, v interface{}) error {
- return unmarshalJSONPb(data, v)
-}
-
-// NewDecoder returns a Decoder which reads JSON stream from "r".
-func (j *JSONPb) NewDecoder(r io.Reader) Decoder {
- d := json.NewDecoder(r)
- return DecoderWrapper{Decoder: d}
-}
-
-// DecoderWrapper is a wrapper around a *json.Decoder that adds
-// support for protos to the Decode method.
-type DecoderWrapper struct {
- *json.Decoder
-}
-
-// Decode wraps the embedded decoder's Decode method to support
-// protos using a jsonpb.Unmarshaler.
-func (d DecoderWrapper) Decode(v interface{}) error {
- return decodeJSONPb(d.Decoder, v)
-}
-
-// NewEncoder returns an Encoder which writes JSON stream into "w".
-func (j *JSONPb) NewEncoder(w io.Writer) Encoder {
- return EncoderFunc(func(v interface{}) error { return j.marshalTo(w, v) })
-}
-
-func unmarshalJSONPb(data []byte, v interface{}) error {
- d := json.NewDecoder(bytes.NewReader(data))
- return decodeJSONPb(d, v)
-}
-
-func decodeJSONPb(d *json.Decoder, v interface{}) error {
- p, ok := v.(proto.Message)
- if !ok {
- return decodeNonProtoField(d, v)
- }
- unmarshaler := &jsonpb.Unmarshaler{AllowUnknownFields: true}
- return unmarshaler.UnmarshalNext(d, p)
-}
-
-func decodeNonProtoField(d *json.Decoder, v interface{}) error {
- rv := reflect.ValueOf(v)
- if rv.Kind() != reflect.Ptr {
- return fmt.Errorf("%T is not a pointer", v)
- }
- for rv.Kind() == reflect.Ptr {
- if rv.IsNil() {
- rv.Set(reflect.New(rv.Type().Elem()))
- }
- if rv.Type().ConvertibleTo(typeProtoMessage) {
- unmarshaler := &jsonpb.Unmarshaler{AllowUnknownFields: true}
- return unmarshaler.UnmarshalNext(d, rv.Interface().(proto.Message))
- }
- rv = rv.Elem()
- }
- if rv.Kind() == reflect.Map {
- if rv.IsNil() {
- rv.Set(reflect.MakeMap(rv.Type()))
- }
- conv, ok := convFromType[rv.Type().Key().Kind()]
- if !ok {
- return fmt.Errorf("unsupported type of map field key: %v", rv.Type().Key())
- }
-
- m := make(map[string]*json.RawMessage)
- if err := d.Decode(&m); err != nil {
- return err
- }
- for k, v := range m {
- result := conv.Call([]reflect.Value{reflect.ValueOf(k)})
- if err := result[1].Interface(); err != nil {
- return err.(error)
- }
- bk := result[0]
- bv := reflect.New(rv.Type().Elem())
- if err := unmarshalJSONPb([]byte(*v), bv.Interface()); err != nil {
- return err
- }
- rv.SetMapIndex(bk, bv.Elem())
- }
- return nil
- }
- if _, ok := rv.Interface().(protoEnum); ok {
- var repr interface{}
- if err := d.Decode(&repr); err != nil {
- return err
- }
- switch repr.(type) {
- case string:
- // TODO(yugui) Should use proto.StructProperties?
- return fmt.Errorf("unmarshaling of symbolic enum %q not supported: %T", repr, rv.Interface())
- case float64:
- rv.Set(reflect.ValueOf(int32(repr.(float64))).Convert(rv.Type()))
- return nil
- default:
- return fmt.Errorf("cannot assign %#v into Go type %T", repr, rv.Interface())
- }
- }
- return d.Decode(v)
-}
-
-type protoEnum interface {
- fmt.Stringer
- EnumDescriptor() ([]byte, []int)
-}
-
-var typeProtoMessage = reflect.TypeOf((*proto.Message)(nil)).Elem()
-
-// Delimiter for newline encoded JSON streams.
-func (j *JSONPb) Delimiter() []byte {
- return []byte("\n")
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_proto.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_proto.go
deleted file mode 100644
index f65d1a267..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshal_proto.go
+++ /dev/null
@@ -1,62 +0,0 @@
-package runtime
-
-import (
- "io"
-
- "errors"
- "github.com/golang/protobuf/proto"
- "io/ioutil"
-)
-
-// ProtoMarshaller is a Marshaller which marshals/unmarshals into/from serialize proto bytes
-type ProtoMarshaller struct{}
-
-// ContentType always returns "application/octet-stream".
-func (*ProtoMarshaller) ContentType() string {
- return "application/octet-stream"
-}
-
-// Marshal marshals "value" into Proto
-func (*ProtoMarshaller) Marshal(value interface{}) ([]byte, error) {
- message, ok := value.(proto.Message)
- if !ok {
- return nil, errors.New("unable to marshal non proto field")
- }
- return proto.Marshal(message)
-}
-
-// Unmarshal unmarshals proto "data" into "value"
-func (*ProtoMarshaller) Unmarshal(data []byte, value interface{}) error {
- message, ok := value.(proto.Message)
- if !ok {
- return errors.New("unable to unmarshal non proto field")
- }
- return proto.Unmarshal(data, message)
-}
-
-// NewDecoder returns a Decoder which reads proto stream from "reader".
-func (marshaller *ProtoMarshaller) NewDecoder(reader io.Reader) Decoder {
- return DecoderFunc(func(value interface{}) error {
- buffer, err := ioutil.ReadAll(reader)
- if err != nil {
- return err
- }
- return marshaller.Unmarshal(buffer, value)
- })
-}
-
-// NewEncoder returns an Encoder which writes proto stream into "writer".
-func (marshaller *ProtoMarshaller) NewEncoder(writer io.Writer) Encoder {
- return EncoderFunc(func(value interface{}) error {
- buffer, err := marshaller.Marshal(value)
- if err != nil {
- return err
- }
- _, err = writer.Write(buffer)
- if err != nil {
- return err
- }
-
- return nil
- })
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler.go
deleted file mode 100644
index 98fe6e88a..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package runtime
-
-import (
- "io"
-)
-
-// Marshaler defines a conversion between byte sequence and gRPC payloads / fields.
-type Marshaler interface {
- // Marshal marshals "v" into byte sequence.
- Marshal(v interface{}) ([]byte, error)
- // Unmarshal unmarshals "data" into "v".
- // "v" must be a pointer value.
- Unmarshal(data []byte, v interface{}) error
- // NewDecoder returns a Decoder which reads byte sequence from "r".
- NewDecoder(r io.Reader) Decoder
- // NewEncoder returns an Encoder which writes bytes sequence into "w".
- NewEncoder(w io.Writer) Encoder
- // ContentType returns the Content-Type which this marshaler is responsible for.
- ContentType() string
-}
-
-// Decoder decodes a byte sequence
-type Decoder interface {
- Decode(v interface{}) error
-}
-
-// Encoder encodes gRPC payloads / fields into byte sequence.
-type Encoder interface {
- Encode(v interface{}) error
-}
-
-// DecoderFunc adapts an decoder function into Decoder.
-type DecoderFunc func(v interface{}) error
-
-// Decode delegates invocations to the underlying function itself.
-func (f DecoderFunc) Decode(v interface{}) error { return f(v) }
-
-// EncoderFunc adapts an encoder function into Encoder
-type EncoderFunc func(v interface{}) error
-
-// Encode delegates invocations to the underlying function itself.
-func (f EncoderFunc) Encode(v interface{}) error { return f(v) }
-
-// Delimited defines the streaming delimiter.
-type Delimited interface {
- // Delimiter returns the record seperator for the stream.
- Delimiter() []byte
-}
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler_registry.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler_registry.go
deleted file mode 100644
index 5cc53ae4f..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/marshaler_registry.go
+++ /dev/null
@@ -1,91 +0,0 @@
-package runtime
-
-import (
- "errors"
- "net/http"
-)
-
-// MIMEWildcard is the fallback MIME type used for requests which do not match
-// a registered MIME type.
-const MIMEWildcard = "*"
-
-var (
- acceptHeader = http.CanonicalHeaderKey("Accept")
- contentTypeHeader = http.CanonicalHeaderKey("Content-Type")
-
- defaultMarshaler = &JSONPb{OrigName: true}
-)
-
-// MarshalerForRequest returns the inbound/outbound marshalers for this request.
-// It checks the registry on the ServeMux for the MIME type set by the Content-Type header.
-// If it isn't set (or the request Content-Type is empty), checks for "*".
-// If there are multiple Content-Type headers set, choose the first one that it can
-// exactly match in the registry.
-// Otherwise, it follows the above logic for "*"/InboundMarshaler/OutboundMarshaler.
-func MarshalerForRequest(mux *ServeMux, r *http.Request) (inbound Marshaler, outbound Marshaler) {
- for _, acceptVal := range r.Header[acceptHeader] {
- if m, ok := mux.marshalers.mimeMap[acceptVal]; ok {
- outbound = m
- break
- }
- }
-
- for _, contentTypeVal := range r.Header[contentTypeHeader] {
- if m, ok := mux.marshalers.mimeMap[contentTypeVal]; ok {
- inbound = m
- break
- }
- }
-
- if inbound == nil {
- inbound = mux.marshalers.mimeMap[MIMEWildcard]
- }
- if outbound == nil {
- outbound = inbound
- }
-
- return inbound, outbound
-}
-
-// marshalerRegistry is a mapping from MIME types to Marshalers.
-type marshalerRegistry struct {
- mimeMap map[string]Marshaler
-}
-
-// add adds a marshaler for a case-sensitive MIME type string ("*" to match any
-// MIME type).
-func (m marshalerRegistry) add(mime string, marshaler Marshaler) error {
- if len(mime) == 0 {
- return errors.New("empty MIME type")
- }
-
- m.mimeMap[mime] = marshaler
-
- return nil
-}
-
-// makeMarshalerMIMERegistry returns a new registry of marshalers.
-// It allows for a mapping of case-sensitive Content-Type MIME type string to runtime.Marshaler interfaces.
-//
-// For example, you could allow the client to specify the use of the runtime.JSONPb marshaler
-// with a "application/jsonpb" Content-Type and the use of the runtime.JSONBuiltin marshaler
-// with a "application/json" Content-Type.
-// "*" can be used to match any Content-Type.
-// This can be attached to a ServerMux with the marshaler option.
-func makeMarshalerMIMERegistry() marshalerRegistry {
- return marshalerRegistry{
- mimeMap: map[string]Marshaler{
- MIMEWildcard: defaultMarshaler,
- },
- }
-}
-
-// WithMarshalerOption returns a ServeMuxOption which associates inbound and outbound
-// Marshalers to a MIME type in mux.
-func WithMarshalerOption(mime string, marshaler Marshaler) ServeMuxOption {
- return func(mux *ServeMux) {
- if err := mux.marshalers.add(mime, marshaler); err != nil {
- panic(err)
- }
- }
-}
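
The registry deleted above is what runtime.WithMarshalerOption feeds into. A small usage sketch (not part of this change); the octet-stream registration and the explicit wildcard entry are illustrative, and the wildcard default is already JSONPb with OrigName set:

```go
package main

import (
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	// Requests whose Content-Type/Accept is application/octet-stream use the
	// proto marshaler; everything else keeps the "*" default (JSONPb).
	mux := runtime.NewServeMux(
		runtime.WithMarshalerOption("application/octet-stream", &runtime.ProtoMarshaller{}),
		runtime.WithMarshalerOption(runtime.MIMEWildcard, &runtime.JSONPb{OrigName: true}),
	)

	// Handlers generated by protoc-gen-grpc-gateway would be registered on mux here.
	log.Fatal(http.ListenAndServe(":8080", mux))
}
```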
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/mux.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/mux.go
deleted file mode 100644
index ec81e55b5..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/mux.go
+++ /dev/null
@@ -1,268 +0,0 @@
-package runtime
-
-import (
- "context"
- "fmt"
- "net/http"
- "net/textproto"
- "strings"
-
- "github.com/golang/protobuf/proto"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/metadata"
- "google.golang.org/grpc/status"
-)
-
-// A HandlerFunc handles a specific pair of path pattern and HTTP method.
-type HandlerFunc func(w http.ResponseWriter, r *http.Request, pathParams map[string]string)
-
-// ServeMux is a request multiplexer for grpc-gateway.
-// It matches http requests to patterns and invokes the corresponding handler.
-type ServeMux struct {
- // handlers maps HTTP method to a list of handlers.
- handlers map[string][]handler
- forwardResponseOptions []func(context.Context, http.ResponseWriter, proto.Message) error
- marshalers marshalerRegistry
- incomingHeaderMatcher HeaderMatcherFunc
- outgoingHeaderMatcher HeaderMatcherFunc
- metadataAnnotators []func(context.Context, *http.Request) metadata.MD
- protoErrorHandler ProtoErrorHandlerFunc
- disablePathLengthFallback bool
-}
-
-// ServeMuxOption is an option that can be given to a ServeMux on construction.
-type ServeMuxOption func(*ServeMux)
-
-// WithForwardResponseOption returns a ServeMuxOption representing the forwardResponseOption.
-//
-// forwardResponseOption is an option that will be called on the relevant context.Context,
-// http.ResponseWriter, and proto.Message before every forwarded response.
-//
-// The message may be nil in the case where just a header is being sent.
-func WithForwardResponseOption(forwardResponseOption func(context.Context, http.ResponseWriter, proto.Message) error) ServeMuxOption {
- return func(serveMux *ServeMux) {
- serveMux.forwardResponseOptions = append(serveMux.forwardResponseOptions, forwardResponseOption)
- }
-}
-
-// HeaderMatcherFunc checks whether a header key should be forwarded to/from gRPC context.
-type HeaderMatcherFunc func(string) (string, bool)
-
-// DefaultHeaderMatcher is used to pass http request headers to/from gRPC context. This adds permanent HTTP header
-// keys (as specified by the IANA) to gRPC context with grpcgateway- prefix. HTTP headers that start with
-// 'Grpc-Metadata-' are mapped to gRPC metadata after removing prefix 'Grpc-Metadata-'.
-func DefaultHeaderMatcher(key string) (string, bool) {
- key = textproto.CanonicalMIMEHeaderKey(key)
- if isPermanentHTTPHeader(key) {
- return MetadataPrefix + key, true
- } else if strings.HasPrefix(key, MetadataHeaderPrefix) {
- return key[len(MetadataHeaderPrefix):], true
- }
- return "", false
-}
-
-// WithIncomingHeaderMatcher returns a ServeMuxOption representing a headerMatcher for incoming request to gateway.
-//
-// This matcher will be called with each header in http.Request. If matcher returns true, that header will be
-// passed to gRPC context. To transform the header before passing to gRPC context, matcher should return modified header.
-func WithIncomingHeaderMatcher(fn HeaderMatcherFunc) ServeMuxOption {
- return func(mux *ServeMux) {
- mux.incomingHeaderMatcher = fn
- }
-}
-
-// WithOutgoingHeaderMatcher returns a ServeMuxOption representing a headerMatcher for outgoing response from gateway.
-//
-// This matcher will be called with each header in response header metadata. If matcher returns true, that header will be
-// passed to http response returned from gateway. To transform the header before passing to response,
-// matcher should return modified header.
-func WithOutgoingHeaderMatcher(fn HeaderMatcherFunc) ServeMuxOption {
- return func(mux *ServeMux) {
- mux.outgoingHeaderMatcher = fn
- }
-}
-
-// WithMetadata returns a ServeMuxOption for passing metadata to a gRPC context.
-//
-// This can be used by services that need to read from http.Request and modify gRPC context. A common use case
-// is reading token from cookie and adding it in gRPC context.
-func WithMetadata(annotator func(context.Context, *http.Request) metadata.MD) ServeMuxOption {
- return func(serveMux *ServeMux) {
- serveMux.metadataAnnotators = append(serveMux.metadataAnnotators, annotator)
- }
-}
-
-// WithProtoErrorHandler returns a ServeMuxOption for passing metadata to a gRPC context.
-//
-// This can be used to handle an error as general proto message defined by gRPC.
-// The response including body and status is not backward compatible with the default error handler.
-// When this option is used, HTTPError and OtherErrorHandler are overwritten on initialization.
-func WithProtoErrorHandler(fn ProtoErrorHandlerFunc) ServeMuxOption {
- return func(serveMux *ServeMux) {
- serveMux.protoErrorHandler = fn
- }
-}
-
-// WithDisablePathLengthFallback returns a ServeMuxOption for disable path length fallback.
-func WithDisablePathLengthFallback() ServeMuxOption {
- return func(serveMux *ServeMux) {
- serveMux.disablePathLengthFallback = true
- }
-}
-
-// NewServeMux returns a new ServeMux whose internal mapping is empty.
-func NewServeMux(opts ...ServeMuxOption) *ServeMux {
- serveMux := &ServeMux{
- handlers: make(map[string][]handler),
- forwardResponseOptions: make([]func(context.Context, http.ResponseWriter, proto.Message) error, 0),
- marshalers: makeMarshalerMIMERegistry(),
- }
-
- for _, opt := range opts {
- opt(serveMux)
- }
-
- if serveMux.protoErrorHandler != nil {
- HTTPError = serveMux.protoErrorHandler
- // OtherErrorHandler is no longer used when protoErrorHandler is set.
- // Overwritten by a special error handler to return Unknown.
- OtherErrorHandler = func(w http.ResponseWriter, r *http.Request, _ string, _ int) {
- ctx := context.Background()
- _, outboundMarshaler := MarshalerForRequest(serveMux, r)
- sterr := status.Error(codes.Unknown, "unexpected use of OtherErrorHandler")
- serveMux.protoErrorHandler(ctx, serveMux, outboundMarshaler, w, r, sterr)
- }
- }
-
- if serveMux.incomingHeaderMatcher == nil {
- serveMux.incomingHeaderMatcher = DefaultHeaderMatcher
- }
-
- if serveMux.outgoingHeaderMatcher == nil {
- serveMux.outgoingHeaderMatcher = func(key string) (string, bool) {
- return fmt.Sprintf("%s%s", MetadataHeaderPrefix, key), true
- }
- }
-
- return serveMux
-}
-
-// Handle associates "h" to the pair of HTTP method and path pattern.
-func (s *ServeMux) Handle(meth string, pat Pattern, h HandlerFunc) {
- s.handlers[meth] = append(s.handlers[meth], handler{pat: pat, h: h})
-}
-
-// ServeHTTP dispatches the request to the first handler whose pattern matches to r.Method and r.Path.
-func (s *ServeMux) ServeHTTP(w http.ResponseWriter, r *http.Request) {
- ctx := r.Context()
-
- path := r.URL.Path
- if !strings.HasPrefix(path, "/") {
- if s.protoErrorHandler != nil {
- _, outboundMarshaler := MarshalerForRequest(s, r)
- sterr := status.Error(codes.InvalidArgument, http.StatusText(http.StatusBadRequest))
- s.protoErrorHandler(ctx, s, outboundMarshaler, w, r, sterr)
- } else {
- OtherErrorHandler(w, r, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
- }
- return
- }
-
- components := strings.Split(path[1:], "/")
- l := len(components)
- var verb string
- if idx := strings.LastIndex(components[l-1], ":"); idx == 0 {
- if s.protoErrorHandler != nil {
- _, outboundMarshaler := MarshalerForRequest(s, r)
- sterr := status.Error(codes.Unimplemented, http.StatusText(http.StatusNotImplemented))
- s.protoErrorHandler(ctx, s, outboundMarshaler, w, r, sterr)
- } else {
- OtherErrorHandler(w, r, http.StatusText(http.StatusNotFound), http.StatusNotFound)
- }
- return
- } else if idx > 0 {
- c := components[l-1]
- components[l-1], verb = c[:idx], c[idx+1:]
- }
-
- if override := r.Header.Get("X-HTTP-Method-Override"); override != "" && s.isPathLengthFallback(r) {
- r.Method = strings.ToUpper(override)
- if err := r.ParseForm(); err != nil {
- if s.protoErrorHandler != nil {
- _, outboundMarshaler := MarshalerForRequest(s, r)
- sterr := status.Error(codes.InvalidArgument, err.Error())
- s.protoErrorHandler(ctx, s, outboundMarshaler, w, r, sterr)
- } else {
- OtherErrorHandler(w, r, err.Error(), http.StatusBadRequest)
- }
- return
- }
- }
- for _, h := range s.handlers[r.Method] {
- pathParams, err := h.pat.Match(components, verb)
- if err != nil {
- continue
- }
- h.h(w, r, pathParams)
- return
- }
-
- // lookup other methods to handle fallback from GET to POST and
- // to determine if it is MethodNotAllowed or NotFound.
- for m, handlers := range s.handlers {
- if m == r.Method {
- continue
- }
- for _, h := range handlers {
- pathParams, err := h.pat.Match(components, verb)
- if err != nil {
- continue
- }
- // X-HTTP-Method-Override is optional. Always allow fallback to POST.
- if s.isPathLengthFallback(r) {
- if err := r.ParseForm(); err != nil {
- if s.protoErrorHandler != nil {
- _, outboundMarshaler := MarshalerForRequest(s, r)
- sterr := status.Error(codes.InvalidArgument, err.Error())
- s.protoErrorHandler(ctx, s, outboundMarshaler, w, r, sterr)
- } else {
- OtherErrorHandler(w, r, err.Error(), http.StatusBadRequest)
- }
- return
- }
- h.h(w, r, pathParams)
- return
- }
- if s.protoErrorHandler != nil {
- _, outboundMarshaler := MarshalerForRequest(s, r)
- sterr := status.Error(codes.Unimplemented, http.StatusText(http.StatusMethodNotAllowed))
- s.protoErrorHandler(ctx, s, outboundMarshaler, w, r, sterr)
- } else {
- OtherErrorHandler(w, r, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
- }
- return
- }
- }
-
- if s.protoErrorHandler != nil {
- _, outboundMarshaler := MarshalerForRequest(s, r)
- sterr := status.Error(codes.Unimplemented, http.StatusText(http.StatusNotImplemented))
- s.protoErrorHandler(ctx, s, outboundMarshaler, w, r, sterr)
- } else {
- OtherErrorHandler(w, r, http.StatusText(http.StatusNotFound), http.StatusNotFound)
- }
-}
-
-// GetForwardResponseOptions returns the ForwardResponseOptions associated with this ServeMux.
-func (s *ServeMux) GetForwardResponseOptions() []func(context.Context, http.ResponseWriter, proto.Message) error {
- return s.forwardResponseOptions
-}
-
-func (s *ServeMux) isPathLengthFallback(r *http.Request) bool {
- return !s.disablePathLengthFallback && r.Method == "POST" && r.Header.Get("Content-Type") == "application/x-www-form-urlencoded"
-}
-
-type handler struct {
- pat Pattern
- h HandlerFunc
-}
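
For context on the ServeMux options removed above, a hedged sketch of constructing a gateway mux with a custom incoming header matcher layered over DefaultHeaderMatcher; the header name and listen address are placeholders:

```go
package main

import (
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	// Forward X-Request-Id into gRPC metadata as-is and defer every other
	// header to the default permanent-header / Grpc-Metadata- handling.
	matcher := func(key string) (string, bool) {
		if key == "X-Request-Id" {
			return key, true
		}
		return runtime.DefaultHeaderMatcher(key)
	}

	mux := runtime.NewServeMux(runtime.WithIncomingHeaderMatcher(matcher))

	log.Fatal(http.ListenAndServe(":8080", mux))
}
```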
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/pattern.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/pattern.go
deleted file mode 100644
index f16a84ad3..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/pattern.go
+++ /dev/null
@@ -1,227 +0,0 @@
-package runtime
-
-import (
- "errors"
- "fmt"
- "strings"
-
- "github.com/grpc-ecosystem/grpc-gateway/utilities"
- "google.golang.org/grpc/grpclog"
-)
-
-var (
- // ErrNotMatch indicates that the given HTTP request path does not match to the pattern.
- ErrNotMatch = errors.New("not match to the path pattern")
- // ErrInvalidPattern indicates that the given definition of Pattern is not valid.
- ErrInvalidPattern = errors.New("invalid pattern")
-)
-
-type op struct {
- code utilities.OpCode
- operand int
-}
-
-// Pattern is a template pattern of http request paths defined in github.com/googleapis/googleapis/google/api/http.proto.
-type Pattern struct {
- // ops is a list of operations
- ops []op
- // pool is a constant pool indexed by the operands or vars.
- pool []string
- // vars is a list of variables names to be bound by this pattern
- vars []string
- // stacksize is the max depth of the stack
- stacksize int
- // tailLen is the length of the fixed-size segments after a deep wildcard
- tailLen int
- // verb is the VERB part of the path pattern. It is empty if the pattern does not have VERB part.
- verb string
-}
-
-// NewPattern returns a new Pattern from the given definition values.
-// "ops" is a sequence of op codes. "pool" is a constant pool.
-// "verb" is the verb part of the pattern. It is empty if the pattern does not have the part.
-// "version" must be 1 for now.
-// It returns an error if the given definition is invalid.
-func NewPattern(version int, ops []int, pool []string, verb string) (Pattern, error) {
- if version != 1 {
- grpclog.Infof("unsupported version: %d", version)
- return Pattern{}, ErrInvalidPattern
- }
-
- l := len(ops)
- if l%2 != 0 {
- grpclog.Infof("odd number of ops codes: %d", l)
- return Pattern{}, ErrInvalidPattern
- }
-
- var (
- typedOps []op
- stack, maxstack int
- tailLen int
- pushMSeen bool
- vars []string
- )
- for i := 0; i < l; i += 2 {
- op := op{code: utilities.OpCode(ops[i]), operand: ops[i+1]}
- switch op.code {
- case utilities.OpNop:
- continue
- case utilities.OpPush:
- if pushMSeen {
- tailLen++
- }
- stack++
- case utilities.OpPushM:
- if pushMSeen {
- grpclog.Infof("pushM appears twice")
- return Pattern{}, ErrInvalidPattern
- }
- pushMSeen = true
- stack++
- case utilities.OpLitPush:
- if op.operand < 0 || len(pool) <= op.operand {
- grpclog.Infof("negative literal index: %d", op.operand)
- return Pattern{}, ErrInvalidPattern
- }
- if pushMSeen {
- tailLen++
- }
- stack++
- case utilities.OpConcatN:
- if op.operand <= 0 {
- grpclog.Infof("negative concat size: %d", op.operand)
- return Pattern{}, ErrInvalidPattern
- }
- stack -= op.operand
- if stack < 0 {
- grpclog.Print("stack underflow")
- return Pattern{}, ErrInvalidPattern
- }
- stack++
- case utilities.OpCapture:
- if op.operand < 0 || len(pool) <= op.operand {
- grpclog.Infof("variable name index out of bound: %d", op.operand)
- return Pattern{}, ErrInvalidPattern
- }
- v := pool[op.operand]
- op.operand = len(vars)
- vars = append(vars, v)
- stack--
- if stack < 0 {
- grpclog.Infof("stack underflow")
- return Pattern{}, ErrInvalidPattern
- }
- default:
- grpclog.Infof("invalid opcode: %d", op.code)
- return Pattern{}, ErrInvalidPattern
- }
-
- if maxstack < stack {
- maxstack = stack
- }
- typedOps = append(typedOps, op)
- }
- return Pattern{
- ops: typedOps,
- pool: pool,
- vars: vars,
- stacksize: maxstack,
- tailLen: tailLen,
- verb: verb,
- }, nil
-}
-
-// MustPattern is a helper function which makes it easier to call NewPattern in variable initialization.
-func MustPattern(p Pattern, err error) Pattern {
- if err != nil {
- grpclog.Fatalf("Pattern initialization failed: %v", err)
- }
- return p
-}
-
-// Match examines components if it matches to the Pattern.
-// If it matches, the function returns a mapping from field paths to their captured values.
-// If otherwise, the function returns an error.
-func (p Pattern) Match(components []string, verb string) (map[string]string, error) {
- if p.verb != verb {
- return nil, ErrNotMatch
- }
-
- var pos int
- stack := make([]string, 0, p.stacksize)
- captured := make([]string, len(p.vars))
- l := len(components)
- for _, op := range p.ops {
- switch op.code {
- case utilities.OpNop:
- continue
- case utilities.OpPush, utilities.OpLitPush:
- if pos >= l {
- return nil, ErrNotMatch
- }
- c := components[pos]
- if op.code == utilities.OpLitPush {
- if lit := p.pool[op.operand]; c != lit {
- return nil, ErrNotMatch
- }
- }
- stack = append(stack, c)
- pos++
- case utilities.OpPushM:
- end := len(components)
- if end < pos+p.tailLen {
- return nil, ErrNotMatch
- }
- end -= p.tailLen
- stack = append(stack, strings.Join(components[pos:end], "/"))
- pos = end
- case utilities.OpConcatN:
- n := op.operand
- l := len(stack) - n
- stack = append(stack[:l], strings.Join(stack[l:], "/"))
- case utilities.OpCapture:
- n := len(stack) - 1
- captured[op.operand] = stack[n]
- stack = stack[:n]
- }
- }
- if pos < l {
- return nil, ErrNotMatch
- }
- bindings := make(map[string]string)
- for i, val := range captured {
- bindings[p.vars[i]] = val
- }
- return bindings, nil
-}
-
-// Verb returns the verb part of the Pattern.
-func (p Pattern) Verb() string { return p.verb }
-
-func (p Pattern) String() string {
- var stack []string
- for _, op := range p.ops {
- switch op.code {
- case utilities.OpNop:
- continue
- case utilities.OpPush:
- stack = append(stack, "*")
- case utilities.OpLitPush:
- stack = append(stack, p.pool[op.operand])
- case utilities.OpPushM:
- stack = append(stack, "**")
- case utilities.OpConcatN:
- n := op.operand
- l := len(stack) - n
- stack = append(stack[:l], strings.Join(stack[l:], "/"))
- case utilities.OpCapture:
- n := len(stack) - 1
- stack[n] = fmt.Sprintf("{%s=%s}", p.vars[op.operand], stack[n])
- }
- }
- segs := strings.Join(stack, "/")
- if p.verb != "" {
- return fmt.Sprintf("/%s:%s", segs, p.verb)
- }
- return "/" + segs
-}
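
The opcode-based Pattern removed above is normally emitted by the grpc-gateway code generator rather than written by hand. A sketch of roughly the form it takes for a path such as /v1/users/{id}, using the opcodes from the vendored utilities package; the constant pool and request path are illustrative:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"github.com/grpc-ecosystem/grpc-gateway/utilities"
)

func main() {
	// Push the literals "v1" and "users", push one wildcard component, then
	// concatenate and capture it into the variable "id".
	pattern := runtime.MustPattern(runtime.NewPattern(
		1, // version must be 1
		[]int{
			int(utilities.OpLitPush), 0,
			int(utilities.OpLitPush), 1,
			int(utilities.OpPush), 0,
			int(utilities.OpConcatN), 1,
			int(utilities.OpCapture), 2,
		},
		[]string{"v1", "users", "id"}, // constant pool
		"",                            // no verb suffix
	))

	components := strings.Split("v1/users/42", "/")
	bindings, err := pattern.Match(components, "")
	if err != nil {
		panic(err)
	}
	fmt.Println(bindings["id"]) // "42"
}
```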
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto2_convert.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto2_convert.go
deleted file mode 100644
index a3151e2a5..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto2_convert.go
+++ /dev/null
@@ -1,80 +0,0 @@
-package runtime
-
-import (
- "github.com/golang/protobuf/proto"
-)
-
-// StringP returns a pointer to a string whose pointee is same as the given string value.
-func StringP(val string) (*string, error) {
- return proto.String(val), nil
-}
-
-// BoolP parses the given string representation of a boolean value,
-// and returns a pointer to a bool whose value is same as the parsed value.
-func BoolP(val string) (*bool, error) {
- b, err := Bool(val)
- if err != nil {
- return nil, err
- }
- return proto.Bool(b), nil
-}
-
-// Float64P parses the given string representation of a floating point number,
-// and returns a pointer to a float64 whose value is same as the parsed number.
-func Float64P(val string) (*float64, error) {
- f, err := Float64(val)
- if err != nil {
- return nil, err
- }
- return proto.Float64(f), nil
-}
-
-// Float32P parses the given string representation of a floating point number,
-// and returns a pointer to a float32 whose value is same as the parsed number.
-func Float32P(val string) (*float32, error) {
- f, err := Float32(val)
- if err != nil {
- return nil, err
- }
- return proto.Float32(f), nil
-}
-
-// Int64P parses the given string representation of an integer
-// and returns a pointer to a int64 whose value is same as the parsed integer.
-func Int64P(val string) (*int64, error) {
- i, err := Int64(val)
- if err != nil {
- return nil, err
- }
- return proto.Int64(i), nil
-}
-
-// Int32P parses the given string representation of an integer
-// and returns a pointer to a int32 whose value is same as the parsed integer.
-func Int32P(val string) (*int32, error) {
- i, err := Int32(val)
- if err != nil {
- return nil, err
- }
- return proto.Int32(i), err
-}
-
-// Uint64P parses the given string representation of an integer
-// and returns a pointer to a uint64 whose value is same as the parsed integer.
-func Uint64P(val string) (*uint64, error) {
- i, err := Uint64(val)
- if err != nil {
- return nil, err
- }
- return proto.Uint64(i), err
-}
-
-// Uint32P parses the given string representation of an integer
-// and returns a pointer to a uint32 whose value is same as the parsed integer.
-func Uint32P(val string) (*uint32, error) {
- i, err := Uint32(val)
- if err != nil {
- return nil, err
- }
- return proto.Uint32(i), err
-}
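
These helpers return pointers because optional fields are represented as pointer fields in the generated Go structs. A tiny illustrative sketch, not part of this change:

```go
package main

import (
	"fmt"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	// Each helper parses the string form of a query or path parameter and
	// returns a pointer suitable for an optional (pointer) message field.
	enabled, err := runtime.BoolP("true")
	if err != nil {
		panic(err)
	}
	limit, err := runtime.Int32P("25")
	if err != nil {
		panic(err)
	}
	fmt.Println(*enabled, *limit) // true 25
}
```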
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto_errors.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto_errors.go
deleted file mode 100644
index b7fa32e45..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/proto_errors.go
+++ /dev/null
@@ -1,70 +0,0 @@
-package runtime
-
-import (
- "io"
- "net/http"
-
- "context"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/grpclog"
- "google.golang.org/grpc/status"
-)
-
-// ProtoErrorHandlerFunc handles the error as a gRPC error generated via status package and replies to the request.
-type ProtoErrorHandlerFunc func(context.Context, *ServeMux, Marshaler, http.ResponseWriter, *http.Request, error)
-
-var _ ProtoErrorHandlerFunc = DefaultHTTPProtoErrorHandler
-
-// DefaultHTTPProtoErrorHandler is an implementation of HTTPError.
-// If "err" is an error from gRPC system, the function replies with the status code mapped by HTTPStatusFromCode.
-// If otherwise, it replies with http.StatusInternalServerError.
-//
-// The response body returned by this function is a Status message marshaled by a Marshaler.
-//
-// Do not set this function to HTTPError variable directly, use WithProtoErrorHandler option instead.
-func DefaultHTTPProtoErrorHandler(ctx context.Context, mux *ServeMux, marshaler Marshaler, w http.ResponseWriter, _ *http.Request, err error) {
- // return Internal when Marshal failed
- const fallback = `{"code": 13, "message": "failed to marshal error message"}`
-
- s, ok := status.FromError(err)
- if !ok {
- s = status.New(codes.Unknown, err.Error())
- }
-
- w.Header().Del("Trailer")
-
- contentType := marshaler.ContentType()
- // Check marshaler on run time in order to keep backwards compatability
- // An interface param needs to be added to the ContentType() function on
- // the Marshal interface to be able to remove this check
- if httpBodyMarshaler, ok := marshaler.(*HTTPBodyMarshaler); ok {
- pb := s.Proto()
- contentType = httpBodyMarshaler.ContentTypeFromMessage(pb)
- }
- w.Header().Set("Content-Type", contentType)
-
- buf, merr := marshaler.Marshal(s.Proto())
- if merr != nil {
- grpclog.Infof("Failed to marshal error message %q: %v", s.Proto(), merr)
- w.WriteHeader(http.StatusInternalServerError)
- if _, err := io.WriteString(w, fallback); err != nil {
- grpclog.Infof("Failed to write response: %v", err)
- }
- return
- }
-
- md, ok := ServerMetadataFromContext(ctx)
- if !ok {
- grpclog.Infof("Failed to extract ServerMetadata from context")
- }
-
- handleForwardResponseServerMetadata(w, mux, md)
- handleForwardResponseTrailerHeader(w, md)
- st := HTTPStatusFromCode(s.Code())
- w.WriteHeader(st)
- if _, err := w.Write(buf); err != nil {
- grpclog.Infof("Failed to write response: %v", err)
- }
-
- handleForwardResponseTrailer(w, md)
-}
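
As the doc comment above notes, DefaultHTTPProtoErrorHandler is meant to be installed through WithProtoErrorHandler rather than assigned to HTTPError directly. A minimal sketch; the listen address is a placeholder:

```go
package main

import (
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	// Errors are rendered as a marshaled google.rpc.Status body instead of
	// the legacy default error shape.
	mux := runtime.NewServeMux(
		runtime.WithProtoErrorHandler(runtime.DefaultHTTPProtoErrorHandler),
	)

	log.Fatal(http.ListenAndServe(":8080", mux))
}
```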
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query.go
deleted file mode 100644
index bb9359f17..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/runtime/query.go
+++ /dev/null
@@ -1,392 +0,0 @@
-package runtime
-
-import (
- "encoding/base64"
- "fmt"
- "net/url"
- "reflect"
- "regexp"
- "strconv"
- "strings"
- "time"
-
- "github.com/golang/protobuf/proto"
- "github.com/grpc-ecosystem/grpc-gateway/utilities"
- "google.golang.org/grpc/grpclog"
-)
-
-// PopulateQueryParameters populates "values" into "msg".
-// A value is ignored if its key starts with one of the elements in "filter".
-func PopulateQueryParameters(msg proto.Message, values url.Values, filter *utilities.DoubleArray) error {
- for key, values := range values {
- re, err := regexp.Compile("^(.*)\\[(.*)\\]$")
- if err != nil {
- return err
- }
- match := re.FindStringSubmatch(key)
- if len(match) == 3 {
- key = match[1]
- values = append([]string{match[2]}, values...)
- }
- fieldPath := strings.Split(key, ".")
- if filter.HasCommonPrefix(fieldPath) {
- continue
- }
- if err := populateFieldValueFromPath(msg, fieldPath, values); err != nil {
- return err
- }
- }
- return nil
-}
-
-// PopulateFieldFromPath sets a value in a nested Protobuf structure.
-// It instantiates missing protobuf fields as it goes.
-func PopulateFieldFromPath(msg proto.Message, fieldPathString string, value string) error {
- fieldPath := strings.Split(fieldPathString, ".")
- return populateFieldValueFromPath(msg, fieldPath, []string{value})
-}
-
-func populateFieldValueFromPath(msg proto.Message, fieldPath []string, values []string) error {
- m := reflect.ValueOf(msg)
- if m.Kind() != reflect.Ptr {
- return fmt.Errorf("unexpected type %T: %v", msg, msg)
- }
- var props *proto.Properties
- m = m.Elem()
- for i, fieldName := range fieldPath {
- isLast := i == len(fieldPath)-1
- if !isLast && m.Kind() != reflect.Struct {
- return fmt.Errorf("non-aggregate type in the mid of path: %s", strings.Join(fieldPath, "."))
- }
- var f reflect.Value
- var err error
- f, props, err = fieldByProtoName(m, fieldName)
- if err != nil {
- return err
- } else if !f.IsValid() {
- grpclog.Infof("field not found in %T: %s", msg, strings.Join(fieldPath, "."))
- return nil
- }
-
- switch f.Kind() {
- case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64, reflect.String, reflect.Uint32, reflect.Uint64:
- if !isLast {
- return fmt.Errorf("unexpected nested field %s in %s", fieldPath[i+1], strings.Join(fieldPath[:i+1], "."))
- }
- m = f
- case reflect.Slice:
- if !isLast {
- return fmt.Errorf("unexpected repeated field in %s", strings.Join(fieldPath, "."))
- }
- // Handle []byte
- if f.Type().Elem().Kind() == reflect.Uint8 {
- m = f
- break
- }
- return populateRepeatedField(f, values, props)
- case reflect.Ptr:
- if f.IsNil() {
- m = reflect.New(f.Type().Elem())
- f.Set(m.Convert(f.Type()))
- }
- m = f.Elem()
- continue
- case reflect.Struct:
- m = f
- continue
- case reflect.Map:
- if !isLast {
- return fmt.Errorf("unexpected nested field %s in %s", fieldPath[i+1], strings.Join(fieldPath[:i+1], "."))
- }
- return populateMapField(f, values, props)
- default:
- return fmt.Errorf("unexpected type %s in %T", f.Type(), msg)
- }
- }
- switch len(values) {
- case 0:
- return fmt.Errorf("no value of field: %s", strings.Join(fieldPath, "."))
- case 1:
- default:
- grpclog.Infof("too many field values: %s", strings.Join(fieldPath, "."))
- }
- return populateField(m, values[0], props)
-}
-
-// fieldByProtoName looks up a field whose corresponding protobuf field name is "name".
-// "m" must be a struct value. It returns zero reflect.Value if no such field found.
-func fieldByProtoName(m reflect.Value, name string) (reflect.Value, *proto.Properties, error) {
- props := proto.GetProperties(m.Type())
-
- // look up field name in oneof map
- if op, ok := props.OneofTypes[name]; ok {
- v := reflect.New(op.Type.Elem())
- field := m.Field(op.Field)
- if !field.IsNil() {
- return reflect.Value{}, nil, fmt.Errorf("field already set for %s oneof", props.Prop[op.Field].OrigName)
- }
- field.Set(v)
- return v.Elem().Field(0), op.Prop, nil
- }
-
- for _, p := range props.Prop {
- if p.OrigName == name {
- return m.FieldByName(p.Name), p, nil
- }
- if p.JSONName == name {
- return m.FieldByName(p.Name), p, nil
- }
- }
- return reflect.Value{}, nil, nil
-}
-
-func populateMapField(f reflect.Value, values []string, props *proto.Properties) error {
- if len(values) != 2 {
- return fmt.Errorf("more than one value provided for key %s in map %s", values[0], props.Name)
- }
-
- key, value := values[0], values[1]
- keyType := f.Type().Key()
- valueType := f.Type().Elem()
- if f.IsNil() {
- f.Set(reflect.MakeMap(f.Type()))
- }
-
- keyConv, ok := convFromType[keyType.Kind()]
- if !ok {
- return fmt.Errorf("unsupported key type %s in map %s", keyType, props.Name)
- }
- valueConv, ok := convFromType[valueType.Kind()]
- if !ok {
- return fmt.Errorf("unsupported value type %s in map %s", valueType, props.Name)
- }
-
- keyV := keyConv.Call([]reflect.Value{reflect.ValueOf(key)})
- if err := keyV[1].Interface(); err != nil {
- return err.(error)
- }
- valueV := valueConv.Call([]reflect.Value{reflect.ValueOf(value)})
- if err := valueV[1].Interface(); err != nil {
- return err.(error)
- }
-
- f.SetMapIndex(keyV[0].Convert(keyType), valueV[0].Convert(valueType))
-
- return nil
-}
-
-func populateRepeatedField(f reflect.Value, values []string, props *proto.Properties) error {
- elemType := f.Type().Elem()
-
- // is the destination field a slice of an enumeration type?
- if enumValMap := proto.EnumValueMap(props.Enum); enumValMap != nil {
- return populateFieldEnumRepeated(f, values, enumValMap)
- }
-
- conv, ok := convFromType[elemType.Kind()]
- if !ok {
- return fmt.Errorf("unsupported field type %s", elemType)
- }
- f.Set(reflect.MakeSlice(f.Type(), len(values), len(values)).Convert(f.Type()))
- for i, v := range values {
- result := conv.Call([]reflect.Value{reflect.ValueOf(v)})
- if err := result[1].Interface(); err != nil {
- return err.(error)
- }
- f.Index(i).Set(result[0].Convert(f.Index(i).Type()))
- }
- return nil
-}
-
-func populateField(f reflect.Value, value string, props *proto.Properties) error {
- i := f.Addr().Interface()
-
- // Handle protobuf well known types
- type wkt interface {
- XXX_WellKnownType() string
- }
- if wkt, ok := i.(wkt); ok {
- switch wkt.XXX_WellKnownType() {
- case "Timestamp":
- if value == "null" {
- f.Field(0).SetInt(0)
- f.Field(1).SetInt(0)
- return nil
- }
-
- t, err := time.Parse(time.RFC3339Nano, value)
- if err != nil {
- return fmt.Errorf("bad Timestamp: %v", err)
- }
- f.Field(0).SetInt(int64(t.Unix()))
- f.Field(1).SetInt(int64(t.Nanosecond()))
- return nil
- case "Duration":
- if value == "null" {
- f.Field(0).SetInt(0)
- f.Field(1).SetInt(0)
- return nil
- }
- d, err := time.ParseDuration(value)
- if err != nil {
- return fmt.Errorf("bad Duration: %v", err)
- }
-
- ns := d.Nanoseconds()
- s := ns / 1e9
- ns %= 1e9
- f.Field(0).SetInt(s)
- f.Field(1).SetInt(ns)
- return nil
- case "DoubleValue":
- fallthrough
- case "FloatValue":
- float64Val, err := strconv.ParseFloat(value, 64)
- if err != nil {
- return fmt.Errorf("bad DoubleValue: %s", value)
- }
- f.Field(0).SetFloat(float64Val)
- return nil
- case "Int64Value":
- fallthrough
- case "Int32Value":
- int64Val, err := strconv.ParseInt(value, 10, 64)
- if err != nil {
- return fmt.Errorf("bad DoubleValue: %s", value)
- }
- f.Field(0).SetInt(int64Val)
- return nil
- case "UInt64Value":
- fallthrough
- case "UInt32Value":
- uint64Val, err := strconv.ParseUint(value, 10, 64)
- if err != nil {
- return fmt.Errorf("bad DoubleValue: %s", value)
- }
- f.Field(0).SetUint(uint64Val)
- return nil
- case "BoolValue":
- if value == "true" {
- f.Field(0).SetBool(true)
- } else if value == "false" {
- f.Field(0).SetBool(false)
- } else {
- return fmt.Errorf("bad BoolValue: %s", value)
- }
- return nil
- case "StringValue":
- f.Field(0).SetString(value)
- return nil
- case "BytesValue":
- bytesVal, err := base64.StdEncoding.DecodeString(value)
- if err != nil {
- return fmt.Errorf("bad BytesValue: %s", value)
- }
- f.Field(0).SetBytes(bytesVal)
- return nil
- }
- }
-
- // Handle google well known types
- if gwkt, ok := i.(proto.Message); ok {
- switch proto.MessageName(gwkt) {
- case "google.protobuf.FieldMask":
- p := f.Field(0)
- for _, v := range strings.Split(value, ",") {
- if v != "" {
- p.Set(reflect.Append(p, reflect.ValueOf(v)))
- }
- }
- return nil
- }
- }
-
- // Handle Time and Duration stdlib types
- switch t := i.(type) {
- case *time.Time:
- pt, err := time.Parse(time.RFC3339Nano, value)
- if err != nil {
- return fmt.Errorf("bad Timestamp: %v", err)
- }
- *t = pt
- return nil
- case *time.Duration:
- d, err := time.ParseDuration(value)
- if err != nil {
- return fmt.Errorf("bad Duration: %v", err)
- }
- *t = d
- return nil
- }
-
- // is the destination field an enumeration type?
- if enumValMap := proto.EnumValueMap(props.Enum); enumValMap != nil {
- return populateFieldEnum(f, value, enumValMap)
- }
-
- conv, ok := convFromType[f.Kind()]
- if !ok {
- return fmt.Errorf("field type %T is not supported in query parameters", i)
- }
- result := conv.Call([]reflect.Value{reflect.ValueOf(value)})
- if err := result[1].Interface(); err != nil {
- return err.(error)
- }
- f.Set(result[0].Convert(f.Type()))
- return nil
-}
-
-func convertEnum(value string, t reflect.Type, enumValMap map[string]int32) (reflect.Value, error) {
- // see if it's an enumeration string
- if enumVal, ok := enumValMap[value]; ok {
- return reflect.ValueOf(enumVal).Convert(t), nil
- }
-
- // check for an integer that matches an enumeration value
- eVal, err := strconv.Atoi(value)
- if err != nil {
- return reflect.Value{}, fmt.Errorf("%s is not a valid %s", value, t)
- }
- for _, v := range enumValMap {
- if v == int32(eVal) {
- return reflect.ValueOf(eVal).Convert(t), nil
- }
- }
- return reflect.Value{}, fmt.Errorf("%s is not a valid %s", value, t)
-}
-
-func populateFieldEnum(f reflect.Value, value string, enumValMap map[string]int32) error {
- cval, err := convertEnum(value, f.Type(), enumValMap)
- if err != nil {
- return err
- }
- f.Set(cval)
- return nil
-}
-
-func populateFieldEnumRepeated(f reflect.Value, values []string, enumValMap map[string]int32) error {
- elemType := f.Type().Elem()
- f.Set(reflect.MakeSlice(f.Type(), len(values), len(values)).Convert(f.Type()))
- for i, v := range values {
- result, err := convertEnum(v, elemType, enumValMap)
- if err != nil {
- return err
- }
- f.Index(i).Set(result)
- }
- return nil
-}
-
-var (
- convFromType = map[reflect.Kind]reflect.Value{
- reflect.String: reflect.ValueOf(String),
- reflect.Bool: reflect.ValueOf(Bool),
- reflect.Float64: reflect.ValueOf(Float64),
- reflect.Float32: reflect.ValueOf(Float32),
- reflect.Int64: reflect.ValueOf(Int64),
- reflect.Int32: reflect.ValueOf(Int32),
- reflect.Uint64: reflect.ValueOf(Uint64),
- reflect.Uint32: reflect.ValueOf(Uint32),
- reflect.Slice: reflect.ValueOf(Bytes),
- }
-)
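
A small, self-contained sketch of PopulateQueryParameters (not part of this change); wrappers.StringValue is a deliberately contrived stand-in whose proto field name is "value", and the empty DoubleArray means no field paths are filtered out:

```go
package main

import (
	"fmt"
	"net/url"

	"github.com/golang/protobuf/ptypes/wrappers"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"github.com/grpc-ecosystem/grpc-gateway/utilities"
)

func main() {
	// In generated gateway handlers the keys come straight from r.URL.Query();
	// "value" is the proto field name of wrappers.StringValue.
	msg := &wrappers.StringValue{}
	values := url.Values{"value": []string{"hello"}}

	// An empty DoubleArray filters nothing; generated code passes the set of
	// field paths already bound from the URL path or request body here.
	if err := runtime.PopulateQueryParameters(msg, values, utilities.NewDoubleArray(nil)); err != nil {
		panic(err)
	}
	fmt.Println(msg.Value) // "hello"
}
```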
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/BUILD.bazel b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/BUILD.bazel
deleted file mode 100644
index 7109d7932..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/BUILD.bazel
+++ /dev/null
@@ -1,21 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(default_visibility = ["//visibility:public"])
-
-go_library(
- name = "go_default_library",
- srcs = [
- "doc.go",
- "pattern.go",
- "readerfactory.go",
- "trie.go",
- ],
- importpath = "github.com/grpc-ecosystem/grpc-gateway/utilities",
-)
-
-go_test(
- name = "go_default_test",
- size = "small",
- srcs = ["trie_test.go"],
- embed = [":go_default_library"],
-)
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/doc.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/doc.go
deleted file mode 100644
index cf79a4d58..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/doc.go
+++ /dev/null
@@ -1,2 +0,0 @@
-// Package utilities provides members for internal use in grpc-gateway.
-package utilities
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go
deleted file mode 100644
index dfe7de486..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package utilities
-
-// An OpCode is a opcode of compiled path patterns.
-type OpCode int
-
-// These constants are the valid values of OpCode.
-const (
- // OpNop does nothing
- OpNop = OpCode(iota)
- // OpPush pushes a component to stack
- OpPush
- // OpLitPush pushes a component to stack if it matches to the literal
- OpLitPush
- // OpPushM concatenates the remaining components and pushes it to stack
- OpPushM
- // OpConcatN pops N items from stack, concatenates them and pushes it back to stack
- OpConcatN
- // OpCapture pops an item and binds it to the variable
- OpCapture
- // OpEnd is the least positive invalid opcode.
- OpEnd
-)
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/readerfactory.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/readerfactory.go
deleted file mode 100644
index 6dd385466..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/readerfactory.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package utilities
-
-import (
- "bytes"
- "io"
- "io/ioutil"
-)
-
-// IOReaderFactory takes in an io.Reader and returns a function that will allow you to create a new reader that begins
-// at the start of the stream
-func IOReaderFactory(r io.Reader) (func() io.Reader, error) {
- b, err := ioutil.ReadAll(r)
- if err != nil {
- return nil, err
- }
-
- return func() io.Reader {
- return bytes.NewReader(b)
- }, nil
-}
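
A short illustrative sketch of IOReaderFactory (not part of this change); the input string is a placeholder:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"strings"

	"github.com/grpc-ecosystem/grpc-gateway/utilities"
)

func main() {
	// The factory buffers the stream once and then hands out fresh readers
	// that always start at the beginning, so a body can be read repeatedly.
	newReader, err := utilities.IOReaderFactory(strings.NewReader("payload"))
	if err != nil {
		panic(err)
	}

	for i := 0; i < 2; i++ {
		b, err := ioutil.ReadAll(newReader())
		if err != nil {
			panic(err)
		}
		fmt.Println(string(b)) // "payload" both times
	}
}
```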
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/trie.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/trie.go
deleted file mode 100644
index c2b7b30dd..000000000
--- a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/trie.go
+++ /dev/null
@@ -1,177 +0,0 @@
-package utilities
-
-import (
- "sort"
-)
-
-// DoubleArray is a Double Array implementation of trie on sequences of strings.
-type DoubleArray struct {
- // Encoding keeps an encoding from string to int
- Encoding map[string]int
- // Base is the base array of Double Array
- Base []int
- // Check is the check array of Double Array
- Check []int
-}
-
-// NewDoubleArray builds a DoubleArray from a set of sequences of strings.
-func NewDoubleArray(seqs [][]string) *DoubleArray {
- da := &DoubleArray{Encoding: make(map[string]int)}
- if len(seqs) == 0 {
- return da
- }
-
- encoded := registerTokens(da, seqs)
- sort.Sort(byLex(encoded))
-
- root := node{row: -1, col: -1, left: 0, right: len(encoded)}
- addSeqs(da, encoded, 0, root)
-
- for i := len(da.Base); i > 0; i-- {
- if da.Check[i-1] != 0 {
- da.Base = da.Base[:i]
- da.Check = da.Check[:i]
- break
- }
- }
- return da
-}
-
-func registerTokens(da *DoubleArray, seqs [][]string) [][]int {
- var result [][]int
- for _, seq := range seqs {
- var encoded []int
- for _, token := range seq {
- if _, ok := da.Encoding[token]; !ok {
- da.Encoding[token] = len(da.Encoding)
- }
- encoded = append(encoded, da.Encoding[token])
- }
- result = append(result, encoded)
- }
- for i := range result {
- result[i] = append(result[i], len(da.Encoding))
- }
- return result
-}
-
-type node struct {
- row, col int
- left, right int
-}
-
-func (n node) value(seqs [][]int) int {
- return seqs[n.row][n.col]
-}
-
-func (n node) children(seqs [][]int) []*node {
- var result []*node
- lastVal := int(-1)
- last := new(node)
- for i := n.left; i < n.right; i++ {
- if lastVal == seqs[i][n.col+1] {
- continue
- }
- last.right = i
- last = &node{
- row: i,
- col: n.col + 1,
- left: i,
- }
- result = append(result, last)
- }
- last.right = n.right
- return result
-}
-
-func addSeqs(da *DoubleArray, seqs [][]int, pos int, n node) {
- ensureSize(da, pos)
-
- children := n.children(seqs)
- var i int
- for i = 1; ; i++ {
- ok := func() bool {
- for _, child := range children {
- code := child.value(seqs)
- j := i + code
- ensureSize(da, j)
- if da.Check[j] != 0 {
- return false
- }
- }
- return true
- }()
- if ok {
- break
- }
- }
- da.Base[pos] = i
- for _, child := range children {
- code := child.value(seqs)
- j := i + code
- da.Check[j] = pos + 1
- }
- terminator := len(da.Encoding)
- for _, child := range children {
- code := child.value(seqs)
- if code == terminator {
- continue
- }
- j := i + code
- addSeqs(da, seqs, j, *child)
- }
-}
-
-func ensureSize(da *DoubleArray, i int) {
- for i >= len(da.Base) {
- da.Base = append(da.Base, make([]int, len(da.Base)+1)...)
- da.Check = append(da.Check, make([]int, len(da.Check)+1)...)
- }
-}
-
-type byLex [][]int
-
-func (l byLex) Len() int { return len(l) }
-func (l byLex) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
-func (l byLex) Less(i, j int) bool {
- si := l[i]
- sj := l[j]
- var k int
- for k = 0; k < len(si) && k < len(sj); k++ {
- if si[k] < sj[k] {
- return true
- }
- if si[k] > sj[k] {
- return false
- }
- }
- if k < len(sj) {
- return true
- }
- return false
-}
-
-// HasCommonPrefix determines if any sequence in the DoubleArray is a prefix of the given sequence.
-func (da *DoubleArray) HasCommonPrefix(seq []string) bool {
- if len(da.Base) == 0 {
- return false
- }
-
- var i int
- for _, t := range seq {
- code, ok := da.Encoding[t]
- if !ok {
- break
- }
- j := da.Base[i] + code
- if len(da.Check) <= j || da.Check[j] != i+1 {
- break
- }
- i = j
- }
- j := da.Base[i] + len(da.Encoding)
- if len(da.Check) <= j || da.Check[j] != i+1 {
- return false
- }
- return true
-}
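
A small sketch of how the DoubleArray trie answers prefix queries (not part of this change); the field paths are illustrative:

```go
package main

import (
	"fmt"

	"github.com/grpc-ecosystem/grpc-gateway/utilities"
)

func main() {
	// Build the trie from the field paths that are already bound elsewhere
	// (for the gateway: path and body parameters), split into components.
	filter := utilities.NewDoubleArray([][]string{
		{"user", "id"},
		{"page_size"},
	})

	// A query parameter is skipped when one of those sequences is a prefix
	// of its own field path.
	fmt.Println(filter.HasCommonPrefix([]string{"user", "id", "hash"})) // true
	fmt.Println(filter.HasCommonPrefix([]string{"user", "name"}))       // false
}
```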
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method.go
index cbc73d562..d011ec53e 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method.go
@@ -2,7 +2,6 @@ package authentication
import (
"github.com/Azure/go-autorest/autorest"
- "github.com/Azure/go-autorest/autorest/adal"
)
type authMethod interface {
@@ -10,7 +9,7 @@ type authMethod interface {
isApplicable(b Builder) bool
- getAuthorizationToken(sender autorest.Sender, oauthConfig *adal.OAuthConfig, endpoint string) (*autorest.BearerAuthorizer, error)
+ getAuthorizationToken(sender autorest.Sender, oauthConfig *OAuthConfig, endpoint string) (autorest.Authorizer, error)
name() string
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_azure_cli_token.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_azure_cli_token.go
index 184037d1b..279c42f74 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_azure_cli_token.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_azure_cli_token.go
@@ -14,7 +14,7 @@ import (
)
type azureCliTokenAuth struct {
- profile *azureCLIProfile
+ profile *azureCLIProfile
servicePrincipalAuthDocsLink string
}
@@ -68,7 +68,11 @@ func (a azureCliTokenAuth) isApplicable(b Builder) bool {
return b.SupportsAzureCliToken
}
-func (a azureCliTokenAuth) getAuthorizationToken(sender autorest.Sender, oauthConfig *adal.OAuthConfig, endpoint string) (*autorest.BearerAuthorizer, error) {
+func (a azureCliTokenAuth) getAuthorizationToken(sender autorest.Sender, oauth *OAuthConfig, endpoint string) (autorest.Authorizer, error) {
+ if oauth.OAuth == nil {
+ return nil, fmt.Errorf("Error getting Authorization Token for cli auth: an OAuth token wasn't configured correctly; please file a bug with more details")
+ }
+
// the Azure CLI appears to cache these, so to maintain compatibility with the interface this method is intentionally not on the pointer
token, err := obtainAuthorizationToken(endpoint, a.profile.subscriptionId)
if err != nil {
@@ -80,7 +84,7 @@ func (a azureCliTokenAuth) getAuthorizationToken(sender autorest.Sender, oauthCo
return nil, fmt.Errorf("Error converting Authorization Token to an ADAL Token: %s", err)
}
- spt, err := adal.NewServicePrincipalTokenFromManualToken(*oauthConfig, a.profile.clientId, endpoint, adalToken)
+ spt, err := adal.NewServicePrincipalTokenFromManualToken(*oauth.OAuth, a.profile.clientId, endpoint, adalToken)
if err != nil {
return nil, err
}
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_cert.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_cert.go
index f6dd9b77e..6abc9461e 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_cert.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_cert.go
@@ -41,7 +41,11 @@ func (a servicePrincipalClientCertificateAuth) name() string {
return "Service Principal / Client Certificate"
}
-func (a servicePrincipalClientCertificateAuth) getAuthorizationToken(sender autorest.Sender, oauthConfig *adal.OAuthConfig, endpoint string) (*autorest.BearerAuthorizer, error) {
+func (a servicePrincipalClientCertificateAuth) getAuthorizationToken(sender autorest.Sender, oauth *OAuthConfig, endpoint string) (autorest.Authorizer, error) {
+ if oauth.OAuth == nil {
+ return nil, fmt.Errorf("Error getting Authorization Token for client cert: an OAuth token wasn't configured correctly; please file a bug with more details")
+ }
+
certificateData, err := ioutil.ReadFile(a.clientCertPath)
if err != nil {
return nil, fmt.Errorf("Error reading Client Certificate %q: %v", a.clientCertPath, err)
@@ -53,7 +57,7 @@ func (a servicePrincipalClientCertificateAuth) getAuthorizationToken(sender auto
return nil, fmt.Errorf("Error decoding pkcs12 certificate: %v", err)
}
- spt, err := adal.NewServicePrincipalTokenFromCertificate(*oauthConfig, a.clientId, certificate, rsaPrivateKey, endpoint)
+ spt, err := adal.NewServicePrincipalTokenFromCertificate(*oauth.OAuth, a.clientId, certificate, rsaPrivateKey, endpoint)
if err != nil {
return nil, err
}
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_secret.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_secret.go
index f31bc20e8..11ab2716d 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_secret.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_secret.go
@@ -33,15 +33,18 @@ func (a servicePrincipalClientSecretAuth) name() string {
return "Service Principal / Client Secret"
}
-func (a servicePrincipalClientSecretAuth) getAuthorizationToken(sender autorest.Sender, oauthConfig *adal.OAuthConfig, endpoint string) (*autorest.BearerAuthorizer, error) {
- spt, err := adal.NewServicePrincipalToken(*oauthConfig, a.clientId, a.clientSecret, endpoint)
+func (a servicePrincipalClientSecretAuth) getAuthorizationToken(sender autorest.Sender, oauth *OAuthConfig, endpoint string) (autorest.Authorizer, error) {
+ if oauth.OAuth == nil {
+ return nil, fmt.Errorf("Error getting Authorization Token for client secret auth: an OAuth token wasn't configured correctly; please file a bug with more details")
+ }
+
+ spt, err := adal.NewServicePrincipalToken(*oauth.OAuth, a.clientId, a.clientSecret, endpoint)
if err != nil {
return nil, err
}
spt.SetSender(sender)
- auth := autorest.NewBearerAuthorizer(spt)
- return auth, nil
+ return autorest.NewBearerAuthorizer(spt), nil
}
func (a servicePrincipalClientSecretAuth) populateConfig(c *Config) error {
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_secret_multi_tenant.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_secret_multi_tenant.go
new file mode 100644
index 000000000..85de353b0
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_client_secret_multi_tenant.go
@@ -0,0 +1,83 @@
+package authentication
+
+import (
+ "fmt"
+
+ "github.com/Azure/go-autorest/autorest"
+ "github.com/Azure/go-autorest/autorest/adal"
+ "github.com/hashicorp/go-multierror"
+)
+
+type servicePrincipalClientSecretMultiTenantAuth struct {
+ clientId string
+ clientSecret string
+ subscriptionId string
+ tenantId string
+ auxiliaryTenantIDs []string
+}
+
+func (a servicePrincipalClientSecretMultiTenantAuth) build(b Builder) (authMethod, error) {
+ method := servicePrincipalClientSecretMultiTenantAuth{
+ clientId: b.ClientID,
+ clientSecret: b.ClientSecret,
+ subscriptionId: b.SubscriptionID,
+ tenantId: b.TenantID,
+ auxiliaryTenantIDs: b.AuxiliaryTenantIDs,
+ }
+ return method, nil
+}
+
+func (a servicePrincipalClientSecretMultiTenantAuth) isApplicable(b Builder) bool {
+ return b.SupportsClientSecretAuth && b.ClientSecret != "" && b.SupportsAuxiliaryTenants && (len(b.AuxiliaryTenantIDs) > 0)
+}
+
+func (a servicePrincipalClientSecretMultiTenantAuth) name() string {
+ return "Multi Tenant Service Principal / Client Secret"
+}
+
+func (a servicePrincipalClientSecretMultiTenantAuth) getAuthorizationToken(sender autorest.Sender, oauth *OAuthConfig, endpoint string) (autorest.Authorizer, error) {
+ if oauth.MultiTenantOauth == nil {
+ return nil, fmt.Errorf("Error getting Authorization Token for multi tenant client secret auth: a MultiTenantOAuth config wasn't configured correctly; please file a bug with more details")
+ }
+
+ spt, err := adal.NewMultiTenantServicePrincipalToken(*oauth.MultiTenantOauth, a.clientId, a.clientSecret, endpoint)
+ if err != nil {
+ return nil, err
+ }
+
+ spt.PrimaryToken.SetSender(sender)
+ for _, t := range spt.AuxiliaryTokens {
+ t.SetSender(sender)
+ }
+
+ auth := autorest.NewMultiTenantServicePrincipalTokenAuthorizer(spt)
+ return auth, nil
+}
+
+func (a servicePrincipalClientSecretMultiTenantAuth) populateConfig(c *Config) error {
+ c.AuthenticatedAsAServicePrincipal = true
+ return nil
+}
+
+func (a servicePrincipalClientSecretMultiTenantAuth) validate() error {
+ var err *multierror.Error
+
+ fmtErrorMessage := "A %s must be configured when authenticating as a Service Principal using a Multi Tenant Client Secret."
+
+ if a.subscriptionId == "" {
+ err = multierror.Append(err, fmt.Errorf(fmtErrorMessage, "Subscription ID"))
+ }
+ if a.clientId == "" {
+ err = multierror.Append(err, fmt.Errorf(fmtErrorMessage, "Client ID"))
+ }
+ if a.clientSecret == "" {
+ err = multierror.Append(err, fmt.Errorf(fmtErrorMessage, "Client Secret"))
+ }
+ if a.tenantId == "" {
+ err = multierror.Append(err, fmt.Errorf(fmtErrorMessage, "Tenant ID"))
+ }
+ if len(a.auxiliaryTenantIDs) == 0 {
+ err = multierror.Append(err, fmt.Errorf(fmtErrorMessage, "Auxiliary Tenant IDs"))
+ }
+ return err.ErrorOrNil()
+}
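
This new file adds the multi-tenant service principal / client secret method. A hedged sketch of how a consumer opts in through the Builder fields introduced in this change; the IDs, secret, and environment name are placeholders, and this is a consumer-side sketch rather than this repository's own wiring:

```go
package main

import (
	"log"

	"github.com/hashicorp/go-azure-helpers/authentication"
)

func main() {
	builder := authentication.Builder{
		SubscriptionID: "00000000-0000-0000-0000-000000000000",
		ClientID:       "00000000-0000-0000-0000-000000000000",
		ClientSecret:   "placeholder-secret",
		TenantID:       "00000000-0000-0000-0000-000000000000",
		Environment:    "public",

		// Opting in to client secret auth *and* auxiliary tenants is what
		// makes the new multi tenant method applicable.
		SupportsClientSecretAuth: true,
		SupportsAuxiliaryTenants: true,
		AuxiliaryTenantIDs:       []string{"11111111-1111-1111-1111-111111111111"},
	}

	config, err := builder.Build()
	if err != nil {
		log.Fatalf("building authentication config: %v", err)
	}
	log.Printf("authenticated as a service principal: %t", config.AuthenticatedAsAServicePrincipal)
}
```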
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_msi.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_msi.go
index 883df2607..0e33924f5 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_msi.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/auth_method_msi.go
@@ -39,7 +39,11 @@ func (a managedServiceIdentityAuth) name() string {
return "Managed Service Identity"
}
-func (a managedServiceIdentityAuth) getAuthorizationToken(sender autorest.Sender, oauthConfig *adal.OAuthConfig, endpoint string) (*autorest.BearerAuthorizer, error) {
+func (a managedServiceIdentityAuth) getAuthorizationToken(sender autorest.Sender, oauth *OAuthConfig, endpoint string) (autorest.Authorizer, error) {
+ if oauth.OAuth == nil {
+ return nil, fmt.Errorf("Error getting Authorization Token for MSI auth: an OAuth token wasn't configured correctly; please file a bug with more details")
+ }
+
spt, err := adal.NewServicePrincipalTokenFromMSI(a.endpoint, endpoint)
if err != nil {
return nil, err
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/azure_cli_access_token.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/azure_cli_access_token.go
index 822fb2d77..bc826d557 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/authentication/azure_cli_access_token.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/azure_cli_access_token.go
@@ -10,8 +10,8 @@ import (
)
type azureCliAccessToken struct {
- ClientID string
- AccessToken *adal.Token
+ ClientID string
+ AccessToken *adal.Token
}
func findValidAccessTokenForTenant(tokens []cli.Token, tenantId string) (*azureCliAccessToken, error) {
@@ -32,8 +32,8 @@ func findValidAccessTokenForTenant(tokens []cli.Token, tenantId string) (*azureC
}
validAccessToken := azureCliAccessToken{
- ClientID: accessToken.ClientID,
- AccessToken: &token,
+ ClientID: accessToken.ClientID,
+ AccessToken: &token,
}
return &validAccessToken, nil
}
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/azure_cli_profile.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/azure_cli_profile.go
index 187602401..1f6a0b6af 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/authentication/azure_cli_profile.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/azure_cli_profile.go
@@ -9,10 +9,10 @@ import (
type azureCLIProfile struct {
profile cli.Profile
- clientId string
- environment string
- subscriptionId string
- tenantId string
+ clientId string
+ environment string
+ subscriptionId string
+ tenantId string
}
func (a *azureCLIProfile) populateFields() error {
@@ -49,4 +49,4 @@ func (a *azureCLIProfile) verifyAuthenticatedAsAUser() bool {
}
return false
-}
\ No newline at end of file
+}
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/builder.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/builder.go
index 8a187c6bb..71400ea14 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/authentication/builder.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/builder.go
@@ -14,6 +14,10 @@ type Builder struct {
TenantID string
Environment string
+ // Auxiliary tenant IDs used for multi tenant auth
+ SupportsAuxiliaryTenants bool
+ AuxiliaryTenantIDs []string
+
// The custom Resource Manager Endpoint which should be used
// only applicable for Azure Stack at this time.
CustomResourceManagerEndpoint string
@@ -43,6 +47,7 @@ func (b Builder) Build() (*Config, error) {
ClientID: b.ClientID,
SubscriptionID: b.SubscriptionID,
TenantID: b.TenantID,
+ AuxiliaryTenantIDs: b.AuxiliaryTenantIDs,
Environment: b.Environment,
CustomResourceManagerEndpoint: b.CustomResourceManagerEndpoint,
}
@@ -51,6 +56,7 @@ func (b Builder) Build() (*Config, error) {
// since the Azure CLI Parsing should always be the last thing checked
supportedAuthenticationMethods := []authMethod{
servicePrincipalClientCertificateAuth{},
+ servicePrincipalClientSecretMultiTenantAuth{},
servicePrincipalClientSecretAuth{},
managedServiceIdentityAuth{},
azureCliTokenAuth{},
@@ -59,23 +65,28 @@ func (b Builder) Build() (*Config, error) {
for _, method := range supportedAuthenticationMethods {
name := method.name()
log.Printf("Testing if %s is applicable for Authentication..", name)
- if method.isApplicable(b) {
- log.Printf("Using %s for Authentication", name)
- auth, err := method.build(b)
- if err != nil {
- return nil, err
- }
- // populate authentication specific fields on the Config
- // (e.g. is service principal, fields parsed from the azure cli)
- err = auth.populateConfig(&config)
- if err != nil {
- return nil, err
- }
-
- config.authMethod = auth
- return config.validate()
+ // this auth method isn't applicable for the given configuration, so try the next one
+ if !method.isApplicable(b) {
+ continue
}
+
+ log.Printf("Using %s for Authentication", name)
+ auth, err := method.build(b)
+ if err != nil {
+ return nil, err
+ }
+
+ // populate authentication specific fields on the Config
+ // (e.g. is service principal, fields parsed from the azure cli)
+ err = auth.populateConfig(&config)
+ if err != nil {
+ return nil, err
+ }
+
+ config.authMethod = auth
+ return &config, config.authMethod.validate()
+
}
return nil, fmt.Errorf("No supported authentication methods were found!")
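The Builder changes above add the auxiliary-tenant fields and register the new servicePrincipalClientSecretMultiTenantAuth method ahead of the single-tenant one. Below is a minimal, hypothetical sketch of how a caller might populate those fields; every ID is a placeholder, and a real caller must also enable a concrete credential (client secret, client certificate, MSI, or the Azure CLI) on the Builder, which this sketch does not show.

package main

import (
	"log"

	"github.com/hashicorp/go-azure-helpers/authentication"
)

func main() {
	// Placeholder IDs only; a concrete auth method still has to be enabled
	// on the Builder for Build() to find an applicable method.
	builder := authentication.Builder{
		SubscriptionID:           "00000000-0000-0000-0000-000000000000",
		ClientID:                 "11111111-1111-1111-1111-111111111111",
		TenantID:                 "22222222-2222-2222-2222-222222222222",
		Environment:              "public",
		SupportsAuxiliaryTenants: true,
		AuxiliaryTenantIDs:       []string{"33333333-3333-3333-3333-333333333333"},
	}

	config, err := builder.Build()
	if err != nil {
		log.Fatalf("building Azure auth config: %v", err)
	}
	log.Printf("primary tenant: %s, auxiliary tenants: %v", config.TenantID, config.AuxiliaryTenantIDs)
}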
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/authentication/config.go b/vendor/github.com/hashicorp/go-azure-helpers/authentication/config.go
index 90b78fce0..93843c3b7 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/authentication/config.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/authentication/config.go
@@ -1,6 +1,9 @@
package authentication
import (
+ "fmt"
+ "log"
+
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/adal"
)
@@ -11,6 +14,7 @@ type Config struct {
ClientID string
SubscriptionID string
TenantID string
+ AuxiliaryTenantIDs []string
Environment string
AuthenticatedAsAServicePrincipal bool
@@ -21,16 +25,87 @@ type Config struct {
authMethod authMethod
}
-// GetAuthorizationToken returns an authorization token for the authentication method defined in the Config
-func (c Config) GetAuthorizationToken(sender autorest.Sender, oauthConfig *adal.OAuthConfig, endpoint string) (*autorest.BearerAuthorizer, error) {
- return c.authMethod.getAuthorizationToken(sender, oauthConfig, endpoint)
+type OAuthConfig struct {
+ OAuth *adal.OAuthConfig
+ MultiTenantOauth *adal.MultiTenantOAuthConfig
}
-func (c Config) validate() (*Config, error) {
- err := c.authMethod.validate()
+// GetOAuthConfig returns an OAuth config for the primary tenant defined in the Config
+func (c Config) GetOAuthConfig(activeDirectoryEndpoint string) (*adal.OAuthConfig, error) {
+ log.Printf("Getting OAuth config for endpoint %s with tenant %s", activeDirectoryEndpoint, c.TenantID)
+ oauth, err := adal.NewOAuthConfig(activeDirectoryEndpoint, c.TenantID)
if err != nil {
return nil, err
}
- return &c, nil
+ // NewOAuthConfig can return a nil pointer without an error
+ if oauth == nil {
+ return nil, fmt.Errorf("Unable to configure OAuthConfig for tenant %s", c.TenantID)
+ }
+
+ return oauth, nil
+}
+
+// GetMultiTenantOAuthConfig returns a multi-tenant OAuth config for the primary and auxiliary tenants defined in the Config
+func (c Config) GetMultiTenantOAuthConfig(activeDirectoryEndpoint string) (*adal.MultiTenantOAuthConfig, error) {
+ log.Printf("Getting multi OAuth config for endpoint %s with tenant %s (aux tenants: %v)", activeDirectoryEndpoint, c.TenantID, c.AuxiliaryTenantIDs)
+ oauth, err := adal.NewMultiTenantOAuthConfig(activeDirectoryEndpoint, c.TenantID, c.AuxiliaryTenantIDs, adal.OAuthOptions{})
+ if err != nil {
+ return nil, err
+ }
+
+ // NewMultiTenantOAuthConfig can return a nil config without an error
+ if oauth == nil {
+ return nil, fmt.Errorf("Unable to configure OAuthConfig for tenant %s (auxiliary tenants %v)", c.TenantID, c.AuxiliaryTenantIDs)
+ }
+
+ return &oauth, nil
+}
+
+// BuildOAuthConfig builds the authorization configuration for the specified Active Directory Endpoint
+func (c Config) BuildOAuthConfig(activeDirectoryEndpoint string) (*OAuthConfig, error) {
+ multiAuth := OAuthConfig{}
+ var err error
+
+ multiAuth.OAuth, err = c.GetOAuthConfig(activeDirectoryEndpoint)
+ if err != nil {
+ return nil, err
+ }
+
+ if len(c.AuxiliaryTenantIDs) > 0 {
+ multiAuth.MultiTenantOauth, err = c.GetMultiTenantOAuthConfig(activeDirectoryEndpoint)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return &multiAuth, nil
+}
+
+// BearerAuthorizerCallback returns a BearerAuthorizer valid only for the Primary Tenant
+// this signs a request using the AccessToken returned from the primary Resource Manager authorizer
+func (c Config) BearerAuthorizerCallback(sender autorest.Sender, oauthConfig *OAuthConfig) *autorest.BearerAuthorizerCallback {
+ return autorest.NewBearerAuthorizerCallback(sender, func(tenantID, resource string) (*autorest.BearerAuthorizer, error) {
+ // a BearerAuthorizer is only valid for the primary tenant
+ newAuthConfig := &OAuthConfig{
+ OAuth: oauthConfig.OAuth,
+ }
+
+ storageSpt, err := c.GetAuthorizationToken(sender, newAuthConfig, resource)
+ if err != nil {
+ return nil, err
+ }
+
+ cast, ok := storageSpt.(*autorest.BearerAuthorizer)
+ if !ok {
+ return nil, fmt.Errorf("Error converting %+v to a BearerAuthorizer", storageSpt)
+ }
+
+ return cast, nil
+ })
+}
+
+// GetAuthorizationToken returns an authorization token for the authentication method defined in the Config
+func (c Config) GetAuthorizationToken(sender autorest.Sender, oauth *OAuthConfig, endpoint string) (autorest.Authorizer, error) {
+ return c.authMethod.getAuthorizationToken(sender, oauth, endpoint)
}
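Taken together, the new Config methods replace the old validate-and-return flow: BuildOAuthConfig wraps the single-tenant OAuth config and, when AuxiliaryTenantIDs is non-empty, the multi-tenant one, while GetAuthorizationToken now returns a generic autorest.Authorizer rather than a *autorest.BearerAuthorizer. The sketch below shows how these might compose for a caller holding a *authentication.Config; both endpoint URLs are placeholder values, not part of this change.

package main

import (
	"net/http"

	"github.com/Azure/go-autorest/autorest"
	"github.com/hashicorp/go-azure-helpers/authentication"
)

// buildAuthorizer builds the (possibly multi-tenant) OAuth config for an
// Active Directory endpoint and then requests an Authorizer for a resource.
func buildAuthorizer(config *authentication.Config) (autorest.Authorizer, error) {
	oauth, err := config.BuildOAuthConfig("https://login.microsoftonline.com/")
	if err != nil {
		return nil, err
	}

	// *http.Client satisfies autorest.Sender.
	var sender autorest.Sender = &http.Client{}

	return config.GetAuthorizationToken(sender, oauth, "https://management.azure.com/")
}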
diff --git a/vendor/github.com/hashicorp/go-azure-helpers/storage/sas_token.go b/vendor/github.com/hashicorp/go-azure-helpers/storage/sas_token.go
index 205baaeee..62585a34a 100644
--- a/vendor/github.com/hashicorp/go-azure-helpers/storage/sas_token.go
+++ b/vendor/github.com/hashicorp/go-azure-helpers/storage/sas_token.go
@@ -10,8 +10,9 @@ import (
)
const (
- connStringAccountKeyKey = "AccountKey"
- connStringAccountNameKey = "AccountName"
+ connStringAccountKeyKey = "AccountKey"
+ connStringAccountNameKey = "AccountName"
+ blobContainerSignedVersion = "2018-11-09"
)
// ComputeAccountSASToken computes the SAS Token for a Storage Account based on the
@@ -67,6 +68,95 @@ func ComputeAccountSASToken(accountName string,
return sasToken, nil
}
+func ComputeContainerSASToken(signedPermissions string,
+ signedStart string,
+ signedExpiry string,
+ accountName string,
+ accountKey string,
+ containerName string,
+ signedIdentifier string,
+ signedIp string,
+ signedProtocol string,
+ signedSnapshotTime string,
+ cacheControl string,
+ contentDisposition string,
+ contentEncoding string,
+ contentLanguage string,
+ contentType string,
+) (string, error) {
+
+ canonicalizedResource := "/blob/" + accountName + "/" + containerName
+ signedVersion := blobContainerSignedVersion
+ signedResource := "c" // c for container
+
+ // UTF-8 by default...
+ stringToSign := signedPermissions + "\n"
+ stringToSign += signedStart + "\n"
+ stringToSign += signedExpiry + "\n"
+ stringToSign += canonicalizedResource + "\n"
+ stringToSign += signedIdentifier + "\n"
+ stringToSign += signedIp + "\n"
+ stringToSign += signedProtocol + "\n"
+ stringToSign += signedVersion + "\n"
+ stringToSign += signedResource + "\n"
+ stringToSign += signedSnapshotTime + "\n"
+ stringToSign += cacheControl + "\n"
+ stringToSign += contentDisposition + "\n"
+ stringToSign += contentEncoding + "\n"
+ stringToSign += contentLanguage + "\n"
+ stringToSign += contentType
+
+ binaryKey, err := base64.StdEncoding.DecodeString(accountKey)
+ if err != nil {
+ return "", err
+ }
+ hasher := hmac.New(sha256.New, binaryKey)
+ hasher.Write([]byte(stringToSign))
+ signature := hasher.Sum(nil)
+
+ sasToken := "?sv=" + signedVersion
+ sasToken += "&sr=" + signedResource
+ sasToken += "&st=" + url.QueryEscape(signedStart)
+ sasToken += "&se=" + url.QueryEscape(signedExpiry)
+ sasToken += "&sp=" + signedPermissions
+
+ if len(signedIp) > 0 {
+ sasToken += "&sip=" + signedIp
+ }
+
+ if len(signedProtocol) > 0 {
+ sasToken += "&spr=" + signedProtocol
+ }
+
+ if len(signedIdentifier) > 0 {
+ sasToken += "&si=" + signedIdentifier
+ }
+
+ if len(cacheControl) > 0 {
+ sasToken += "&rscc=" + url.QueryEscape(cacheControl)
+ }
+
+ if len(contentDisposition) > 0 {
+ sasToken += "&rscd=" + url.QueryEscape(contentDisposition)
+ }
+
+ if len(contentEncoding) > 0 {
+ sasToken += "&rsce=" + url.QueryEscape(contentEncoding)
+ }
+
+ if len(contentLanguage) > 0 {
+ sasToken += "&rscl=" + url.QueryEscape(contentLanguage)
+ }
+
+ if len(contentType) > 0 {
+ sasToken += "&rsct=" + url.QueryEscape(contentType)
+ }
+
+ sasToken += "&sig=" + url.QueryEscape(base64.StdEncoding.EncodeToString(signature))
+
+ return sasToken, nil
+}
+
// ParseAccountSASConnectionString parses the Connection String for a Storage Account
func ParseAccountSASConnectionString(connString string) (map[string]string, error) {
// This connection string was for a real storage account which has been deleted
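ComputeContainerSASToken mirrors ComputeAccountSASToken but signs a container-scoped resource ("sr=c") against the 2018-11-09 service version. An illustrative call with placeholder inputs, leaving every optional override (signed identifier, IP, snapshot time, and the content-* response headers) empty:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/go-azure-helpers/storage"
)

func main() {
	// All values are placeholders; the account key only needs to be valid
	// base64 for the HMAC signing step to succeed.
	sasToken, err := storage.ComputeContainerSASToken(
		"rl",                               // signedPermissions (read + list)
		"2019-08-01T00:00:00Z",             // signedStart
		"2019-08-02T00:00:00Z",             // signedExpiry
		"examplestorageacct",               // accountName
		"dGhpcyBpcyBub3QgYSByZWFsIGtleQ==", // accountKey (base64 placeholder)
		"tfstate",                          // containerName
		"",                                 // signedIdentifier
		"",                                 // signedIp
		"https",                            // signedProtocol
		"",                                 // signedSnapshotTime
		"",                                 // cacheControl
		"",                                 // contentDisposition
		"",                                 // contentEncoding
		"",                                 // contentLanguage
		"",                                 // contentType
	)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(sasToken)
}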
diff --git a/vendor/go.opencensus.io/plugin/ocgrpc/client.go b/vendor/go.opencensus.io/plugin/ocgrpc/client.go
deleted file mode 100644
index a6c466ae8..000000000
--- a/vendor/go.opencensus.io/plugin/ocgrpc/client.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright 2018, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ocgrpc
-
-import (
- "go.opencensus.io/trace"
- "golang.org/x/net/context"
-
- "google.golang.org/grpc/stats"
-)
-
-// ClientHandler implements a gRPC stats.Handler for recording OpenCensus stats and
-// traces. Use with gRPC clients only.
-type ClientHandler struct {
- // StartOptions allows configuring the StartOptions used to create new spans.
- //
- // StartOptions.SpanKind will always be set to trace.SpanKindClient
- // for spans started by this handler.
- StartOptions trace.StartOptions
-}
-
-// HandleConn exists to satisfy gRPC stats.Handler.
-func (c *ClientHandler) HandleConn(ctx context.Context, cs stats.ConnStats) {
- // no-op
-}
-
-// TagConn exists to satisfy gRPC stats.Handler.
-func (c *ClientHandler) TagConn(ctx context.Context, cti *stats.ConnTagInfo) context.Context {
- // no-op
- return ctx
-}
-
-// HandleRPC implements per-RPC tracing and stats instrumentation.
-func (c *ClientHandler) HandleRPC(ctx context.Context, rs stats.RPCStats) {
- traceHandleRPC(ctx, rs)
- statsHandleRPC(ctx, rs)
-}
-
-// TagRPC implements per-RPC context management.
-func (c *ClientHandler) TagRPC(ctx context.Context, rti *stats.RPCTagInfo) context.Context {
- ctx = c.traceTagRPC(ctx, rti)
- ctx = c.statsTagRPC(ctx, rti)
- return ctx
-}
diff --git a/vendor/go.opencensus.io/plugin/ocgrpc/client_metrics.go b/vendor/go.opencensus.io/plugin/ocgrpc/client_metrics.go
deleted file mode 100644
index abe978b67..000000000
--- a/vendor/go.opencensus.io/plugin/ocgrpc/client_metrics.go
+++ /dev/null
@@ -1,107 +0,0 @@
-// Copyright 2017, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package ocgrpc
-
-import (
- "go.opencensus.io/stats"
- "go.opencensus.io/stats/view"
- "go.opencensus.io/tag"
-)
-
-// The following variables are measures are recorded by ClientHandler:
-var (
- ClientSentMessagesPerRPC = stats.Int64("grpc.io/client/sent_messages_per_rpc", "Number of messages sent in the RPC (always 1 for non-streaming RPCs).", stats.UnitDimensionless)
- ClientSentBytesPerRPC = stats.Int64("grpc.io/client/sent_bytes_per_rpc", "Total bytes sent across all request messages per RPC.", stats.UnitBytes)
- ClientReceivedMessagesPerRPC = stats.Int64("grpc.io/client/received_messages_per_rpc", "Number of response messages received per RPC (always 1 for non-streaming RPCs).", stats.UnitDimensionless)
- ClientReceivedBytesPerRPC = stats.Int64("grpc.io/client/received_bytes_per_rpc", "Total bytes received across all response messages per RPC.", stats.UnitBytes)
- ClientRoundtripLatency = stats.Float64("grpc.io/client/roundtrip_latency", "Time between first byte of request sent to last byte of response received, or terminal error.", stats.UnitMilliseconds)
- ClientServerLatency = stats.Float64("grpc.io/client/server_latency", `Propagated from the server and should have the same value as "grpc.io/server/latency".`, stats.UnitMilliseconds)
-)
-
-// Predefined views may be registered to collect data for the above measures.
-// As always, you may also define your own custom views over measures collected by this
-// package. These are declared as a convenience only; none are registered by
-// default.
-var (
- ClientSentBytesPerRPCView = &view.View{
- Measure: ClientSentBytesPerRPC,
- Name: "grpc.io/client/sent_bytes_per_rpc",
- Description: "Distribution of bytes sent per RPC, by method.",
- TagKeys: []tag.Key{KeyClientMethod},
- Aggregation: DefaultBytesDistribution,
- }
-
- ClientReceivedBytesPerRPCView = &view.View{
- Measure: ClientReceivedBytesPerRPC,
- Name: "grpc.io/client/received_bytes_per_rpc",
- Description: "Distribution of bytes received per RPC, by method.",
- TagKeys: []tag.Key{KeyClientMethod},
- Aggregation: DefaultBytesDistribution,
- }
-
- ClientRoundtripLatencyView = &view.View{
- Measure: ClientRoundtripLatency,
- Name: "grpc.io/client/roundtrip_latency",
- Description: "Distribution of round-trip latency, by method.",
- TagKeys: []tag.Key{KeyClientMethod},
- Aggregation: DefaultMillisecondsDistribution,
- }
-
- ClientCompletedRPCsView = &view.View{
- Measure: ClientRoundtripLatency,
- Name: "grpc.io/client/completed_rpcs",
- Description: "Count of RPCs by method and status.",
- TagKeys: []tag.Key{KeyClientMethod, KeyClientStatus},
- Aggregation: view.Count(),
- }
-
- ClientSentMessagesPerRPCView = &view.View{
- Measure: ClientSentMessagesPerRPC,
- Name: "grpc.io/client/sent_messages_per_rpc",
- Description: "Distribution of sent messages count per RPC, by method.",
- TagKeys: []tag.Key{KeyClientMethod},
- Aggregation: DefaultMessageCountDistribution,
- }
-
- ClientReceivedMessagesPerRPCView = &view.View{
- Measure: ClientReceivedMessagesPerRPC,
- Name: "grpc.io/client/received_messages_per_rpc",
- Description: "Distribution of received messages count per RPC, by method.",
- TagKeys: []tag.Key{KeyClientMethod},
- Aggregation: DefaultMessageCountDistribution,
- }
-
- ClientServerLatencyView = &view.View{
- Measure: ClientServerLatency,
- Name: "grpc.io/client/server_latency",
- Description: "Distribution of server latency as viewed by client, by method.",
- TagKeys: []tag.Key{KeyClientMethod},
- Aggregation: DefaultMillisecondsDistribution,
- }
-)
-
-// DefaultClientViews are the default client views provided by this package.
-var DefaultClientViews = []*view.View{
- ClientSentBytesPerRPCView,
- ClientReceivedBytesPerRPCView,
- ClientRoundtripLatencyView,
- ClientCompletedRPCsView,
-}
-
-// TODO(jbd): Add roundtrip_latency, uncompressed_request_bytes, uncompressed_response_bytes, request_count, response_count.
-// TODO(acetechnologist): This is temporary and will need to be replaced by a
-// mechanism to load these defaults from a common repository/config shared by
-// all supported languages. Likely a serialized protobuf of these defaults.
diff --git a/vendor/go.opencensus.io/plugin/ocgrpc/client_stats_handler.go b/vendor/go.opencensus.io/plugin/ocgrpc/client_stats_handler.go
deleted file mode 100644
index 303c607f6..000000000
--- a/vendor/go.opencensus.io/plugin/ocgrpc/client_stats_handler.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2017, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package ocgrpc
-
-import (
- "time"
-
- "go.opencensus.io/tag"
- "golang.org/x/net/context"
- "google.golang.org/grpc/grpclog"
- "google.golang.org/grpc/stats"
-)
-
-// statsTagRPC gets the tag.Map populated by the application code, serializes
-// its tags into the GRPC metadata in order to be sent to the server.
-func (h *ClientHandler) statsTagRPC(ctx context.Context, info *stats.RPCTagInfo) context.Context {
- startTime := time.Now()
- if info == nil {
- if grpclog.V(2) {
- grpclog.Infof("clientHandler.TagRPC called with nil info.", info.FullMethodName)
- }
- return ctx
- }
-
- d := &rpcData{
- startTime: startTime,
- method: info.FullMethodName,
- }
- ts := tag.FromContext(ctx)
- if ts != nil {
- encoded := tag.Encode(ts)
- ctx = stats.SetTags(ctx, encoded)
- }
-
- return context.WithValue(ctx, rpcDataKey, d)
-}
diff --git a/vendor/go.opencensus.io/plugin/ocgrpc/doc.go b/vendor/go.opencensus.io/plugin/ocgrpc/doc.go
deleted file mode 100644
index 1370323fb..000000000
--- a/vendor/go.opencensus.io/plugin/ocgrpc/doc.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2017, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package ocgrpc contains OpenCensus stats and trace
-// integrations for gRPC.
-//
-// Use ServerHandler for servers and ClientHandler for clients.
-package ocgrpc // import "go.opencensus.io/plugin/ocgrpc"
diff --git a/vendor/go.opencensus.io/plugin/ocgrpc/server.go b/vendor/go.opencensus.io/plugin/ocgrpc/server.go
deleted file mode 100644
index b67b3e2be..000000000
--- a/vendor/go.opencensus.io/plugin/ocgrpc/server.go
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright 2018, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ocgrpc
-
-import (
- "go.opencensus.io/trace"
- "golang.org/x/net/context"
-
- "google.golang.org/grpc/stats"
-)
-
-// ServerHandler implements gRPC stats.Handler recording OpenCensus stats and
-// traces. Use with gRPC servers.
-//
-// When installed (see Example), tracing metadata is read from inbound RPCs
-// by default. If no tracing metadata is present, or if the tracing metadata is
-// present but the SpanContext isn't sampled, then a new trace may be started
-// (as determined by Sampler).
-type ServerHandler struct {
- // IsPublicEndpoint may be set to true to always start a new trace around
- // each RPC. Any SpanContext in the RPC metadata will be added as a linked
- // span instead of making it the parent of the span created around the
- // server RPC.
- //
- // Be aware that if you leave this false (the default) on a public-facing
- // server, callers will be able to send tracing metadata in gRPC headers
- // and trigger traces in your backend.
- IsPublicEndpoint bool
-
- // StartOptions to use for to spans started around RPCs handled by this server.
- //
- // These will apply even if there is tracing metadata already
- // present on the inbound RPC but the SpanContext is not sampled. This
- // ensures that each service has some opportunity to be traced. If you would
- // like to not add any additional traces for this gRPC service, set:
- //
- // StartOptions.Sampler = trace.ProbabilitySampler(0.0)
- //
- // StartOptions.SpanKind will always be set to trace.SpanKindServer
- // for spans started by this handler.
- StartOptions trace.StartOptions
-}
-
-var _ stats.Handler = (*ServerHandler)(nil)
-
-// HandleConn exists to satisfy gRPC stats.Handler.
-func (s *ServerHandler) HandleConn(ctx context.Context, cs stats.ConnStats) {
- // no-op
-}
-
-// TagConn exists to satisfy gRPC stats.Handler.
-func (s *ServerHandler) TagConn(ctx context.Context, cti *stats.ConnTagInfo) context.Context {
- // no-op
- return ctx
-}
-
-// HandleRPC implements per-RPC tracing and stats instrumentation.
-func (s *ServerHandler) HandleRPC(ctx context.Context, rs stats.RPCStats) {
- traceHandleRPC(ctx, rs)
- statsHandleRPC(ctx, rs)
-}
-
-// TagRPC implements per-RPC context management.
-func (s *ServerHandler) TagRPC(ctx context.Context, rti *stats.RPCTagInfo) context.Context {
- ctx = s.traceTagRPC(ctx, rti)
- ctx = s.statsTagRPC(ctx, rti)
- return ctx
-}
diff --git a/vendor/go.opencensus.io/plugin/ocgrpc/server_metrics.go b/vendor/go.opencensus.io/plugin/ocgrpc/server_metrics.go
deleted file mode 100644
index 609d9ed24..000000000
--- a/vendor/go.opencensus.io/plugin/ocgrpc/server_metrics.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// Copyright 2017, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package ocgrpc
-
-import (
- "go.opencensus.io/stats"
- "go.opencensus.io/stats/view"
- "go.opencensus.io/tag"
-)
-
-// The following variables are measures are recorded by ServerHandler:
-var (
- ServerReceivedMessagesPerRPC = stats.Int64("grpc.io/server/received_messages_per_rpc", "Number of messages received in each RPC. Has value 1 for non-streaming RPCs.", stats.UnitDimensionless)
- ServerReceivedBytesPerRPC = stats.Int64("grpc.io/server/received_bytes_per_rpc", "Total bytes received across all messages per RPC.", stats.UnitBytes)
- ServerSentMessagesPerRPC = stats.Int64("grpc.io/server/sent_messages_per_rpc", "Number of messages sent in each RPC. Has value 1 for non-streaming RPCs.", stats.UnitDimensionless)
- ServerSentBytesPerRPC = stats.Int64("grpc.io/server/sent_bytes_per_rpc", "Total bytes sent in across all response messages per RPC.", stats.UnitBytes)
- ServerLatency = stats.Float64("grpc.io/server/server_latency", "Time between first byte of request received to last byte of response sent, or terminal error.", stats.UnitMilliseconds)
-)
-
-// TODO(acetechnologist): This is temporary and will need to be replaced by a
-// mechanism to load these defaults from a common repository/config shared by
-// all supported languages. Likely a serialized protobuf of these defaults.
-
-// Predefined views may be registered to collect data for the above measures.
-// As always, you may also define your own custom views over measures collected by this
-// package. These are declared as a convenience only; none are registered by
-// default.
-var (
- ServerReceivedBytesPerRPCView = &view.View{
- Name: "grpc.io/server/received_bytes_per_rpc",
- Description: "Distribution of received bytes per RPC, by method.",
- Measure: ServerReceivedBytesPerRPC,
- TagKeys: []tag.Key{KeyServerMethod},
- Aggregation: DefaultBytesDistribution,
- }
-
- ServerSentBytesPerRPCView = &view.View{
- Name: "grpc.io/server/sent_bytes_per_rpc",
- Description: "Distribution of total sent bytes per RPC, by method.",
- Measure: ServerSentBytesPerRPC,
- TagKeys: []tag.Key{KeyServerMethod},
- Aggregation: DefaultBytesDistribution,
- }
-
- ServerLatencyView = &view.View{
- Name: "grpc.io/server/server_latency",
- Description: "Distribution of server latency in milliseconds, by method.",
- TagKeys: []tag.Key{KeyServerMethod},
- Measure: ServerLatency,
- Aggregation: DefaultMillisecondsDistribution,
- }
-
- ServerCompletedRPCsView = &view.View{
- Name: "grpc.io/server/completed_rpcs",
- Description: "Count of RPCs by method and status.",
- TagKeys: []tag.Key{KeyServerMethod, KeyServerStatus},
- Measure: ServerLatency,
- Aggregation: view.Count(),
- }
-
- ServerReceivedMessagesPerRPCView = &view.View{
- Name: "grpc.io/server/received_messages_per_rpc",
- Description: "Distribution of messages received count per RPC, by method.",
- TagKeys: []tag.Key{KeyServerMethod},
- Measure: ServerReceivedMessagesPerRPC,
- Aggregation: DefaultMessageCountDistribution,
- }
-
- ServerSentMessagesPerRPCView = &view.View{
- Name: "grpc.io/server/sent_messages_per_rpc",
- Description: "Distribution of messages sent count per RPC, by method.",
- TagKeys: []tag.Key{KeyServerMethod},
- Measure: ServerSentMessagesPerRPC,
- Aggregation: DefaultMessageCountDistribution,
- }
-)
-
-// DefaultServerViews are the default server views provided by this package.
-var DefaultServerViews = []*view.View{
- ServerReceivedBytesPerRPCView,
- ServerSentBytesPerRPCView,
- ServerLatencyView,
- ServerCompletedRPCsView,
-}
diff --git a/vendor/go.opencensus.io/plugin/ocgrpc/server_stats_handler.go b/vendor/go.opencensus.io/plugin/ocgrpc/server_stats_handler.go
deleted file mode 100644
index 7847c1a91..000000000
--- a/vendor/go.opencensus.io/plugin/ocgrpc/server_stats_handler.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2017, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package ocgrpc
-
-import (
- "time"
-
- "golang.org/x/net/context"
-
- "go.opencensus.io/tag"
- "google.golang.org/grpc/grpclog"
- "google.golang.org/grpc/stats"
-)
-
-// statsTagRPC gets the metadata from gRPC context, extracts the encoded tags from
-// it and creates a new tag.Map and puts them into the returned context.
-func (h *ServerHandler) statsTagRPC(ctx context.Context, info *stats.RPCTagInfo) context.Context {
- startTime := time.Now()
- if info == nil {
- if grpclog.V(2) {
- grpclog.Infof("opencensus: TagRPC called with nil info.")
- }
- return ctx
- }
- d := &rpcData{
- startTime: startTime,
- method: info.FullMethodName,
- }
- propagated := h.extractPropagatedTags(ctx)
- ctx = tag.NewContext(ctx, propagated)
- ctx, _ = tag.New(ctx, tag.Upsert(KeyServerMethod, methodName(info.FullMethodName)))
- return context.WithValue(ctx, rpcDataKey, d)
-}
-
-// extractPropagatedTags creates a new tag map containing the tags extracted from the
-// gRPC metadata.
-func (h *ServerHandler) extractPropagatedTags(ctx context.Context) *tag.Map {
- buf := stats.Tags(ctx)
- if buf == nil {
- return nil
- }
- propagated, err := tag.Decode(buf)
- if err != nil {
- if grpclog.V(2) {
- grpclog.Warningf("opencensus: Failed to decode tags from gRPC metadata failed to decode: %v", err)
- }
- return nil
- }
- return propagated
-}
diff --git a/vendor/go.opencensus.io/plugin/ocgrpc/stats_common.go b/vendor/go.opencensus.io/plugin/ocgrpc/stats_common.go
deleted file mode 100644
index e9991fe0f..000000000
--- a/vendor/go.opencensus.io/plugin/ocgrpc/stats_common.go
+++ /dev/null
@@ -1,208 +0,0 @@
-// Copyright 2017, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package ocgrpc
-
-import (
- "context"
- "strconv"
- "strings"
- "sync/atomic"
- "time"
-
- ocstats "go.opencensus.io/stats"
- "go.opencensus.io/stats/view"
- "go.opencensus.io/tag"
- "google.golang.org/grpc/codes"
- "google.golang.org/grpc/grpclog"
- "google.golang.org/grpc/stats"
- "google.golang.org/grpc/status"
-)
-
-type grpcInstrumentationKey string
-
-// rpcData holds the instrumentation RPC data that is needed between the start
-// and end of an call. It holds the info that this package needs to keep track
-// of between the various GRPC events.
-type rpcData struct {
- // reqCount and respCount has to be the first words
- // in order to be 64-aligned on 32-bit architectures.
- sentCount, sentBytes, recvCount, recvBytes int64 // access atomically
-
- // startTime represents the time at which TagRPC was invoked at the
- // beginning of an RPC. It is an appoximation of the time when the
- // application code invoked GRPC code.
- startTime time.Time
- method string
-}
-
-// The following variables define the default hard-coded auxiliary data used by
-// both the default GRPC client and GRPC server metrics.
-var (
- DefaultBytesDistribution = view.Distribution(1024, 2048, 4096, 16384, 65536, 262144, 1048576, 4194304, 16777216, 67108864, 268435456, 1073741824, 4294967296)
- DefaultMillisecondsDistribution = view.Distribution(0.01, 0.05, 0.1, 0.3, 0.6, 0.8, 1, 2, 3, 4, 5, 6, 8, 10, 13, 16, 20, 25, 30, 40, 50, 65, 80, 100, 130, 160, 200, 250, 300, 400, 500, 650, 800, 1000, 2000, 5000, 10000, 20000, 50000, 100000)
- DefaultMessageCountDistribution = view.Distribution(1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536)
-)
-
-// Server tags are applied to the context used to process each RPC, as well as
-// the measures at the end of each RPC.
-var (
- KeyServerMethod, _ = tag.NewKey("grpc_server_method")
- KeyServerStatus, _ = tag.NewKey("grpc_server_status")
-)
-
-// Client tags are applied to measures at the end of each RPC.
-var (
- KeyClientMethod, _ = tag.NewKey("grpc_client_method")
- KeyClientStatus, _ = tag.NewKey("grpc_client_status")
-)
-
-var (
- rpcDataKey = grpcInstrumentationKey("opencensus-rpcData")
-)
-
-func methodName(fullname string) string {
- return strings.TrimLeft(fullname, "/")
-}
-
-// statsHandleRPC processes the RPC events.
-func statsHandleRPC(ctx context.Context, s stats.RPCStats) {
- switch st := s.(type) {
- case *stats.Begin, *stats.OutHeader, *stats.InHeader, *stats.InTrailer, *stats.OutTrailer:
- // do nothing for client
- case *stats.OutPayload:
- handleRPCOutPayload(ctx, st)
- case *stats.InPayload:
- handleRPCInPayload(ctx, st)
- case *stats.End:
- handleRPCEnd(ctx, st)
- default:
- grpclog.Infof("unexpected stats: %T", st)
- }
-}
-
-func handleRPCOutPayload(ctx context.Context, s *stats.OutPayload) {
- d, ok := ctx.Value(rpcDataKey).(*rpcData)
- if !ok {
- if grpclog.V(2) {
- grpclog.Infoln("Failed to retrieve *rpcData from context.")
- }
- return
- }
-
- atomic.AddInt64(&d.sentBytes, int64(s.Length))
- atomic.AddInt64(&d.sentCount, 1)
-}
-
-func handleRPCInPayload(ctx context.Context, s *stats.InPayload) {
- d, ok := ctx.Value(rpcDataKey).(*rpcData)
- if !ok {
- if grpclog.V(2) {
- grpclog.Infoln("Failed to retrieve *rpcData from context.")
- }
- return
- }
-
- atomic.AddInt64(&d.recvBytes, int64(s.Length))
- atomic.AddInt64(&d.recvCount, 1)
-}
-
-func handleRPCEnd(ctx context.Context, s *stats.End) {
- d, ok := ctx.Value(rpcDataKey).(*rpcData)
- if !ok {
- if grpclog.V(2) {
- grpclog.Infoln("Failed to retrieve *rpcData from context.")
- }
- return
- }
-
- elapsedTime := time.Since(d.startTime)
-
- var st string
- if s.Error != nil {
- s, ok := status.FromError(s.Error)
- if ok {
- st = statusCodeToString(s)
- }
- } else {
- st = "OK"
- }
-
- latencyMillis := float64(elapsedTime) / float64(time.Millisecond)
- if s.Client {
- ocstats.RecordWithTags(ctx,
- []tag.Mutator{
- tag.Upsert(KeyClientMethod, methodName(d.method)),
- tag.Upsert(KeyClientStatus, st),
- },
- ClientSentBytesPerRPC.M(atomic.LoadInt64(&d.sentBytes)),
- ClientSentMessagesPerRPC.M(atomic.LoadInt64(&d.sentCount)),
- ClientReceivedMessagesPerRPC.M(atomic.LoadInt64(&d.recvCount)),
- ClientReceivedBytesPerRPC.M(atomic.LoadInt64(&d.recvBytes)),
- ClientRoundtripLatency.M(latencyMillis))
- } else {
- ocstats.RecordWithTags(ctx,
- []tag.Mutator{
- tag.Upsert(KeyServerStatus, st),
- },
- ServerSentBytesPerRPC.M(atomic.LoadInt64(&d.sentBytes)),
- ServerSentMessagesPerRPC.M(atomic.LoadInt64(&d.sentCount)),
- ServerReceivedMessagesPerRPC.M(atomic.LoadInt64(&d.recvCount)),
- ServerReceivedBytesPerRPC.M(atomic.LoadInt64(&d.recvBytes)),
- ServerLatency.M(latencyMillis))
- }
-}
-
-func statusCodeToString(s *status.Status) string {
- // see https://github.com/grpc/grpc/blob/master/doc/statuscodes.md
- switch c := s.Code(); c {
- case codes.OK:
- return "OK"
- case codes.Canceled:
- return "CANCELLED"
- case codes.Unknown:
- return "UNKNOWN"
- case codes.InvalidArgument:
- return "INVALID_ARGUMENT"
- case codes.DeadlineExceeded:
- return "DEADLINE_EXCEEDED"
- case codes.NotFound:
- return "NOT_FOUND"
- case codes.AlreadyExists:
- return "ALREADY_EXISTS"
- case codes.PermissionDenied:
- return "PERMISSION_DENIED"
- case codes.ResourceExhausted:
- return "RESOURCE_EXHAUSTED"
- case codes.FailedPrecondition:
- return "FAILED_PRECONDITION"
- case codes.Aborted:
- return "ABORTED"
- case codes.OutOfRange:
- return "OUT_OF_RANGE"
- case codes.Unimplemented:
- return "UNIMPLEMENTED"
- case codes.Internal:
- return "INTERNAL"
- case codes.Unavailable:
- return "UNAVAILABLE"
- case codes.DataLoss:
- return "DATA_LOSS"
- case codes.Unauthenticated:
- return "UNAUTHENTICATED"
- default:
- return "CODE_" + strconv.FormatInt(int64(c), 10)
- }
-}
diff --git a/vendor/go.opencensus.io/plugin/ocgrpc/trace_common.go b/vendor/go.opencensus.io/plugin/ocgrpc/trace_common.go
deleted file mode 100644
index 720f381c2..000000000
--- a/vendor/go.opencensus.io/plugin/ocgrpc/trace_common.go
+++ /dev/null
@@ -1,107 +0,0 @@
-// Copyright 2017, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ocgrpc
-
-import (
- "strings"
-
- "google.golang.org/grpc/codes"
-
- "go.opencensus.io/trace"
- "go.opencensus.io/trace/propagation"
- "golang.org/x/net/context"
- "google.golang.org/grpc/metadata"
- "google.golang.org/grpc/stats"
- "google.golang.org/grpc/status"
-)
-
-const traceContextKey = "grpc-trace-bin"
-
-// TagRPC creates a new trace span for the client side of the RPC.
-//
-// It returns ctx with the new trace span added and a serialization of the
-// SpanContext added to the outgoing gRPC metadata.
-func (c *ClientHandler) traceTagRPC(ctx context.Context, rti *stats.RPCTagInfo) context.Context {
- name := strings.TrimPrefix(rti.FullMethodName, "/")
- name = strings.Replace(name, "/", ".", -1)
- ctx, span := trace.StartSpan(ctx, name,
- trace.WithSampler(c.StartOptions.Sampler),
- trace.WithSpanKind(trace.SpanKindClient)) // span is ended by traceHandleRPC
- traceContextBinary := propagation.Binary(span.SpanContext())
- return metadata.AppendToOutgoingContext(ctx, traceContextKey, string(traceContextBinary))
-}
-
-// TagRPC creates a new trace span for the server side of the RPC.
-//
-// It checks the incoming gRPC metadata in ctx for a SpanContext, and if
-// it finds one, uses that SpanContext as the parent context of the new span.
-//
-// It returns ctx, with the new trace span added.
-func (s *ServerHandler) traceTagRPC(ctx context.Context, rti *stats.RPCTagInfo) context.Context {
- md, _ := metadata.FromIncomingContext(ctx)
- name := strings.TrimPrefix(rti.FullMethodName, "/")
- name = strings.Replace(name, "/", ".", -1)
- traceContext := md[traceContextKey]
- var (
- parent trace.SpanContext
- haveParent bool
- )
- if len(traceContext) > 0 {
- // Metadata with keys ending in -bin are actually binary. They are base64
- // encoded before being put on the wire, see:
- // https://github.com/grpc/grpc-go/blob/08d6261/Documentation/grpc-metadata.md#storing-binary-data-in-metadata
- traceContextBinary := []byte(traceContext[0])
- parent, haveParent = propagation.FromBinary(traceContextBinary)
- if haveParent && !s.IsPublicEndpoint {
- ctx, _ := trace.StartSpanWithRemoteParent(ctx, name, parent,
- trace.WithSpanKind(trace.SpanKindServer),
- trace.WithSampler(s.StartOptions.Sampler),
- )
- return ctx
- }
- }
- ctx, span := trace.StartSpan(ctx, name,
- trace.WithSpanKind(trace.SpanKindServer),
- trace.WithSampler(s.StartOptions.Sampler))
- if haveParent {
- span.AddLink(trace.Link{TraceID: parent.TraceID, SpanID: parent.SpanID, Type: trace.LinkTypeChild})
- }
- return ctx
-}
-
-func traceHandleRPC(ctx context.Context, rs stats.RPCStats) {
- span := trace.FromContext(ctx)
- // TODO: compressed and uncompressed sizes are not populated in every message.
- switch rs := rs.(type) {
- case *stats.Begin:
- span.AddAttributes(
- trace.BoolAttribute("Client", rs.Client),
- trace.BoolAttribute("FailFast", rs.FailFast))
- case *stats.InPayload:
- span.AddMessageReceiveEvent(0 /* TODO: messageID */, int64(rs.Length), int64(rs.WireLength))
- case *stats.OutPayload:
- span.AddMessageSendEvent(0, int64(rs.Length), int64(rs.WireLength))
- case *stats.End:
- if rs.Error != nil {
- s, ok := status.FromError(rs.Error)
- if ok {
- span.SetStatus(trace.Status{Code: int32(s.Code()), Message: s.Message()})
- } else {
- span.SetStatus(trace.Status{Code: int32(codes.Internal), Message: rs.Error.Error()})
- }
- }
- span.End()
- }
-}
diff --git a/vendor/go.opencensus.io/plugin/ochttp/propagation/tracecontext/propagation.go b/vendor/go.opencensus.io/plugin/ochttp/propagation/tracecontext/propagation.go
deleted file mode 100644
index 65ab1e996..000000000
--- a/vendor/go.opencensus.io/plugin/ochttp/propagation/tracecontext/propagation.go
+++ /dev/null
@@ -1,187 +0,0 @@
-// Copyright 2018, OpenCensus Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package tracecontext contains HTTP propagator for TraceContext standard.
-// See https://github.com/w3c/distributed-tracing for more information.
-package tracecontext // import "go.opencensus.io/plugin/ochttp/propagation/tracecontext"
-
-import (
- "encoding/hex"
- "fmt"
- "net/http"
- "net/textproto"
- "regexp"
- "strings"
-
- "go.opencensus.io/trace"
- "go.opencensus.io/trace/propagation"
- "go.opencensus.io/trace/tracestate"
-)
-
-const (
- supportedVersion = 0
- maxVersion = 254
- maxTracestateLen = 512
- traceparentHeader = "traceparent"
- tracestateHeader = "tracestate"
- trimOWSRegexFmt = `^[\x09\x20]*(.*[^\x20\x09])[\x09\x20]*$`
-)
-
-var trimOWSRegExp = regexp.MustCompile(trimOWSRegexFmt)
-
-var _ propagation.HTTPFormat = (*HTTPFormat)(nil)
-
-// HTTPFormat implements the TraceContext trace propagation format.
-type HTTPFormat struct{}
-
-// SpanContextFromRequest extracts a span context from incoming requests.
-func (f *HTTPFormat) SpanContextFromRequest(req *http.Request) (sc trace.SpanContext, ok bool) {
- h, ok := getRequestHeader(req, traceparentHeader, false)
- if !ok {
- return trace.SpanContext{}, false
- }
- sections := strings.Split(h, "-")
- if len(sections) < 4 {
- return trace.SpanContext{}, false
- }
-
- if len(sections[0]) != 2 {
- return trace.SpanContext{}, false
- }
- ver, err := hex.DecodeString(sections[0])
- if err != nil {
- return trace.SpanContext{}, false
- }
- version := int(ver[0])
- if version > maxVersion {
- return trace.SpanContext{}, false
- }
-
- if version == 0 && len(sections) != 4 {
- return trace.SpanContext{}, false
- }
-
- if len(sections[1]) != 32 {
- return trace.SpanContext{}, false
- }
- tid, err := hex.DecodeString(sections[1])
- if err != nil {
- return trace.SpanContext{}, false
- }
- copy(sc.TraceID[:], tid)
-
- if len(sections[2]) != 16 {
- return trace.SpanContext{}, false
- }
- sid, err := hex.DecodeString(sections[2])
- if err != nil {
- return trace.SpanContext{}, false
- }
- copy(sc.SpanID[:], sid)
-
- opts, err := hex.DecodeString(sections[3])
- if err != nil || len(opts) < 1 {
- return trace.SpanContext{}, false
- }
- sc.TraceOptions = trace.TraceOptions(opts[0])
-
- // Don't allow all zero trace or span ID.
- if sc.TraceID == [16]byte{} || sc.SpanID == [8]byte{} {
- return trace.SpanContext{}, false
- }
-
- sc.Tracestate = tracestateFromRequest(req)
- return sc, true
-}
-
-// getRequestHeader returns a combined header field according to RFC7230 section 3.2.2.
-// If commaSeparated is true, multiple header fields with the same field name using be
-// combined using ",".
-// If no header was found using the given name, "ok" would be false.
-// If more than one headers was found using the given name, while commaSeparated is false,
-// "ok" would be false.
-func getRequestHeader(req *http.Request, name string, commaSeparated bool) (hdr string, ok bool) {
- v := req.Header[textproto.CanonicalMIMEHeaderKey(name)]
- switch len(v) {
- case 0:
- return "", false
- case 1:
- return v[0], true
- default:
- return strings.Join(v, ","), commaSeparated
- }
-}
-
-// TODO(rghetia): return an empty Tracestate when parsing tracestate header encounters an error.
-// Revisit to return additional boolean value to indicate parsing error when following issues
-// are resolved.
-// https://github.com/w3c/distributed-tracing/issues/172
-// https://github.com/w3c/distributed-tracing/issues/175
-func tracestateFromRequest(req *http.Request) *tracestate.Tracestate {
- h, _ := getRequestHeader(req, tracestateHeader, true)
- if h == "" {
- return nil
- }
-
- var entries []tracestate.Entry
- pairs := strings.Split(h, ",")
- hdrLenWithoutOWS := len(pairs) - 1 // Number of commas
- for _, pair := range pairs {
- matches := trimOWSRegExp.FindStringSubmatch(pair)
- if matches == nil {
- return nil
- }
- pair = matches[1]
- hdrLenWithoutOWS += len(pair)
- if hdrLenWithoutOWS > maxTracestateLen {
- return nil
- }
- kv := strings.Split(pair, "=")
- if len(kv) != 2 {
- return nil
- }
- entries = append(entries, tracestate.Entry{Key: kv[0], Value: kv[1]})
- }
- ts, err := tracestate.New(nil, entries...)
- if err != nil {
- return nil
- }
-
- return ts
-}
-
-func tracestateToRequest(sc trace.SpanContext, req *http.Request) {
- var pairs = make([]string, 0, len(sc.Tracestate.Entries()))
- if sc.Tracestate != nil {
- for _, entry := range sc.Tracestate.Entries() {
- pairs = append(pairs, strings.Join([]string{entry.Key, entry.Value}, "="))
- }
- h := strings.Join(pairs, ",")
-
- if h != "" && len(h) <= maxTracestateLen {
- req.Header.Set(tracestateHeader, h)
- }
- }
-}
-
-// SpanContextToRequest modifies the given request to include traceparent and tracestate headers.
-func (f *HTTPFormat) SpanContextToRequest(sc trace.SpanContext, req *http.Request) {
- h := fmt.Sprintf("%x-%x-%x-%x",
- []byte{supportedVersion},
- sc.TraceID[:],
- sc.SpanID[:],
- []byte{byte(sc.TraceOptions)})
- req.Header.Set(traceparentHeader, h)
- tracestateToRequest(sc, req)
-}
diff --git a/vendor/golang.org/x/sync/AUTHORS b/vendor/golang.org/x/sync/AUTHORS
deleted file mode 100644
index 15167cd74..000000000
--- a/vendor/golang.org/x/sync/AUTHORS
+++ /dev/null
@@ -1,3 +0,0 @@
-# This source code refers to The Go Authors for copyright purposes.
-# The master list of authors is in the main Go distribution,
-# visible at http://tip.golang.org/AUTHORS.
diff --git a/vendor/golang.org/x/sync/CONTRIBUTORS b/vendor/golang.org/x/sync/CONTRIBUTORS
deleted file mode 100644
index 1c4577e96..000000000
--- a/vendor/golang.org/x/sync/CONTRIBUTORS
+++ /dev/null
@@ -1,3 +0,0 @@
-# This source code was written by the Go contributors.
-# The master list of contributors is in the main Go distribution,
-# visible at http://tip.golang.org/CONTRIBUTORS.
diff --git a/vendor/golang.org/x/sync/LICENSE b/vendor/golang.org/x/sync/LICENSE
deleted file mode 100644
index 6a66aea5e..000000000
--- a/vendor/golang.org/x/sync/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2009 The Go Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/sync/PATENTS b/vendor/golang.org/x/sync/PATENTS
deleted file mode 100644
index 733099041..000000000
--- a/vendor/golang.org/x/sync/PATENTS
+++ /dev/null
@@ -1,22 +0,0 @@
-Additional IP Rights Grant (Patents)
-
-"This implementation" means the copyrightable works distributed by
-Google as part of the Go project.
-
-Google hereby grants to You a perpetual, worldwide, non-exclusive,
-no-charge, royalty-free, irrevocable (except as stated in this section)
-patent license to make, have made, use, offer to sell, sell, import,
-transfer and otherwise run, modify and propagate the contents of this
-implementation of Go, where such license applies only to those patent
-claims, both currently owned or controlled by Google and acquired in
-the future, licensable by Google that are necessarily infringed by this
-implementation of Go. This grant does not include claims that would be
-infringed only as a consequence of further modification of this
-implementation. If you or your agent or exclusive licensee institute or
-order or agree to the institution of patent litigation against any
-entity (including a cross-claim or counterclaim in a lawsuit) alleging
-that this implementation of Go or any code incorporated within this
-implementation of Go constitutes direct or contributory patent
-infringement, or inducement of patent infringement, then any patent
-rights granted to you under this License for this implementation of Go
-shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/sync/semaphore/semaphore.go b/vendor/golang.org/x/sync/semaphore/semaphore.go
deleted file mode 100644
index 7f096fef0..000000000
--- a/vendor/golang.org/x/sync/semaphore/semaphore.go
+++ /dev/null
@@ -1,127 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package semaphore provides a weighted semaphore implementation.
-package semaphore // import "golang.org/x/sync/semaphore"
-
-import (
- "container/list"
- "context"
- "sync"
-)
-
-type waiter struct {
- n int64
- ready chan<- struct{} // Closed when semaphore acquired.
-}
-
-// NewWeighted creates a new weighted semaphore with the given
-// maximum combined weight for concurrent access.
-func NewWeighted(n int64) *Weighted {
- w := &Weighted{size: n}
- return w
-}
-
-// Weighted provides a way to bound concurrent access to a resource.
-// The callers can request access with a given weight.
-type Weighted struct {
- size int64
- cur int64
- mu sync.Mutex
- waiters list.List
-}
-
-// Acquire acquires the semaphore with a weight of n, blocking until resources
-// are available or ctx is done. On success, returns nil. On failure, returns
-// ctx.Err() and leaves the semaphore unchanged.
-//
-// If ctx is already done, Acquire may still succeed without blocking.
-func (s *Weighted) Acquire(ctx context.Context, n int64) error {
- s.mu.Lock()
- if s.size-s.cur >= n && s.waiters.Len() == 0 {
- s.cur += n
- s.mu.Unlock()
- return nil
- }
-
- if n > s.size {
- // Don't make other Acquire calls block on one that's doomed to fail.
- s.mu.Unlock()
- <-ctx.Done()
- return ctx.Err()
- }
-
- ready := make(chan struct{})
- w := waiter{n: n, ready: ready}
- elem := s.waiters.PushBack(w)
- s.mu.Unlock()
-
- select {
- case <-ctx.Done():
- err := ctx.Err()
- s.mu.Lock()
- select {
- case <-ready:
- // Acquired the semaphore after we were canceled. Rather than trying to
- // fix up the queue, just pretend we didn't notice the cancelation.
- err = nil
- default:
- s.waiters.Remove(elem)
- }
- s.mu.Unlock()
- return err
-
- case <-ready:
- return nil
- }
-}
-
-// TryAcquire acquires the semaphore with a weight of n without blocking.
-// On success, returns true. On failure, returns false and leaves the semaphore unchanged.
-func (s *Weighted) TryAcquire(n int64) bool {
- s.mu.Lock()
- success := s.size-s.cur >= n && s.waiters.Len() == 0
- if success {
- s.cur += n
- }
- s.mu.Unlock()
- return success
-}
-
-// Release releases the semaphore with a weight of n.
-func (s *Weighted) Release(n int64) {
- s.mu.Lock()
- s.cur -= n
- if s.cur < 0 {
- s.mu.Unlock()
- panic("semaphore: released more than held")
- }
- for {
- next := s.waiters.Front()
- if next == nil {
- break // No more waiters blocked.
- }
-
- w := next.Value.(waiter)
- if s.size-s.cur < w.n {
- // Not enough tokens for the next waiter. We could keep going (to try to
- // find a waiter with a smaller request), but under load that could cause
- // starvation for large requests; instead, we leave all remaining waiters
- // blocked.
- //
- // Consider a semaphore used as a read-write lock, with N tokens, N
- // readers, and one writer. Each reader can Acquire(1) to obtain a read
- // lock. The writer can Acquire(N) to obtain a write lock, excluding all
- // of the readers. If we allow the readers to jump ahead in the queue,
- // the writer will starve — there is always one token available for every
- // reader.
- break
- }
-
- s.cur += w.n
- s.waiters.Remove(next)
- close(w.ready)
- }
- s.mu.Unlock()
-}
diff --git a/vendor/google.golang.org/api/support/bundler/bundler.go b/vendor/google.golang.org/api/support/bundler/bundler.go
deleted file mode 100644
index c55327119..000000000
--- a/vendor/google.golang.org/api/support/bundler/bundler.go
+++ /dev/null
@@ -1,349 +0,0 @@
-// Copyright 2016 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package bundler supports bundling (batching) of items. Bundling amortizes an
-// action with fixed costs over multiple items. For example, if an API provides
-// an RPC that accepts a list of items as input, but clients would prefer
-// adding items one at a time, then a Bundler can accept individual items from
-// the client and bundle many of them into a single RPC.
-//
-// This package is experimental and subject to change without notice.
-package bundler
-
-import (
- "context"
- "errors"
- "math"
- "reflect"
- "sync"
- "time"
-
- "golang.org/x/sync/semaphore"
-)
-
-const (
- DefaultDelayThreshold = time.Second
- DefaultBundleCountThreshold = 10
- DefaultBundleByteThreshold = 1e6 // 1M
- DefaultBufferedByteLimit = 1e9 // 1G
-)
-
-var (
- // ErrOverflow indicates that Bundler's stored bytes exceeds its BufferedByteLimit.
- ErrOverflow = errors.New("bundler reached buffered byte limit")
-
- // ErrOversizedItem indicates that an item's size exceeds the maximum bundle size.
- ErrOversizedItem = errors.New("item size exceeds bundle byte limit")
-)
-
-// A Bundler collects items added to it into a bundle until the bundle
-// exceeds a given size, then calls a user-provided function to handle the bundle.
-type Bundler struct {
- // Starting from the time that the first message is added to a bundle, once
- // this delay has passed, handle the bundle. The default is DefaultDelayThreshold.
- DelayThreshold time.Duration
-
- // Once a bundle has this many items, handle the bundle. Since only one
- // item at a time is added to a bundle, no bundle will exceed this
- // threshold, so it also serves as a limit. The default is
- // DefaultBundleCountThreshold.
- BundleCountThreshold int
-
- // Once the number of bytes in current bundle reaches this threshold, handle
- // the bundle. The default is DefaultBundleByteThreshold. This triggers handling,
- // but does not cap the total size of a bundle.
- BundleByteThreshold int
-
- // The maximum size of a bundle, in bytes. Zero means unlimited.
- BundleByteLimit int
-
- // The maximum number of bytes that the Bundler will keep in memory before
- // returning ErrOverflow. The default is DefaultBufferedByteLimit.
- BufferedByteLimit int
-
- // The maximum number of handler invocations that can be running at once.
- // The default is 1.
- HandlerLimit int
-
- handler func(interface{}) // called to handle a bundle
- itemSliceZero reflect.Value // nil (zero value) for slice of items
- flushTimer *time.Timer // implements DelayThreshold
-
- mu sync.Mutex
- sem *semaphore.Weighted // enforces BufferedByteLimit
- semOnce sync.Once
- curBundle bundle // incoming items added to this bundle
-
- // Each bundle is assigned a unique ticket that determines the order in which the
- // handler is called. The ticket is assigned with mu locked, but waiting for tickets
- // to be handled is done via mu2 and cond, below.
- nextTicket uint64 // next ticket to be assigned
-
- mu2 sync.Mutex
- cond *sync.Cond
- nextHandled uint64 // next ticket to be handled
-
- // In this implementation, active uses space proportional to HandlerLimit, and
- // waitUntilAllHandled takes time proportional to HandlerLimit each time an acquire
- // or release occurs, so large values of HandlerLimit max may cause performance
- // issues.
- active map[uint64]bool // tickets of bundles actively being handled
-}
-
-type bundle struct {
- items reflect.Value // slice of item type
- size int // size in bytes of all items
-}
-
-// NewBundler creates a new Bundler.
-//
-// itemExample is a value of the type that will be bundled. For example, if you
-// want to create bundles of *Entry, you could pass &Entry{} for itemExample.
-//
-// handler is a function that will be called on each bundle. If itemExample is
-// of type T, the argument to handler is of type []T. handler is always called
-// sequentially for each bundle, and never in parallel.
-//
-// Configure the Bundler by setting its thresholds and limits before calling
-// any of its methods.
-func NewBundler(itemExample interface{}, handler func(interface{})) *Bundler {
- b := &Bundler{
- DelayThreshold: DefaultDelayThreshold,
- BundleCountThreshold: DefaultBundleCountThreshold,
- BundleByteThreshold: DefaultBundleByteThreshold,
- BufferedByteLimit: DefaultBufferedByteLimit,
- HandlerLimit: 1,
-
- handler: handler,
- itemSliceZero: reflect.Zero(reflect.SliceOf(reflect.TypeOf(itemExample))),
- active: map[uint64]bool{},
- }
- b.curBundle.items = b.itemSliceZero
- b.cond = sync.NewCond(&b.mu2)
- return b
-}
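
The API deleted above is small: NewBundler takes an example item and a handler, and the thresholds are plain struct fields set before the first Add. A minimal sketch of a caller of the removed google.golang.org/api/support/bundler package, assuming a hypothetical span type and export step (neither comes from this repository):

    package main

    import (
        "fmt"
        "time"

        "google.golang.org/api/support/bundler"
    )

    type span struct{ name string }

    func main() {
        b := bundler.NewBundler(&span{}, func(items interface{}) {
            // Because itemExample was *span, the handler receives a []*span.
            fmt.Println("exporting", len(items.([]*span)), "spans")
        })
        // Thresholds and limits are configured after construction but before
        // the first call to any other method.
        b.DelayThreshold = 100 * time.Millisecond
        b.BundleCountThreshold = 50
        _ = b.Add(&span{name: "checkout"}, 1) // second argument is the item's size in bytes
        b.Flush()                             // hand off and wait for anything still buffered
    }
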
-
-func (b *Bundler) initSemaphores() {
- // Create the semaphores lazily, because the user may set limits
- // after NewBundler.
- b.semOnce.Do(func() {
- b.sem = semaphore.NewWeighted(int64(b.BufferedByteLimit))
- })
-}
-
-// Add adds item to the current bundle. It marks the bundle for handling and
-// starts a new one if any of the thresholds or limits are exceeded.
-//
-// If the item's size exceeds the maximum bundle size (Bundler.BundleByteLimit), then
-// the item can never be handled. Add returns ErrOversizedItem in this case.
-//
-// If adding the item would exceed the maximum memory allowed
-// (Bundler.BufferedByteLimit) or an AddWait call is blocked waiting for
-// memory, Add returns ErrOverflow.
-//
-// Add never blocks.
-func (b *Bundler) Add(item interface{}, size int) error {
- // If this item exceeds the maximum size of a bundle,
- // we can never send it.
- if b.BundleByteLimit > 0 && size > b.BundleByteLimit {
- return ErrOversizedItem
- }
- // If adding this item would exceed our allotted memory
- // footprint, we can't accept it.
- // (TryAcquire also returns false if anything is waiting on the semaphore,
- // so calls to Add and AddWait shouldn't be mixed.)
- b.initSemaphores()
- if !b.sem.TryAcquire(int64(size)) {
- return ErrOverflow
- }
- b.add(item, size)
- return nil
-}
-
-// add adds item to the current bundle. It marks the bundle for handling and
-// starts a new one if any of the thresholds or limits are exceeded.
-func (b *Bundler) add(item interface{}, size int) {
- b.mu.Lock()
- defer b.mu.Unlock()
-
- // If adding this item to the current bundle would cause it to exceed the
- // maximum bundle size, close the current bundle and start a new one.
- if b.BundleByteLimit > 0 && b.curBundle.size+size > b.BundleByteLimit {
- b.startFlushLocked()
- }
- // Add the item.
- b.curBundle.items = reflect.Append(b.curBundle.items, reflect.ValueOf(item))
- b.curBundle.size += size
-
- // Start a timer to flush the item if one isn't already running.
- // startFlushLocked clears the timer and closes the bundle at the same time,
- // so we only allocate a new timer for the first item in each bundle.
- // (We could try to call Reset on the timer instead, but that would add a lot
- // of complexity to the code just to save one small allocation.)
- if b.flushTimer == nil {
- b.flushTimer = time.AfterFunc(b.DelayThreshold, b.Flush)
- }
-
- // If the current bundle equals the count threshold, close it.
- if b.curBundle.items.Len() == b.BundleCountThreshold {
- b.startFlushLocked()
- }
- // If the current bundle equals or exceeds the byte threshold, close it.
- if b.curBundle.size >= b.BundleByteThreshold {
- b.startFlushLocked()
- }
-}
-
-// AddWait adds item to the current bundle. It marks the bundle for handling and
-// starts a new one if any of the thresholds or limits are exceeded.
-//
-// If the item's size exceeds the maximum bundle size (Bundler.BundleByteLimit), then
-// the item can never be handled. AddWait returns ErrOversizedItem in this case.
-//
-// If adding the item would exceed the maximum memory allowed (Bundler.BufferedByteLimit),
-// AddWait blocks until space is available or ctx is done.
-//
-// Calls to Add and AddWait should not be mixed on the same Bundler.
-func (b *Bundler) AddWait(ctx context.Context, item interface{}, size int) error {
- // If this item exceeds the maximum size of a bundle,
- // we can never send it.
- if b.BundleByteLimit > 0 && size > b.BundleByteLimit {
- return ErrOversizedItem
- }
- // If adding this item would exceed our allotted memory footprint, block
- // until space is available. The semaphore is FIFO, so there will be no
- // starvation.
- b.initSemaphores()
- if err := b.sem.Acquire(ctx, int64(size)); err != nil {
- return err
- }
- // Here, we've reserved space for item. Other goroutines can call AddWait
- // and even acquire space, but no one can take away our reservation
- // (assuming sem.Release is used correctly). So there is no race condition
- // resulting from locking the mutex after sem.Acquire returns.
- b.add(item, size)
- return nil
-}
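
Add and AddWait differ only in how they treat BufferedByteLimit: Add fails fast with ErrOverflow, while AddWait parks the caller on the FIFO semaphore until space frees up or the context ends, and the two should not be mixed on one Bundler. A sketch of the blocking variant, reusing the illustrative span type from the sketch above (with "context" added to its imports) on a separate Bundler:

    wb := bundler.NewBundler(&span{}, func(items interface{}) { /* export items.([]*span) */ })
    ctx, cancel := context.WithTimeout(context.Background(), time.Second)
    defer cancel()
    if err := wb.AddWait(ctx, &span{name: "refund"}, 1); err != nil {
        // err is ctx.Err() if the wait timed out or was cancelled, or
        // ErrOversizedItem if this single item exceeds BundleByteLimit.
        fmt.Println("dropping item:", err)
    }
    wb.Flush()
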
-
-// Flush invokes the handler for all remaining items in the Bundler and waits
-// for it to return.
-func (b *Bundler) Flush() {
- b.mu.Lock()
- b.startFlushLocked()
- // Here, all bundles with tickets < b.nextTicket are
- // either finished or active. Those are the ones
- // we want to wait for.
- t := b.nextTicket
- b.mu.Unlock()
- b.initSemaphores()
- b.waitUntilAllHandled(t)
-}
-
-func (b *Bundler) startFlushLocked() {
- if b.flushTimer != nil {
- b.flushTimer.Stop()
- b.flushTimer = nil
- }
- if b.curBundle.items.Len() == 0 {
- return
- }
- // Here, both semaphores must have been initialized.
- bun := b.curBundle
- b.curBundle = bundle{items: b.itemSliceZero}
- ticket := b.nextTicket
- b.nextTicket++
- go func() {
- defer func() {
- b.sem.Release(int64(bun.size))
- b.release(ticket)
- }()
- b.acquire(ticket)
- b.handler(bun.items.Interface())
- }()
-}
-
-// acquire blocks until ticket is the next to be served, then returns. In order for N
-// acquire calls to return, the tickets must be in the range [0, N). A ticket must
-// not be presented to acquire more than once.
-func (b *Bundler) acquire(ticket uint64) {
- b.mu2.Lock()
- defer b.mu2.Unlock()
- if ticket < b.nextHandled {
- panic("bundler: acquire: arg too small")
- }
- for !(ticket == b.nextHandled && len(b.active) < b.HandlerLimit) {
- b.cond.Wait()
- }
- // Here,
- // ticket == b.nextHandled: the caller is the next one to be handled;
- // and len(b.active) < b.HandlerLimit: there is space available.
- b.active[ticket] = true
- b.nextHandled++
- // Broadcast, not Signal: although at most one acquire waiter can make progress,
- // there might be waiters in waitUntilAllHandled.
- b.cond.Broadcast()
-}
-
-// If a ticket is used for a call to acquire, it must later be passed to release. A
-// ticket must not be presented to release more than once.
-func (b *Bundler) release(ticket uint64) {
- b.mu2.Lock()
- defer b.mu2.Unlock()
- if !b.active[ticket] {
- panic("bundler: release: not an active ticket")
- }
- delete(b.active, ticket)
- b.cond.Broadcast()
-}
-
-// waitUntilAllHandled blocks until all tickets < n have called release, meaning
-// all bundles with tickets < n have been handled.
-func (b *Bundler) waitUntilAllHandled(n uint64) {
- // Proof of correctness of this function.
- // "N is acquired" means acquire(N) has returned.
- // "N is released" means release(N) has returned.
- // 1. If N is acquired, N-1 is acquired.
- // Follows from the loop test in acquire, and the fact
- // that nextHandled is incremented by 1.
- // 2. If nextHandled >= N, then N-1 is acquired.
- // Because we only increment nextHandled to N after N-1 is acquired.
- // 3. If nextHandled >= N, then all n < N is acquired.
- // Follows from #1 and #2.
- // 4. If N is acquired and N is not in active, then N is released.
- // Because we put N in active before acquire returns, and only
- // remove it when it is released.
- // Let min(active) be the smallest member of active, or infinity if active is empty.
- // 5. If nextHandled >= N and N <= min(active), then all n < N is released.
- // From nextHandled >= N and #3, all n < N is acquired.
- // N <= min(active) implies n < min(active) for all n < N. So all n < N is not in active.
- // So from #4, all n < N is released.
- // The loop test below is the antecedent of #5.
- b.mu2.Lock()
- defer b.mu2.Unlock()
- for !(b.nextHandled >= n && n <= min(b.active)) {
- b.cond.Wait()
- }
-}
-
-// min returns the minimum value of the set s, or the largest uint64 if
-// s is empty.
-func min(s map[uint64]bool) uint64 {
- var m uint64 = math.MaxUint64
- for n := range s {
- if n < m {
- m = n
- }
- }
- return m
-}
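
The ticket machinery above is what lets HandlerLimit be raised without reordering work: acquire admits bundle N only after bundle N-1 has been admitted and while fewer than HandlerLimit handlers are active, and Flush uses waitUntilAllHandled to wait for every ticket issued before it. A sketch of that behaviour with illustrative values:

    ob := bundler.NewBundler(int(0), func(items interface{}) {
        // Handler invocations *start* in the order the bundles were closed;
        // with HandlerLimit > 1 they may then run concurrently.
        fmt.Println(items.([]int))
    })
    ob.HandlerLimit = 4
    ob.BundleCountThreshold = 2
    for i := 0; i < 8; i++ {
        _ = ob.Add(i, 1) // cuts a new bundle after every second item
    }
    ob.Flush() // returns only after all four bundles have been handled
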
diff --git a/vendor/google.golang.org/genproto/googleapis/api/httpbody/httpbody.pb.go b/vendor/google.golang.org/genproto/googleapis/api/httpbody/httpbody.pb.go
deleted file mode 100644
index 26021b0f9..000000000
--- a/vendor/google.golang.org/genproto/googleapis/api/httpbody/httpbody.pb.go
+++ /dev/null
@@ -1,142 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: google/api/httpbody.proto
-
-package httpbody // import "google.golang.org/genproto/googleapis/api/httpbody"
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-import any "github.com/golang/protobuf/ptypes/any"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
-
-// Message that represents an arbitrary HTTP body. It should only be used for
-// payload formats that can't be represented as JSON, such as raw binary or
-// an HTML page.
-//
-//
-// This message can be used both in streaming and non-streaming API methods in
-// the request as well as the response.
-//
-// It can be used as a top-level request field, which is convenient if one
-// wants to extract parameters from either the URL or HTTP template into the
-// request fields and also want access to the raw HTTP body.
-//
-// Example:
-//
-// message GetResourceRequest {
-// // A unique request id.
-// string request_id = 1;
-//
-// // The raw HTTP body is bound to this field.
-// google.api.HttpBody http_body = 2;
-// }
-//
-// service ResourceService {
-// rpc GetResource(GetResourceRequest) returns (google.api.HttpBody);
-// rpc UpdateResource(google.api.HttpBody) returns
-// (google.protobuf.Empty);
-// }
-//
-// Example with streaming methods:
-//
-// service CaldavService {
-// rpc GetCalendar(stream google.api.HttpBody)
-// returns (stream google.api.HttpBody);
-// rpc UpdateCalendar(stream google.api.HttpBody)
-// returns (stream google.api.HttpBody);
-// }
-//
-// Use of this type only changes how the request and response bodies are
-// handled, all other features will continue to work unchanged.
-type HttpBody struct {
- // The HTTP Content-Type header value specifying the content type of the body.
- ContentType string `protobuf:"bytes,1,opt,name=content_type,json=contentType,proto3" json:"content_type,omitempty"`
- // The HTTP request/response body as raw binary.
- Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"`
- // Application specific response metadata. Must be set in the first response
- // for streaming APIs.
- Extensions []*any.Any `protobuf:"bytes,3,rep,name=extensions,proto3" json:"extensions,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *HttpBody) Reset() { *m = HttpBody{} }
-func (m *HttpBody) String() string { return proto.CompactTextString(m) }
-func (*HttpBody) ProtoMessage() {}
-func (*HttpBody) Descriptor() ([]byte, []int) {
- return fileDescriptor_httpbody_45db50668f1dc1dc, []int{0}
-}
-func (m *HttpBody) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_HttpBody.Unmarshal(m, b)
-}
-func (m *HttpBody) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_HttpBody.Marshal(b, m, deterministic)
-}
-func (dst *HttpBody) XXX_Merge(src proto.Message) {
- xxx_messageInfo_HttpBody.Merge(dst, src)
-}
-func (m *HttpBody) XXX_Size() int {
- return xxx_messageInfo_HttpBody.Size(m)
-}
-func (m *HttpBody) XXX_DiscardUnknown() {
- xxx_messageInfo_HttpBody.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_HttpBody proto.InternalMessageInfo
-
-func (m *HttpBody) GetContentType() string {
- if m != nil {
- return m.ContentType
- }
- return ""
-}
-
-func (m *HttpBody) GetData() []byte {
- if m != nil {
- return m.Data
- }
- return nil
-}
-
-func (m *HttpBody) GetExtensions() []*any.Any {
- if m != nil {
- return m.Extensions
- }
- return nil
-}
-
-func init() {
- proto.RegisterType((*HttpBody)(nil), "google.api.HttpBody")
-}
-
-func init() { proto.RegisterFile("google/api/httpbody.proto", fileDescriptor_httpbody_45db50668f1dc1dc) }
-
-var fileDescriptor_httpbody_45db50668f1dc1dc = []byte{
- // 229 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x8f, 0x31, 0x4f, 0xc3, 0x30,
- 0x10, 0x85, 0xe5, 0xb6, 0x42, 0x70, 0x2d, 0x0c, 0x16, 0x43, 0x60, 0x0a, 0x4c, 0x99, 0x6c, 0x09,
- 0xd8, 0x3a, 0x35, 0x0b, 0xb0, 0x45, 0x11, 0x13, 0x0b, 0x72, 0x1a, 0xe3, 0x46, 0x2a, 0x77, 0xa7,
- 0xe6, 0x10, 0xf8, 0xef, 0xf0, 0x2b, 0x19, 0x11, 0x69, 0x2c, 0xe8, 0xf6, 0xe4, 0xef, 0x3d, 0xbf,
- 0x77, 0x70, 0x11, 0x88, 0xc2, 0xd6, 0x5b, 0xc7, 0x9d, 0xdd, 0x88, 0x70, 0x43, 0x6d, 0x34, 0xbc,
- 0x23, 0x21, 0x0d, 0x7b, 0x64, 0x1c, 0x77, 0x97, 0xc9, 0x36, 0x90, 0xe6, 0xfd, 0xd5, 0x3a, 0x1c,
- 0x6d, 0xd7, 0x1f, 0x70, 0xfc, 0x20, 0xc2, 0x25, 0xb5, 0x51, 0x5f, 0xc1, 0x62, 0x4d, 0x28, 0x1e,
- 0xe5, 0x45, 0x22, 0xfb, 0x4c, 0xe5, 0xaa, 0x38, 0xa9, 0xe7, 0xe3, 0xdb, 0x53, 0x64, 0xaf, 0x35,
- 0xcc, 0x5a, 0x27, 0x2e, 0x9b, 0xe4, 0xaa, 0x58, 0xd4, 0x83, 0xd6, 0x77, 0x00, 0xfe, 0x53, 0x3c,
- 0xf6, 0x1d, 0x61, 0x9f, 0x4d, 0xf3, 0x69, 0x31, 0xbf, 0x39, 0x37, 0x63, 0x7d, 0xaa, 0x34, 0x2b,
- 0x8c, 0xf5, 0x3f, 0x5f, 0xb9, 0x81, 0xb3, 0x35, 0xbd, 0x99, 0xbf, 0x95, 0xe5, 0x69, 0x1a, 0x52,
- 0xfd, 0x66, 0x2a, 0xf5, 0xbc, 0x1c, 0x61, 0xa0, 0xad, 0xc3, 0x60, 0x68, 0x17, 0x6c, 0xf0, 0x38,
- 0xfc, 0x68, 0xf7, 0xc8, 0x71, 0xd7, 0x1f, 0x1c, 0xbf, 0x4c, 0xe2, 0x5b, 0xa9, 0xaf, 0xc9, 0xec,
- 0x7e, 0x55, 0x3d, 0x36, 0x47, 0x43, 0xe2, 0xf6, 0x27, 0x00, 0x00, 0xff, 0xff, 0x78, 0xb9, 0x16,
- 0x2b, 0x2d, 0x01, 0x00, 0x00,
-}
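
For reference, the generated HttpBody message removed above is consumed from Go as an ordinary proto struct; a minimal sketch with arbitrary content:

    package main

    import (
        "fmt"

        "google.golang.org/genproto/googleapis/api/httpbody"
    )

    func main() {
        body := &httpbody.HttpBody{
            ContentType: "application/pdf",      // HTTP Content-Type of the raw payload
            Data:        []byte("%PDF-1.4 ..."), // the body bytes themselves
        }
        fmt.Println(body.GetContentType(), len(body.GetData()), "bytes")
    }
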
diff --git a/vendor/google.golang.org/genproto/protobuf/field_mask/field_mask.pb.go b/vendor/google.golang.org/genproto/protobuf/field_mask/field_mask.pb.go
deleted file mode 100644
index 86886693f..000000000
--- a/vendor/google.golang.org/genproto/protobuf/field_mask/field_mask.pb.go
+++ /dev/null
@@ -1,280 +0,0 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
-// source: google/protobuf/field_mask.proto
-
-package field_mask // import "google.golang.org/genproto/protobuf/field_mask"
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-// This is a compile-time assertion to ensure that this generated file
-// is compatible with the proto package it is being compiled against.
-// A compilation error at this line likely means your copy of the
-// proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
-
-// `FieldMask` represents a set of symbolic field paths, for example:
-//
-// paths: "f.a"
-// paths: "f.b.d"
-//
-// Here `f` represents a field in some root message, `a` and `b`
-// fields in the message found in `f`, and `d` a field found in the
-// message in `f.b`.
-//
-// Field masks are used to specify a subset of fields that should be
-// returned by a get operation or modified by an update operation.
-// Field masks also have a custom JSON encoding (see below).
-//
-// # Field Masks in Projections
-//
-// When used in the context of a projection, a response message or
-// sub-message is filtered by the API to only contain those fields as
-// specified in the mask. For example, if the mask in the previous
-// example is applied to a response message as follows:
-//
-// f {
-// a : 22
-// b {
-// d : 1
-// x : 2
-// }
-// y : 13
-// }
-// z: 8
-//
-// The result will not contain specific values for fields x,y and z
-// (their value will be set to the default, and omitted in proto text
-// output):
-//
-//
-// f {
-// a : 22
-// b {
-// d : 1
-// }
-// }
-//
-// A repeated field is not allowed except at the last position of a
-// paths string.
-//
-// If a FieldMask object is not present in a get operation, the
-// operation applies to all fields (as if a FieldMask of all fields
-// had been specified).
-//
-// Note that a field mask does not necessarily apply to the
-// top-level response message. In case of a REST get operation, the
-// field mask applies directly to the response, but in case of a REST
-// list operation, the mask instead applies to each individual message
-// in the returned resource list. In case of a REST custom method,
-// other definitions may be used. Where the mask applies will be
-// clearly documented together with its declaration in the API. In
-// any case, the effect on the returned resource/resources is required
-// behavior for APIs.
-//
-// # Field Masks in Update Operations
-//
-// A field mask in update operations specifies which fields of the
-// targeted resource are going to be updated. The API is required
-// to only change the values of the fields as specified in the mask
-// and leave the others untouched. If a resource is passed in to
-// describe the updated values, the API ignores the values of all
-// fields not covered by the mask.
-//
-// If a repeated field is specified for an update operation, new values will
-// be appended to the existing repeated field in the target resource. Note that
-// a repeated field is only allowed in the last position of a `paths` string.
-//
-// If a sub-message is specified in the last position of the field mask for an
-// update operation, then new value will be merged into the existing sub-message
-// in the target resource.
-//
-// For example, given the target message:
-//
-// f {
-// b {
-// d: 1
-// x: 2
-// }
-// c: [1]
-// }
-//
-// And an update message:
-//
-// f {
-// b {
-// d: 10
-// }
-// c: [2]
-// }
-//
-// then if the field mask is:
-//
-// paths: ["f.b", "f.c"]
-//
-// then the result will be:
-//
-// f {
-// b {
-// d: 10
-// x: 2
-// }
-// c: [1, 2]
-// }
-//
-// An implementation may provide options to override this default behavior for
-// repeated and message fields.
-//
-// In order to reset a field's value to the default, the field must
-// be in the mask and set to the default value in the provided resource.
-// Hence, in order to reset all fields of a resource, provide a default
-// instance of the resource and set all fields in the mask, or do
-// not provide a mask as described below.
-//
-// If a field mask is not present on update, the operation applies to
-// all fields (as if a field mask of all fields has been specified).
-// Note that in the presence of schema evolution, this may mean that
-// fields the client does not know and has therefore not filled into
-// the request will be reset to their default. If this is unwanted
-// behavior, a specific service may require a client to always specify
-// a field mask, producing an error if not.
-//
-// As with get operations, the location of the resource which
-// describes the updated values in the request message depends on the
-// operation kind. In any case, the effect of the field mask is
-// required to be honored by the API.
-//
-// ## Considerations for HTTP REST
-//
-// The HTTP kind of an update operation which uses a field mask must
-// be set to PATCH instead of PUT in order to satisfy HTTP semantics
-// (PUT must only be used for full updates).
-//
-// # JSON Encoding of Field Masks
-//
-// In JSON, a field mask is encoded as a single string where paths are
-// separated by a comma. Fields name in each path are converted
-// to/from lower-camel naming conventions.
-//
-// As an example, consider the following message declarations:
-//
-// message Profile {
-// User user = 1;
-// Photo photo = 2;
-// }
-// message User {
-// string display_name = 1;
-// string address = 2;
-// }
-//
-// In proto a field mask for `Profile` may look as such:
-//
-// mask {
-// paths: "user.display_name"
-// paths: "photo"
-// }
-//
-// In JSON, the same mask is represented as below:
-//
-// {
-// mask: "user.displayName,photo"
-// }
-//
-// # Field Masks and Oneof Fields
-//
-// Field masks treat fields in oneofs just as regular fields. Consider the
-// following message:
-//
-// message SampleMessage {
-// oneof test_oneof {
-// string name = 4;
-// SubMessage sub_message = 9;
-// }
-// }
-//
-// The field mask can be:
-//
-// mask {
-// paths: "name"
-// }
-//
-// Or:
-//
-// mask {
-// paths: "sub_message"
-// }
-//
-// Note that oneof type names ("test_oneof" in this case) cannot be used in
-// paths.
-//
-// ## Field Mask Verification
-//
-// The implementation of any API method which has a FieldMask type field in the
-// request should verify the included field paths, and return an
-// `INVALID_ARGUMENT` error if any path is duplicated or unmappable.
-type FieldMask struct {
- // The set of field mask paths.
- Paths []string `protobuf:"bytes,1,rep,name=paths,proto3" json:"paths,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
-}
-
-func (m *FieldMask) Reset() { *m = FieldMask{} }
-func (m *FieldMask) String() string { return proto.CompactTextString(m) }
-func (*FieldMask) ProtoMessage() {}
-func (*FieldMask) Descriptor() ([]byte, []int) {
- return fileDescriptor_field_mask_02a8b0c0831edcce, []int{0}
-}
-func (m *FieldMask) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_FieldMask.Unmarshal(m, b)
-}
-func (m *FieldMask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_FieldMask.Marshal(b, m, deterministic)
-}
-func (dst *FieldMask) XXX_Merge(src proto.Message) {
- xxx_messageInfo_FieldMask.Merge(dst, src)
-}
-func (m *FieldMask) XXX_Size() int {
- return xxx_messageInfo_FieldMask.Size(m)
-}
-func (m *FieldMask) XXX_DiscardUnknown() {
- xxx_messageInfo_FieldMask.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_FieldMask proto.InternalMessageInfo
-
-func (m *FieldMask) GetPaths() []string {
- if m != nil {
- return m.Paths
- }
- return nil
-}
-
-func init() {
- proto.RegisterType((*FieldMask)(nil), "google.protobuf.FieldMask")
-}
-
-func init() {
- proto.RegisterFile("google/protobuf/field_mask.proto", fileDescriptor_field_mask_02a8b0c0831edcce)
-}
-
-var fileDescriptor_field_mask_02a8b0c0831edcce = []byte{
- // 175 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x48, 0xcf, 0xcf, 0x4f,
- 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcb, 0x4c, 0xcd,
- 0x49, 0x89, 0xcf, 0x4d, 0x2c, 0xce, 0xd6, 0x03, 0x8b, 0x09, 0xf1, 0x43, 0x54, 0xe8, 0xc1, 0x54,
- 0x28, 0x29, 0x72, 0x71, 0xba, 0x81, 0x14, 0xf9, 0x26, 0x16, 0x67, 0x0b, 0x89, 0x70, 0xb1, 0x16,
- 0x24, 0x96, 0x64, 0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6b, 0x70, 0x06, 0x41, 0x38, 0x4e, 0x3d, 0x8c,
- 0x5c, 0xc2, 0xc9, 0xf9, 0xb9, 0x7a, 0x68, 0x5a, 0x9d, 0xf8, 0xe0, 0x1a, 0x03, 0x40, 0x42, 0x01,
- 0x8c, 0x51, 0x96, 0x50, 0x25, 0xe9, 0xf9, 0x39, 0x89, 0x79, 0xe9, 0x7a, 0xf9, 0x45, 0xe9, 0xfa,
- 0xe9, 0xa9, 0x79, 0x60, 0x0d, 0xd8, 0xdc, 0x64, 0x8d, 0x60, 0xfe, 0x60, 0x64, 0x5c, 0xc4, 0xc4,
- 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x48, 0x00, 0x54, 0x83, 0x5e, 0x78, 0x6a,
- 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x48, 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0xd8, 0x24,
- 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfd, 0xda, 0xb7, 0xa8, 0xed, 0x00, 0x00, 0x00,
-}
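
Similarly, the removed FieldMask wrapper is just a list of paths; a sketch matching the Profile example in the comment above:

    package main

    import (
        "fmt"

        field_mask "google.golang.org/genproto/protobuf/field_mask"
    )

    func main() {
        mask := &field_mask.FieldMask{
            Paths: []string{"user.display_name", "photo"},
        }
        // In JSON this mask is encoded as the single string "user.displayName,photo".
        fmt.Println(mask.GetPaths())
    }
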
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 7a6f008b9..e7a818812 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -6,31 +6,29 @@ cloud.google.com/go/internal/optional
cloud.google.com/go/internal/trace
cloud.google.com/go/internal/version
cloud.google.com/go/compute/metadata
-# contrib.go.opencensus.io/exporter/ocagent v0.4.12
-contrib.go.opencensus.io/exporter/ocagent
-# github.com/Azure/azure-sdk-for-go v31.2.0+incompatible
+# github.com/Azure/azure-sdk-for-go v32.5.0+incompatible
github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/resources/mgmt/resources
github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/storage/mgmt/storage
github.com/Azure/azure-sdk-for-go/storage
github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2016-02-01/resources
github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2016-01-01/storage
github.com/Azure/azure-sdk-for-go/version
-# github.com/Azure/go-autorest/autorest v0.5.0
+# github.com/Azure/go-autorest/autorest v0.9.0
github.com/Azure/go-autorest/autorest
github.com/Azure/go-autorest/autorest/azure
-# github.com/Azure/go-autorest/autorest/adal v0.2.0
+# github.com/Azure/go-autorest/autorest/adal v0.6.0
github.com/Azure/go-autorest/autorest/adal
-# github.com/Azure/go-autorest/autorest/azure/cli v0.1.0
+# github.com/Azure/go-autorest/autorest/azure/cli v0.2.0
github.com/Azure/go-autorest/autorest/azure/cli
-# github.com/Azure/go-autorest/autorest/date v0.1.0
+# github.com/Azure/go-autorest/autorest/date v0.2.0
github.com/Azure/go-autorest/autorest/date
-# github.com/Azure/go-autorest/autorest/to v0.2.0
+# github.com/Azure/go-autorest/autorest/to v0.3.0
github.com/Azure/go-autorest/autorest/to
-# github.com/Azure/go-autorest/autorest/validation v0.1.0
+# github.com/Azure/go-autorest/autorest/validation v0.2.0
github.com/Azure/go-autorest/autorest/validation
# github.com/Azure/go-autorest/logger v0.1.0
github.com/Azure/go-autorest/logger
-# github.com/Azure/go-autorest/tracing v0.1.0
+# github.com/Azure/go-autorest/tracing v0.5.0
github.com/Azure/go-autorest/tracing
# github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4
github.com/Azure/go-ntlmssp
@@ -137,13 +135,6 @@ github.com/bgentry/go-netrc/netrc
github.com/bgentry/speakeasy
# github.com/blang/semver v3.5.1+incompatible
github.com/blang/semver
-# github.com/census-instrumentation/opencensus-proto v0.2.0
-github.com/census-instrumentation/opencensus-proto/gen-go/agent/common/v1
-github.com/census-instrumentation/opencensus-proto/gen-go/agent/metrics/v1
-github.com/census-instrumentation/opencensus-proto/gen-go/agent/trace/v1
-github.com/census-instrumentation/opencensus-proto/gen-go/metrics/v1
-github.com/census-instrumentation/opencensus-proto/gen-go/resource/v1
-github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1
# github.com/chzyer/readline v0.0.0-20161106042343-c914be64f07d
github.com/chzyer/readline
# github.com/coreos/etcd v3.3.10+incompatible
@@ -185,16 +176,10 @@ github.com/golang/mock/gomock
# github.com/golang/protobuf v1.3.1
github.com/golang/protobuf/proto
github.com/golang/protobuf/ptypes
-github.com/golang/protobuf/ptypes/timestamp
github.com/golang/protobuf/ptypes/any
github.com/golang/protobuf/ptypes/duration
+github.com/golang/protobuf/ptypes/timestamp
github.com/golang/protobuf/protoc-gen-go/descriptor
-github.com/golang/protobuf/ptypes/wrappers
-github.com/golang/protobuf/jsonpb
-github.com/golang/protobuf/protoc-gen-go/generator
-github.com/golang/protobuf/ptypes/struct
-github.com/golang/protobuf/protoc-gen-go/generator/internal/remap
-github.com/golang/protobuf/protoc-gen-go/plugin
# github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db
github.com/golang/snappy
# github.com/google/go-cmp v0.3.0
@@ -301,10 +286,6 @@ github.com/gophercloud/gophercloud/openstack/db/v1/datastores
github.com/gophercloud/gophercloud/internal
# github.com/gophercloud/utils v0.0.0-20190128072930-fbb6ab446f01
github.com/gophercloud/utils/openstack/clientconfig
-# github.com/grpc-ecosystem/grpc-gateway v1.8.5
-github.com/grpc-ecosystem/grpc-gateway/runtime
-github.com/grpc-ecosystem/grpc-gateway/utilities
-github.com/grpc-ecosystem/grpc-gateway/internal
# github.com/hashicorp/aws-sdk-go-base v0.3.0
github.com/hashicorp/aws-sdk-go-base
# github.com/hashicorp/consul v0.0.0-20171026175957-610f3c86a089
@@ -314,7 +295,7 @@ github.com/hashicorp/consul/lib/freeport
github.com/hashicorp/consul/testutil/retry
# github.com/hashicorp/errwrap v1.0.0
github.com/hashicorp/errwrap
-# github.com/hashicorp/go-azure-helpers v0.5.0
+# github.com/hashicorp/go-azure-helpers v0.7.0
github.com/hashicorp/go-azure-helpers/authentication
github.com/hashicorp/go-azure-helpers/storage
# github.com/hashicorp/go-checkpoint v0.5.0
@@ -501,24 +482,22 @@ github.com/zclconf/go-cty/cty/set
# github.com/zclconf/go-cty-yaml v1.0.1
github.com/zclconf/go-cty-yaml
# go.opencensus.io v0.20.2
-go.opencensus.io/plugin/ochttp
-go.opencensus.io/plugin/ochttp/propagation/tracecontext
-go.opencensus.io/stats/view
go.opencensus.io/trace
-go.opencensus.io
-go.opencensus.io/plugin/ocgrpc
-go.opencensus.io/resource
-go.opencensus.io/stats
-go.opencensus.io/tag
+go.opencensus.io/plugin/ochttp
+go.opencensus.io/internal
+go.opencensus.io/trace/internal
go.opencensus.io/trace/tracestate
go.opencensus.io/plugin/ochttp/propagation/b3
+go.opencensus.io/stats
+go.opencensus.io/stats/view
+go.opencensus.io/tag
go.opencensus.io/trace/propagation
+go.opencensus.io
+go.opencensus.io/stats/internal
go.opencensus.io/internal/tagencoding
go.opencensus.io/metric/metricdata
go.opencensus.io/metric/metricproducer
-go.opencensus.io/stats/internal
-go.opencensus.io/internal
-go.opencensus.io/trace/internal
+go.opencensus.io/resource
# golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4
golang.org/x/crypto/ssh
golang.org/x/crypto/ssh/agent
@@ -559,8 +538,6 @@ golang.org/x/oauth2
golang.org/x/oauth2/internal
golang.org/x/oauth2/jws
golang.org/x/oauth2/google
-# golang.org/x/sync v0.0.0-20190423024810-112230192c58
-golang.org/x/sync/semaphore
# golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82
golang.org/x/sys/unix
golang.org/x/sys/cpu
@@ -598,7 +575,6 @@ google.golang.org/api/googleapi/internal/uritemplates
google.golang.org/api/gensupport
google.golang.org/api/googleapi/transport
google.golang.org/api/transport/http/internal/propagation
-google.golang.org/api/support/bundler
# google.golang.org/appengine v1.4.0
google.golang.org/appengine/urlfetch
google.golang.org/appengine
@@ -616,8 +592,6 @@ google.golang.org/genproto/googleapis/iam/v1
google.golang.org/genproto/googleapis/rpc/status
google.golang.org/genproto/googleapis/rpc/code
google.golang.org/genproto/googleapis/api/annotations
-google.golang.org/genproto/googleapis/api/httpbody
-google.golang.org/genproto/protobuf/field_mask
# google.golang.org/grpc v1.19.1
google.golang.org/grpc
google.golang.org/grpc/test/bufconn