From c18793221278411b3943816eb6728ea3a4104a60 Mon Sep 17 00:00:00 2001
From: Igor Noskov
Date: Wed, 18 May 2016 20:11:29 +0600
Subject: [PATCH] Add Selectel image store
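
Add a new "selectel" image store backed by Selectel Cloud Storage: the
factory gains a "selectel" store type (imagestore/factory.go), the store
itself lives in imagestore/selectelstore.go, and the client library is
vendored under vendor/github.com/ernado/selectel/storage. The Docker base
image and Godeps move from Go 1.5 to Go 1.6, github.com/PagerDuty/godspeed
is added to Godeps, and vendored packages that are no longer imported
(golang.org/x/crypto/ssh/terminal, google.golang.org/appengine, and the
unused bigquery, container and pubsub API clients) are dropped.

A minimal sketch of the per-store configuration the factory consumes;
NewImageStores passes one map[string]string per store, the key names come
from factory.go, and the values below are placeholders:

    conf := map[string]string{
        "Type":          "selectel",
        "user":          "<selectel user>", // storage API user
        "key":           "<selectel key>",  // storage API key
        "container":     "images",          // hypothetical container name
        "rootPath":      "mandible",        // hypothetical root prefix
        "NamePathRegex": "",                // optional name-to-path mapping
        "NamePathMap":   "",
    }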
---
.gitignore | 3 +
Dockerfile | 2 +-
Godeps/Godeps.json | 33 +-
imagestore/factory.go | 16 +
imagestore/selectelstore.go | 65 +
.../PagerDuty/godspeed/gspdtest/gspdtest.go | 56 +
vendor/github.com/ernado/selectel/LICENSE | 21 +
.../ernado/selectel/storage/auth.go | 83 +
.../ernado/selectel/storage/container.go | 224 ++
.../ernado/selectel/storage/object.go | 113 +
.../ernado/selectel/storage/selctl/main.go | 436 ++
.../ernado/selectel/storage/storage.go | 393 ++
.../ernado/selectel/storage/upload.go | 129 +
.../x/crypto/ssh/terminal/terminal.go | 892 -----
.../golang.org/x/crypto/ssh/terminal/util.go | 128 -
.../x/crypto/ssh/terminal/util_bsd.go | 12 -
.../x/crypto/ssh/terminal/util_linux.go | 11 -
.../x/crypto/ssh/terminal/util_windows.go | 174 -
.../api/bigquery/v2/bigquery-api.json | 2209 -----------
.../api/bigquery/v2/bigquery-gen.go | 3531 -----------------
.../api/container/v1beta1/container-api.json | 621 ---
.../api/container/v1beta1/container-gen.go | 1077 -----
.../api/pubsub/v1beta2/pubsub-api.json | 679 ----
.../api/pubsub/v1beta2/pubsub-gen.go | 1458 -------
.../google.golang.org/appengine/.travis.yml | 18 -
vendor/google.golang.org/appengine/LICENSE | 202 -
vendor/google.golang.org/appengine/README.md | 75 -
.../google.golang.org/appengine/aetest/doc.go | 42 -
.../appengine/aetest/instance.go | 51 -
.../appengine/aetest/instance_classic.go | 21 -
.../appengine/aetest/instance_vm.go | 276 --
.../appengine/aetest/user.go | 36 -
.../google.golang.org/appengine/appengine.go | 76 -
.../appengine/appengine_vm.go | 56 -
.../appengine/blobstore/blobstore.go | 276 --
.../appengine/blobstore/read.go | 160 -
.../appengine/capability/capability.go | 52 -
.../appengine/channel/channel.go | 83 -
.../appengine/cloudsql/cloudsql.go | 62 -
.../appengine/cloudsql/cloudsql_classic.go | 17 -
.../appengine/cloudsql/cloudsql_vm.go | 16 -
.../appengine/cmd/aebundler/aebundler.go | 342 --
.../appengine/cmd/aedeploy/aedeploy.go | 264 --
.../appengine/datastore/datastore.go | 406 --
.../appengine/datastore/doc.go | 351 --
.../appengine/datastore/key.go | 309 --
.../appengine/datastore/load.go | 334 --
.../appengine/datastore/metadata.go | 79 -
.../appengine/datastore/prop.go | 294 --
.../appengine/datastore/query.go | 713 ----
.../appengine/datastore/save.go | 300 --
.../appengine/datastore/transaction.go | 87 -
.../appengine/delay/delay.go | 275 --
.../appengine/demos/guestbook/app.yaml | 17 -
.../appengine/demos/guestbook/favicon.ico | Bin 1150 -> 0 bytes
.../appengine/demos/guestbook/guestbook.go | 109 -
.../appengine/demos/guestbook/index.yaml | 7 -
.../demos/guestbook/templates/guestbook.html | 26 -
.../appengine/demos/helloworld/app.yaml | 13 -
.../appengine/demos/helloworld/favicon.ico | Bin 1150 -> 0 bytes
.../appengine/demos/helloworld/helloworld.go | 50 -
vendor/google.golang.org/appengine/errors.go | 46 -
.../google.golang.org/appengine/file/file.go | 28 -
.../google.golang.org/appengine/identity.go | 142 -
.../appengine/image/image.go | 67 -
.../appengine/internal/aetesting/fake.go | 80 -
.../appengine/internal/api.go | 640 ---
.../appengine/internal/api_classic.go | 133 -
.../appengine/internal/api_common.go | 101 -
.../appengine/internal/app_id.go | 28 -
.../app_identity/app_identity_service.pb.go | 296 --
.../app_identity/app_identity_service.proto | 64 -
.../appengine/internal/base/api_base.pb.go | 133 -
.../appengine/internal/base/api_base.proto | 33 -
.../blobstore/blobstore_service.pb.go | 347 --
.../blobstore/blobstore_service.proto | 71 -
.../capability/capability_service.pb.go | 125 -
.../capability/capability_service.proto | 28 -
.../internal/channel/channel_service.pb.go | 154 -
.../internal/channel/channel_service.proto | 30 -
.../internal/datastore/datastore_v3.pb.go | 2778 -------------
.../internal/datastore/datastore_v3.proto | 541 ---
.../appengine/internal/identity.go | 14 -
.../appengine/internal/identity_classic.go | 27 -
.../appengine/internal/identity_vm.go | 97 -
.../internal/image/images_service.pb.go | 845 ----
.../internal/image/images_service.proto | 162 -
.../appengine/internal/internal.go | 144 -
.../appengine/internal/log/log_service.pb.go | 899 -----
.../appengine/internal/log/log_service.proto | 150 -
.../internal/mail/mail_service.pb.go | 229 --
.../internal/mail/mail_service.proto | 45 -
.../internal/memcache/memcache_service.pb.go | 938 -----
.../internal/memcache/memcache_service.proto | 165 -
.../appengine/internal/metadata.go | 61 -
.../internal/modules/modules_service.pb.go | 375 --
.../internal/modules/modules_service.proto | 80 -
.../appengine/internal/net.go | 56 -
.../appengine/internal/regen.sh | 40 -
.../internal/remote_api/remote_api.pb.go | 231 --
.../internal/remote_api/remote_api.proto | 44 -
.../appengine/internal/search/search.pb.go | 2127 ----------
.../appengine/internal/search/search.proto | 388 --
.../internal/socket/socket_service.pb.go | 1858 ---------
.../internal/socket/socket_service.proto | 460 ---
.../internal/system/system_service.pb.go | 198 -
.../internal/system/system_service.proto | 49 -
.../taskqueue/taskqueue_service.pb.go | 1888 ---------
.../taskqueue/taskqueue_service.proto | 342 --
.../appengine/internal/transaction.go | 107 -
.../internal/urlfetch/urlfetch_service.pb.go | 355 --
.../internal/urlfetch/urlfetch_service.proto | 64 -
.../internal/user/user_service.pb.go | 289 --
.../internal/user/user_service.proto | 58 -
.../internal/xmpp/xmpp_service.pb.go | 427 --
.../internal/xmpp/xmpp_service.proto | 83 -
vendor/google.golang.org/appengine/log/api.go | 40 -
vendor/google.golang.org/appengine/log/log.go | 323 --
.../google.golang.org/appengine/mail/mail.go | 123 -
.../appengine/memcache/memcache.go | 526 ---
.../appengine/module/module.go | 113 -
.../google.golang.org/appengine/namespace.go | 25 -
.../appengine/remote_api/client.go | 174 -
.../appengine/remote_api/remote_api.go | 152 -
.../appengine/runtime/runtime.go | 148 -
.../google.golang.org/appengine/search/doc.go | 205 -
.../appengine/search/field.go | 82 -
.../appengine/search/search.go | 1109 ------
.../appengine/search/struct.go | 245 --
.../google.golang.org/appengine/socket/doc.go | 10 -
.../appengine/socket/socket_classic.go | 290 --
.../appengine/socket/socket_vm.go | 64 -
.../appengine/taskqueue/taskqueue.go | 496 ---
vendor/google.golang.org/appengine/timeout.go | 20 -
.../appengine/urlfetch/urlfetch.go | 210 -
.../google.golang.org/appengine/user/oauth.go | 52 -
.../google.golang.org/appengine/user/user.go | 84 -
.../appengine/user/user_classic.go | 35 -
.../appengine/user/user_vm.go | 38 -
.../google.golang.org/appengine/xmpp/xmpp.go | 253 --
140 files changed, 1549 insertions(+), 40289 deletions(-)
create mode 100644 imagestore/selectelstore.go
create mode 100644 vendor/github.com/PagerDuty/godspeed/gspdtest/gspdtest.go
create mode 100644 vendor/github.com/ernado/selectel/LICENSE
create mode 100644 vendor/github.com/ernado/selectel/storage/auth.go
create mode 100644 vendor/github.com/ernado/selectel/storage/container.go
create mode 100644 vendor/github.com/ernado/selectel/storage/object.go
create mode 100644 vendor/github.com/ernado/selectel/storage/selctl/main.go
create mode 100644 vendor/github.com/ernado/selectel/storage/storage.go
create mode 100644 vendor/github.com/ernado/selectel/storage/upload.go
delete mode 100644 vendor/golang.org/x/crypto/ssh/terminal/terminal.go
delete mode 100644 vendor/golang.org/x/crypto/ssh/terminal/util.go
delete mode 100644 vendor/golang.org/x/crypto/ssh/terminal/util_bsd.go
delete mode 100644 vendor/golang.org/x/crypto/ssh/terminal/util_linux.go
delete mode 100644 vendor/golang.org/x/crypto/ssh/terminal/util_windows.go
delete mode 100644 vendor/google.golang.org/api/bigquery/v2/bigquery-api.json
delete mode 100644 vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go
delete mode 100644 vendor/google.golang.org/api/container/v1beta1/container-api.json
delete mode 100644 vendor/google.golang.org/api/container/v1beta1/container-gen.go
delete mode 100644 vendor/google.golang.org/api/pubsub/v1beta2/pubsub-api.json
delete mode 100644 vendor/google.golang.org/api/pubsub/v1beta2/pubsub-gen.go
delete mode 100644 vendor/google.golang.org/appengine/.travis.yml
delete mode 100644 vendor/google.golang.org/appengine/LICENSE
delete mode 100644 vendor/google.golang.org/appengine/README.md
delete mode 100644 vendor/google.golang.org/appengine/aetest/doc.go
delete mode 100644 vendor/google.golang.org/appengine/aetest/instance.go
delete mode 100644 vendor/google.golang.org/appengine/aetest/instance_classic.go
delete mode 100644 vendor/google.golang.org/appengine/aetest/instance_vm.go
delete mode 100644 vendor/google.golang.org/appengine/aetest/user.go
delete mode 100644 vendor/google.golang.org/appengine/appengine.go
delete mode 100644 vendor/google.golang.org/appengine/appengine_vm.go
delete mode 100644 vendor/google.golang.org/appengine/blobstore/blobstore.go
delete mode 100644 vendor/google.golang.org/appengine/blobstore/read.go
delete mode 100644 vendor/google.golang.org/appengine/capability/capability.go
delete mode 100644 vendor/google.golang.org/appengine/channel/channel.go
delete mode 100644 vendor/google.golang.org/appengine/cloudsql/cloudsql.go
delete mode 100644 vendor/google.golang.org/appengine/cloudsql/cloudsql_classic.go
delete mode 100644 vendor/google.golang.org/appengine/cloudsql/cloudsql_vm.go
delete mode 100644 vendor/google.golang.org/appengine/cmd/aebundler/aebundler.go
delete mode 100644 vendor/google.golang.org/appengine/cmd/aedeploy/aedeploy.go
delete mode 100644 vendor/google.golang.org/appengine/datastore/datastore.go
delete mode 100644 vendor/google.golang.org/appengine/datastore/doc.go
delete mode 100644 vendor/google.golang.org/appengine/datastore/key.go
delete mode 100644 vendor/google.golang.org/appengine/datastore/load.go
delete mode 100644 vendor/google.golang.org/appengine/datastore/metadata.go
delete mode 100644 vendor/google.golang.org/appengine/datastore/prop.go
delete mode 100644 vendor/google.golang.org/appengine/datastore/query.go
delete mode 100644 vendor/google.golang.org/appengine/datastore/save.go
delete mode 100644 vendor/google.golang.org/appengine/datastore/transaction.go
delete mode 100644 vendor/google.golang.org/appengine/delay/delay.go
delete mode 100644 vendor/google.golang.org/appengine/demos/guestbook/app.yaml
delete mode 100644 vendor/google.golang.org/appengine/demos/guestbook/favicon.ico
delete mode 100644 vendor/google.golang.org/appengine/demos/guestbook/guestbook.go
delete mode 100644 vendor/google.golang.org/appengine/demos/guestbook/index.yaml
delete mode 100644 vendor/google.golang.org/appengine/demos/guestbook/templates/guestbook.html
delete mode 100644 vendor/google.golang.org/appengine/demos/helloworld/app.yaml
delete mode 100644 vendor/google.golang.org/appengine/demos/helloworld/favicon.ico
delete mode 100644 vendor/google.golang.org/appengine/demos/helloworld/helloworld.go
delete mode 100644 vendor/google.golang.org/appengine/errors.go
delete mode 100644 vendor/google.golang.org/appengine/file/file.go
delete mode 100644 vendor/google.golang.org/appengine/identity.go
delete mode 100644 vendor/google.golang.org/appengine/image/image.go
delete mode 100644 vendor/google.golang.org/appengine/internal/aetesting/fake.go
delete mode 100644 vendor/google.golang.org/appengine/internal/api.go
delete mode 100644 vendor/google.golang.org/appengine/internal/api_classic.go
delete mode 100644 vendor/google.golang.org/appengine/internal/api_common.go
delete mode 100644 vendor/google.golang.org/appengine/internal/app_id.go
delete mode 100644 vendor/google.golang.org/appengine/internal/app_identity/app_identity_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/app_identity/app_identity_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/base/api_base.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/base/api_base.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/blobstore/blobstore_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/blobstore/blobstore_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/capability/capability_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/capability/capability_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/channel/channel_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/channel/channel_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/datastore/datastore_v3.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/identity.go
delete mode 100644 vendor/google.golang.org/appengine/internal/identity_classic.go
delete mode 100644 vendor/google.golang.org/appengine/internal/identity_vm.go
delete mode 100644 vendor/google.golang.org/appengine/internal/image/images_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/image/images_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/internal.go
delete mode 100644 vendor/google.golang.org/appengine/internal/log/log_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/log/log_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/mail/mail_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/mail/mail_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/memcache/memcache_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/memcache/memcache_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/metadata.go
delete mode 100644 vendor/google.golang.org/appengine/internal/modules/modules_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/modules/modules_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/net.go
delete mode 100644 vendor/google.golang.org/appengine/internal/regen.sh
delete mode 100644 vendor/google.golang.org/appengine/internal/remote_api/remote_api.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/remote_api/remote_api.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/search/search.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/search/search.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/socket/socket_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/socket/socket_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/system/system_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/system/system_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/taskqueue/taskqueue_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/taskqueue/taskqueue_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/transaction.go
delete mode 100644 vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/user/user_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/user/user_service.proto
delete mode 100644 vendor/google.golang.org/appengine/internal/xmpp/xmpp_service.pb.go
delete mode 100644 vendor/google.golang.org/appengine/internal/xmpp/xmpp_service.proto
delete mode 100644 vendor/google.golang.org/appengine/log/api.go
delete mode 100644 vendor/google.golang.org/appengine/log/log.go
delete mode 100644 vendor/google.golang.org/appengine/mail/mail.go
delete mode 100644 vendor/google.golang.org/appengine/memcache/memcache.go
delete mode 100644 vendor/google.golang.org/appengine/module/module.go
delete mode 100644 vendor/google.golang.org/appengine/namespace.go
delete mode 100644 vendor/google.golang.org/appengine/remote_api/client.go
delete mode 100644 vendor/google.golang.org/appengine/remote_api/remote_api.go
delete mode 100644 vendor/google.golang.org/appengine/runtime/runtime.go
delete mode 100644 vendor/google.golang.org/appengine/search/doc.go
delete mode 100644 vendor/google.golang.org/appengine/search/field.go
delete mode 100644 vendor/google.golang.org/appengine/search/search.go
delete mode 100644 vendor/google.golang.org/appengine/search/struct.go
delete mode 100644 vendor/google.golang.org/appengine/socket/doc.go
delete mode 100644 vendor/google.golang.org/appengine/socket/socket_classic.go
delete mode 100644 vendor/google.golang.org/appengine/socket/socket_vm.go
delete mode 100644 vendor/google.golang.org/appengine/taskqueue/taskqueue.go
delete mode 100644 vendor/google.golang.org/appengine/timeout.go
delete mode 100644 vendor/google.golang.org/appengine/urlfetch/urlfetch.go
delete mode 100644 vendor/google.golang.org/appengine/user/oauth.go
delete mode 100644 vendor/google.golang.org/appengine/user/user.go
delete mode 100644 vendor/google.golang.org/appengine/user/user_classic.go
delete mode 100644 vendor/google.golang.org/appengine/user/user_vm.go
delete mode 100644 vendor/google.golang.org/appengine/xmpp/xmpp.go
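
A rough usage sketch of the new store, matching the wiring in
imagestore/factory.go. Only the constructor and method signatures come
from this patch; the import paths, the empty NamePathMapper arguments and
the StoreObject literal are illustrative assumptions:

    package main

    import (
        "log"

        "github.com/Imgur/mandible/imagestore"
        selectel "github.com/ernado/selectel/storage"
    )

    func main() {
        // Authenticate against Selectel storage (auth.selcdn.ru).
        client, err := selectel.New("<user>", "<key>")
        if err != nil {
            log.Fatal(err)
        }

        // Same wiring as Factory.NewSelectelStore: name/path mapper,
        // target container and root path inside the container.
        mapper := imagestore.NewNamePathMapper("", "")
        store := imagestore.NewSelectelImageStore(client, mapper, "images", "mandible")

        // Save uploads a local file and fills obj.Url with the object URL.
        obj := &imagestore.StoreObject{MimeType: "image/png"}
        if _, err := store.Save("/tmp/example.png", obj); err != nil {
            log.Fatal(err)
        }
        log.Println("stored at", obj.Url)
    }
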
diff --git a/.gitignore b/.gitignore
index f7ff514..ab3d931 100644
--- a/.gitignore
+++ b/.gitignore
@@ -29,3 +29,6 @@ tags
*.sw[o-p]
profile.cov
+
+#IDEA project files
+.idea
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index d5b73f4..7637d1a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM golang:1.5
+FROM golang:1.6
RUN apt-get update && apt-get install -yqq aspell aspell-en libaspell-dev tesseract-ocr tesseract-ocr-eng imagemagick optipng exiftool libjpeg-progs webp
ADD docker/meme.traineddata /usr/share/tesseract-ocr/tessdata/meme.traineddata
ADD docker/imagemagick_policy.xml /etc/ImageMagick-6/policy.xml
diff --git a/Godeps/Godeps.json b/Godeps/Godeps.json
index d08750f..bb2bc0e 100644
--- a/Godeps/Godeps.json
+++ b/Godeps/Godeps.json
@@ -1,14 +1,19 @@
{
"ImportPath": "github.com/Imgur/mandible",
- "GoVersion": "go1.5",
- "Packages": [
- "./..."
- ],
+ "GoVersion": "go1.6",
"Deps": [
+ {
+ "ImportPath": "github.com/PagerDuty/godspeed",
+ "Rev": "ef757b820a7d6760a89641ac29541967eb6d9f05"
+ },
{
"ImportPath": "github.com/bradfitz/http2",
"Rev": "f8202bc903bda493ebba4aa54922d78430c2c42f"
},
+ {
+ "ImportPath": "github.com/ernado/selectel/storage",
+ "Rev": "10f520cf8312d6729981b6cd30f1085d0fce436b"
+ },
{
"ImportPath": "github.com/golang/glog",
"Rev": "44145f04b68cf362d9c4df2182967c2275eaefed"
@@ -41,10 +46,6 @@
"ImportPath": "github.com/vaughan0/go-ini",
"Rev": "a98ad7ee00ec53921f08832bc06ecf7fd600e6a1"
},
- {
- "ImportPath": "golang.org/x/crypto/ssh/terminal",
- "Rev": "3760e016850398b85094c4c99e955b8c3dea5711"
- },
{
"ImportPath": "golang.org/x/net/context",
"Rev": "84afb0af0050ae286aa9ced0c29383c2a866a925"
@@ -53,30 +54,14 @@
"ImportPath": "golang.org/x/oauth2",
"Rev": "b5adcc2dcdf009d0391547edc6ecbaff889f5bb9"
},
- {
- "ImportPath": "google.golang.org/api/bigquery/v2",
- "Rev": "0610a35668fd6881bec389e74208f0df92010e96"
- },
- {
- "ImportPath": "google.golang.org/api/container/v1beta1",
- "Rev": "0610a35668fd6881bec389e74208f0df92010e96"
- },
{
"ImportPath": "google.golang.org/api/googleapi",
"Rev": "0610a35668fd6881bec389e74208f0df92010e96"
},
- {
- "ImportPath": "google.golang.org/api/pubsub/v1beta2",
- "Rev": "0610a35668fd6881bec389e74208f0df92010e96"
- },
{
"ImportPath": "google.golang.org/api/storage/v1",
"Rev": "0610a35668fd6881bec389e74208f0df92010e96"
},
- {
- "ImportPath": "google.golang.org/appengine",
- "Rev": "6bde959377a90acb53366051d7d587bfd7171354"
- },
{
"ImportPath": "google.golang.org/cloud",
"Rev": "0b21ed5434dc279f2b8ea3c02dc69135600bbb8b"
diff --git a/imagestore/factory.go b/imagestore/factory.go
index 7e15320..2c3a8b3 100644
--- a/imagestore/factory.go
+++ b/imagestore/factory.go
@@ -8,6 +8,7 @@ import (
"github.com/mitchellh/goamz/aws"
"github.com/mitchellh/goamz/s3"
+ selectel "github.com/ernado/selectel/storage"
"golang.org/x/oauth2"
"golang.org/x/oauth2/google"
gcloud "google.golang.org/cloud"
@@ -40,6 +41,9 @@ func (this *Factory) NewImageStores() ImageStore {
case "memory":
store = NewInMemoryImageStore()
stores = append(stores, store)
+ case "selectel":
+ store = this.NewSelectelStore(configWrapper)
+ stores = append(stores, store)
default:
log.Fatalf("Unsupported store %s", configWrapper["Type"])
}
@@ -121,3 +125,15 @@ func (this *Factory) NewHashGenerator(store ImageStore) *HashGenerator {
hashGen.init()
return hashGen
}
+
+func (this *Factory) NewSelectelStore(conf map[string]string) ImageStore {
+ user, key, container, rootPath := conf["user"], conf["key"], conf["container"], conf["rootPath"]
+ client, err := selectel.New(user, key)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ mapper := NewNamePathMapper(conf["NamePathRegex"], conf["NamePathMap"])
+
+ return NewSelectelImageStore(client, mapper, container, rootPath)
+}
diff --git a/imagestore/selectelstore.go b/imagestore/selectelstore.go
new file mode 100644
index 0000000..eb2fb3e
--- /dev/null
+++ b/imagestore/selectelstore.go
@@ -0,0 +1,65 @@
+package imagestore
+
+import (
+ "fmt"
+ "github.com/ernado/selectel/storage"
+ "io"
+ "os"
+ "path"
+)
+
+type SelectelStore struct {
+ client storage.API
+ storeRoot string
+ container string
+ namePathMapper *NamePathMapper
+}
+
+func NewSelectelImageStore(client storage.API, mapper *NamePathMapper, container, root string) *SelectelStore {
+ return &SelectelStore{
+ client: client,
+ namePathMapper: mapper,
+ container: container,
+ storeRoot: root,
+ }
+}
+
+func (s *SelectelStore) Save(src string, obj *StoreObject) (*StoreObject, error) {
+ f, err := os.Open(src)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+
+ pathToFile := s.toPath(obj)
+ container, name := s.toSelectelPath(pathToFile)
+ if err := s.client.Upload(f, container, name, obj.MimeType); err != nil {
+ return nil, fmt.Errorf("Selectel api returns error: %v", err)
+ }
+
+ obj.Url = s.client.URL(container, name)
+
+ return obj, nil
+}
+func (s *SelectelStore) Exists(obj *StoreObject) (bool, error) {
+ pathToFile := s.toPath(obj)
+ container, name := s.toSelectelPath(pathToFile)
+ _, err := s.client.ObjectInfo(container, name)
+ return err == nil, nil
+}
+func (s *SelectelStore) Get(obj *StoreObject) (io.ReadCloser, error) {
+ pathToFile := s.toPath(obj)
+ container, name := s.toSelectelPath(pathToFile)
+ return s.client.C(container).Object(name).GetReader()
+}
+func (s *SelectelStore) String() string {
+ return "SelectelStore"
+}
+
+func (s *SelectelStore) toPath(obj *StoreObject) string {
+ return s.storeRoot + "/" + s.namePathMapper.mapToPath(obj)
+}
+
+func (s *SelectelStore) toSelectelPath(fullPath string) (string, string) {
+ return path.Join(s.container, path.Dir(fullPath)), path.Base(fullPath)
+}
diff --git a/vendor/github.com/PagerDuty/godspeed/gspdtest/gspdtest.go b/vendor/github.com/PagerDuty/godspeed/gspdtest/gspdtest.go
new file mode 100644
index 0000000..4527db5
--- /dev/null
+++ b/vendor/github.com/PagerDuty/godspeed/gspdtest/gspdtest.go
@@ -0,0 +1,56 @@
+// Copyright 2014-2015 PagerDuty, Inc, et al. All rights reserved.
+// Use of this source code is governed by the BSD 3-Clause
+// license that can be found in the LICENSE file.
+
+// Package gspdtest is a package used by Godspeed for testing. This package
+// isn't really meant to be consumed by anyone.
+package gspdtest
+
+import (
+ "bytes"
+ "fmt"
+ "net"
+)
+
+// Listener is a function which takes a *net.UDPConn and sends any data received
+// on it back over the c channel. This function is meant to be run within a
+// goroutine. The ctrl channel is used to shut down the goroutine.
+func Listener(l *net.UDPConn, ctrl chan int, c chan []byte) {
+ for {
+ select {
+ case _, ok := <-ctrl:
+ if !ok {
+ close(c)
+ return
+ }
+ default:
+ buffer := make([]byte, 8193)
+
+ _, err := l.Read(buffer)
+
+ if err != nil {
+ continue
+ }
+
+ c <- bytes.Trim(buffer, "\x00")
+ }
+ }
+}
+
+// BuildListener is a function which builds a *net.UDPConn listening on localhost
+// on the port specified. It also returns a control channel and a return channel.
+func BuildListener(port int) (*net.UDPConn, chan int, chan []byte) {
+ addr, err := net.ResolveUDPAddr("udp", fmt.Sprintf("127.0.0.1:%d", port))
+
+ if err != nil {
+ panic(fmt.Sprintf("getting address for test listener failed, bailing out. Here's everything I know: %v", err))
+ }
+
+ l, err := net.ListenUDP("udp", addr)
+
+ if err != nil {
+ panic(fmt.Sprintf("unable to listen for traffic: %v", err))
+ }
+
+ return l, make(chan int), make(chan []byte)
+}
diff --git a/vendor/github.com/ernado/selectel/LICENSE b/vendor/github.com/ernado/selectel/LICENSE
new file mode 100644
index 0000000..d9d0d2e
--- /dev/null
+++ b/vendor/github.com/ernado/selectel/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Aleksandr Razumov
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/ernado/selectel/storage/auth.go b/vendor/github.com/ernado/selectel/storage/auth.go
new file mode 100644
index 0000000..4e624d2
--- /dev/null
+++ b/vendor/github.com/ernado/selectel/storage/auth.go
@@ -0,0 +1,83 @@
+package storage
+
+import (
+ "errors"
+ "net/http"
+ "net/url"
+ "strconv"
+ "time"
+)
+
+const (
+ authURL = "https://auth.selcdn.ru/"
+ authUserHeader = "X-Auth-User"
+ authKeyHeader = "X-Auth-Key"
+ authExpireHeader = "X-Expire-Auth-Token"
+ storageURLHeader = "X-Storage-Url"
+ // tokenDurationAdd is used to reduce the effective token duration
+ // so the client re-authenticates before the token expires
+ tokenDurationAdd = 10 * time.Second
+)
+
+var (
+ // ErrorAuth occurs when client is unable to authenticate
+ ErrorAuth = errors.New("Authentication error")
+ // ErrorBadCredentials occurs when incorrect user/key provided
+ ErrorBadCredentials = errors.New("Bad auth credentials provided")
+)
+
+// Token returns current auth token
+func (c *Client) Token() string {
+ return c.token
+}
+
+// Auth performs authentication to selectel and stores token and storage url
+func (c *Client) Auth(user, key string) error {
+ if blank(user) || blank(key) {
+ return ErrorBadCredentials
+ }
+
+ request, _ := http.NewRequest(getMethod, authURL, nil)
+ request.Header.Add(authUserHeader, user)
+ request.Header.Add(authKeyHeader, key)
+
+ res, err := c.do(request)
+ if err != nil {
+ return err
+ }
+ defer res.Body.Close()
+
+ if res.StatusCode != http.StatusNoContent {
+ return ErrorAuth
+ }
+ expire, err := strconv.Atoi(res.Header.Get(authExpireHeader))
+ if err != nil {
+ return err
+ }
+
+ c.tokenExpire = expire
+ c.token = res.Header.Get(authTokenHeader)
+ if blank(c.token) {
+ return ErrorAuth
+ }
+ c.storageURL, err = url.Parse(res.Header.Get(storageURLHeader))
+ if err != nil || blank(c.storageURL.String()) {
+ return ErrorAuth
+ }
+
+ c.user, c.key = user, key
+ now := time.Now()
+ c.expireFrom = &now
+
+ return nil
+}
+
+// Expired returns true if token is expired or does not exist
+func (c *Client) Expired() bool {
+ if c.expireFrom == nil || blank(c.token) {
+ return true
+ }
+ duration := time.Duration(c.tokenExpire) * time.Second
+ expiredFrom := c.expireFrom.Add(duration).Add(tokenDurationAdd)
+ return expiredFrom.Before(time.Now())
+}
diff --git a/vendor/github.com/ernado/selectel/storage/container.go b/vendor/github.com/ernado/selectel/storage/container.go
new file mode 100644
index 0000000..6e6b69e
--- /dev/null
+++ b/vendor/github.com/ernado/selectel/storage/container.go
@@ -0,0 +1,224 @@
+package storage
+
+import (
+ "errors"
+ "io"
+ "net/http"
+ "strconv"
+)
+
+const (
+ containerMetaTypeHeader = "X-Container-Meta-Type"
+ containerPublic = "public"
+ containerPrivate = "private"
+ containerBytesUserHeader = "X-Container-Bytes-Used"
+ containerObjectCountHeader = "X-Container-Object-Count"
+)
+
+var (
+ // ErrorConianerNotEmpty occurs when requested container is not empty
+ ErrorConianerNotEmpty = errors.New("Unable to remove container with objects")
+)
+
+// Container is realization of ContainerAPI
+type Container struct {
+ name string
+ api API
+}
+
+// ContainerInfo is information about container
+type ContainerInfo struct {
+ BytesUsed uint64 `json:"bytes"`
+ ObjectCount uint64 `json:"count"`
+ Name string `json:"name"`
+ RecievedBytes uint64 `json:"rx_bytes"`
+ TransferedBytes uint64 `json:"tx_bytes"`
+ Type string `json:"type"`
+}
+
+// ContainerAPI is interface for selectel storage container
+type ContainerAPI interface {
+ Name() string
+ Upload(reader io.Reader, name, contentType string) error
+ UploadFile(filename string) error
+ URL(filename string) string
+ RemoveObject(name string) error
+ // Remove removes current container
+ Remove() error
+ // Create creates current container
+ Create(bool) error
+ // ObjectInfo returns info about object in container
+ ObjectInfo(name string) (ObjectInfo, error)
+ // Object returns object from container
+ Object(name string) ObjectAPI
+ ObjectsInfo() ([]ObjectInfo, error)
+ Objects() ([]ObjectAPI, error)
+ Info() (info ContainerInfo, err error)
+}
+
+// Upload reads all data from reader and uploads it to the container with the given filename and content type.
+// Shortcut to API.Upload.
+func (c *Container) Upload(reader io.Reader, filename, contentType string) error {
+ return c.api.Upload(reader, c.name, filename, contentType)
+}
+
+// Name returns container name
+func (c *Container) Name() string {
+ return c.name
+}
+
+// Remove removes current container
+func (c *Container) Remove() error {
+ return c.api.RemoveContainer(c.name)
+}
+
+// Create creates current container
+func (c *Container) Create(private bool) error {
+ container, err := c.api.CreateContainer(c.name, private)
+ if err != nil {
+ return err
+ }
+ *c = *container.(*Container)
+ return nil
+}
+
+// URL returns url for object
+func (c *Container) URL(filename string) string {
+ return c.api.URL(c.name, filename)
+}
+
+// UploadFile to current container. Shortcut to API.UploadFile
+func (c *Container) UploadFile(filename string) error {
+ return c.api.UploadFile(filename, c.name)
+}
+
+// RemoveObject is a shortcut to API.RemoveObject
+func (c *Container) RemoveObject(filename string) error {
+ return c.api.RemoveObject(c.name, filename)
+}
+
+func (c *Container) ObjectInfo(name string) (ObjectInfo, error) {
+ return c.api.ObjectInfo(c.name, name)
+}
+
+func (c *Container) Object(name string) ObjectAPI {
+ object := new(Object)
+ object.api = c.api
+ object.container = c
+ object.name = name
+ return object
+}
+
+// ObjectsInfo returns information about all objects in container
+func (c *Container) ObjectsInfo() ([]ObjectInfo, error) {
+ return c.api.ObjectsInfo(c.name)
+}
+
+// Objects returns all objects from the container
+func (c *Container) Objects() ([]ObjectAPI, error) {
+ info, err := c.ObjectsInfo()
+ if err != nil {
+ return nil, err
+ }
+ objects := []ObjectAPI{}
+ for _, object := range info {
+ objects = append(objects, c.Object(object.Name))
+ }
+ return objects, nil
+}
+
+func (c *Container) Info() (info ContainerInfo, err error) {
+ return c.api.ContainerInfo(c.name)
+}
+
+// C is shortcut to Client.Container
+func (c *Client) C(name string) ContainerAPI {
+ container := new(Container)
+ container.name = name
+ container.api = c
+ return container
+}
+
+// Container returns a new ContainerAPI client bound to the container name.
+// It does not check whether the container exists.
+func (c *Client) Container(name string) ContainerAPI {
+ return c.C(name)
+}
+
+// CreateContainer creates a new container and returns it.
+// If the container already exists, the existing container is returned.
+func (c *Client) CreateContainer(name string, private bool) (ContainerAPI, error) {
+ req, err := c.NewRequest(putMethod, nil, name)
+ if err != nil {
+ return nil, err
+ }
+ req.Header = http.Header{}
+ containerType := containerPublic
+ if private {
+ containerType = containerPrivate
+ }
+ req.Header.Add(containerMetaTypeHeader, containerType)
+ res, err := c.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ if res.StatusCode == http.StatusCreated || res.StatusCode == http.StatusAccepted {
+ return c.Container(name), nil
+ }
+ return nil, ErrorBadResponce
+}
+
+// RemoveContainer removes container with provided name
+// Container should be empty before removing and must exist
+func (c *Client) RemoveContainer(name string) error {
+ req, err := c.NewRequest(deleteMethod, nil, name)
+ if err != nil {
+ return err
+ }
+ res, err := c.Do(req)
+ if err != nil {
+ return err
+ }
+ if res.StatusCode == http.StatusConflict {
+ return ErrorConianerNotEmpty
+ }
+ if res.StatusCode == http.StatusNotFound {
+ return ErrorObjectNotFound
+ }
+ if res.StatusCode == http.StatusNoContent {
+ return nil
+ }
+ return ErrorBadResponce
+}
+
+func (c *Client) ContainerInfo(name string) (info ContainerInfo, err error) {
+ req, err := c.NewRequest(headMethod, nil, name)
+ if err != nil {
+ return
+ }
+ res, err := c.Do(req)
+ if err != nil {
+ return
+ }
+
+ if res.StatusCode == http.StatusNotFound {
+ return info, ErrorObjectNotFound
+ }
+
+ if res.StatusCode != http.StatusNoContent {
+ return info, ErrorBadResponce
+ }
+
+ parse := func(key string) uint64 {
+ v, _ := strconv.ParseUint(res.Header.Get(key), uint64Base, uint64BitSize)
+ return v
+ }
+
+ info.RecievedBytes = parse(recievedBytesHeader)
+ info.TransferedBytes = parse(transferedBytesHeader)
+ info.BytesUsed = parse(containerBytesUserHeader)
+ info.Type = res.Header.Get(containerMetaTypeHeader)
+ info.ObjectCount = parse(containerObjectCountHeader)
+
+ return
+}
diff --git a/vendor/github.com/ernado/selectel/storage/object.go b/vendor/github.com/ernado/selectel/storage/object.go
new file mode 100644
index 0000000..be5abc8
--- /dev/null
+++ b/vendor/github.com/ernado/selectel/storage/object.go
@@ -0,0 +1,113 @@
+package storage
+
+import (
+ "io"
+ "io/ioutil"
+ "net/http"
+ "strconv"
+ "time"
+)
+
+const (
+ etagHeader = "etag"
+ contentLengthHeader = "Content-Length"
+ lastModifiedLayout = time.RFC1123
+ lastModifiedHeader = "last-modified"
+ objectDownloadsHeader = "X-Object-Downloads"
+)
+
+// ObjectInfo represents object info
+type ObjectInfo struct {
+ Size uint64 `json:"bytes"`
+ ContentType string `json:"content_type"`
+ Downloaded uint64 `json:"downloaded"`
+ Hash string `json:"hash"`
+ LastModifiedStr string `json:"last_modified"`
+ LastModified time.Time `json:"-"`
+ Name string `json:"name"`
+}
+
+type Object struct {
+ name string
+ container ContainerAPI
+ api API
+}
+
+type ObjectAPI interface {
+ Info() (ObjectInfo, error)
+ Remove() error
+ Download() ([]byte, error)
+ Upload(reader io.Reader, contentType string) error
+ UploadFile(filename string) error
+ GetReader() (io.ReadCloser, error)
+}
+
+func (c *Client) ObjectInfo(container, filename string) (f ObjectInfo, err error) {
+ request, err := c.NewRequest(headMethod, nil, container, filename)
+ if err != nil {
+ return f, err
+ }
+ res, err := c.do(request)
+ if err != nil {
+ return f, err
+ }
+ if res.StatusCode == http.StatusNotFound {
+ return f, ErrorObjectNotFound
+ }
+ if res.StatusCode != http.StatusOK {
+ return f, ErrorBadResponce
+ }
+ parse := func(key string) uint64 {
+ v, _ := strconv.ParseUint(res.Header.Get(key), uint64Base, uint64BitSize)
+ return v
+ }
+ f.Size = uint64(res.ContentLength)
+ f.Hash = res.Header.Get(etagHeader)
+ f.ContentType = res.Header.Get(contentTypeHeader)
+ f.LastModified, err = time.Parse(lastModifiedLayout, res.Header.Get(lastModifiedHeader))
+ f.Name = filename
+ if err != nil {
+ return
+ }
+ f.Downloaded = parse(objectDownloadsHeader)
+ return
+}
+
+func (o *Object) Info() (info ObjectInfo, err error) {
+ return o.container.ObjectInfo(o.name)
+}
+
+func (o *Object) Upload(reader io.Reader, contentType string) error {
+ return o.container.Upload(reader, o.name, contentType)
+}
+
+func (o *Object) UploadFile(filename string) error {
+ return o.container.UploadFile(filename)
+}
+
+func (o *Object) Download() ([]byte, error) {
+ reader, err := o.GetReader()
+ if err != nil {
+ return nil, err
+ }
+ return ioutil.ReadAll(reader)
+}
+
+func (o *Object) GetReader() (io.ReadCloser, error) {
+ request, _ := http.NewRequest(getMethod, o.container.URL(o.name), nil)
+ res, err := o.api.Do(request)
+ if err != nil {
+ return nil, err
+ }
+ if res.StatusCode == http.StatusNotFound {
+ return nil, ErrorObjectNotFound
+ }
+ if res.StatusCode != http.StatusOK {
+ return nil, ErrorBadResponce
+ }
+ return res.Body, nil
+}
+
+func (o *Object) Remove() error {
+ return o.container.RemoveObject(o.name)
+}
diff --git a/vendor/github.com/ernado/selectel/storage/selctl/main.go b/vendor/github.com/ernado/selectel/storage/selctl/main.go
new file mode 100644
index 0000000..5bf657b
--- /dev/null
+++ b/vendor/github.com/ernado/selectel/storage/selctl/main.go
@@ -0,0 +1,436 @@
+package main
+
+import (
+ "crypto/aes"
+ "crypto/cipher"
+ "crypto/rand"
+ "crypto/sha256"
+ "errors"
+ "fmt"
+ "github.com/cheggaaa/pb"
+ "github.com/ernado/selectel/storage"
+ "github.com/jwaldrip/odin/cli"
+ "github.com/olekukonko/tablewriter"
+ "io"
+ "io/ioutil"
+ "log"
+ "mime"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+const (
+ envKey = storage.EnvKey
+ envUser = storage.EnvUser
+ version = "1.1"
+ cacheFilename = "~selct.cache~" + version
+ envCache = "SELECTEL_CACHE"
+ envContainer = "SELECTEL_CONTAINER"
+)
+
+var (
+ client = cli.New(version, "Selectel storage command line client", connect)
+ user, key string
+ container string
+ api storage.API
+ debug bool
+ cache bool
+ cacheSecure bool
+ errorNotEnough = errors.New("Not enough arguments")
+)
+
+func encryptionKey() []byte {
+ hasher := sha256.New()
+ hasher.Write([]byte("selectel storage command line client"))
+ hasher.Write([]byte(key))
+ hasher.Write([]byte(user))
+ return hasher.Sum(nil)
+}
+
+func encrypt(data []byte) []byte {
+ block, err := aes.NewCipher(encryptionKey())
+ if err != nil {
+ panic(err)
+ }
+
+ ciphertext := make([]byte, aes.BlockSize+len(data))
+ iv := ciphertext[:aes.BlockSize]
+ if _, err := io.ReadFull(rand.Reader, iv); err != nil {
+ panic(err)
+ }
+
+ stream := cipher.NewCFBEncrypter(block, iv)
+ stream.XORKeyStream(ciphertext[aes.BlockSize:], data)
+ return ciphertext
+}
+
+func decrypt(data []byte) ([]byte, error) {
+ block, err := aes.NewCipher(encryptionKey())
+ if err != nil {
+ return nil, err
+ }
+
+ if len(data) < aes.BlockSize {
+ return nil, errors.New("ciphertext too short")
+ }
+ iv := data[:aes.BlockSize]
+ data = data[aes.BlockSize:]
+ stream := cipher.NewCFBDecrypter(block, iv)
+ stream.XORKeyStream(data, data)
+
+ return data, nil
+}
+
+func init() {
+ client.DefineBoolFlagVar(&debug, "debug", false, "debug mode")
+ client.DefineBoolFlagVar(&cache, "cache", false, fmt.Sprintf("cache token in file (%s)", envCache))
+ client.DefineBoolFlagVar(&cacheSecure, "cache.secure", true, "encrypt/decrypt token with user-key pair (true by default)")
+ client.DefineStringFlag("key", "", fmt.Sprintf("selectel storage key (%s)", envKey))
+ client.AliasFlag('k', "key")
+ client.DefineStringFlag("user", "", fmt.Sprintf("selectel storage user (%s)", envUser))
+ client.AliasFlag('u', "user")
+ client.DefineStringFlag("container", "", fmt.Sprintf("default container (%s)", envContainer))
+ client.AliasFlag('c', "container")
+
+ infoCommand := client.DefineSubCommand("info", "print information about storage/container/object", wrap(info))
+ infoCommand.DefineStringFlag("type", "storage", "storage, container or object")
+ infoCommand.AliasFlag('t', "type")
+
+ listCommand := client.DefineSubCommand("list", "list objects in container/storage", wrap(list))
+ listCommand.DefineStringFlag("type", "storage", "storage or container")
+ listCommand.AliasFlag('t', "type")
+
+ client.DefineSubCommand("upload", "upload object to container", wrap(upload))
+ downloadCommand := client.DefineSubCommand("download", "download object from container", wrap(download))
+ downloadCommand.DefineStringFlag("path", "", "destination path")
+ downloadCommand.AliasFlag('p', "path")
+
+ client.DefineSubCommand("create", "create container", wrap(create))
+
+ removeCommand := client.DefineSubCommand("remove", "remove object or container", wrap(remove))
+ removeCommand.DefineStringFlag("type", "object", "container or object")
+ removeCommand.DefineBoolFlag("force", false, "remove container with files")
+ removeCommand.AliasFlag('f', "force")
+ removeCommand.AliasFlag('t', "type")
+}
+
+func readFlag(c cli.Command, name, env string) string {
+ if len(os.Getenv(env)) > 0 {
+ return os.Getenv(env)
+ }
+ return c.Flag(name).String()
+}
+
+func blank(s string) bool {
+ return len(s) == 0
+}
+
+func load() ([]byte, error) {
+ f, err := os.Open(cacheFilename)
+ if err != nil {
+ return nil, err
+ }
+ data, err := ioutil.ReadAll(f)
+ if err != nil {
+ return nil, err
+ }
+ if !cacheSecure {
+ return data, nil
+ }
+ return decrypt(data)
+}
+
+// connect reads credentials and performs auth
+func connect(c cli.Command) {
+ var err error
+
+ key = readFlag(c, "key", envKey)
+ user = readFlag(c, "user", envUser)
+ container = readFlag(c, "container", envContainer)
+
+ if strings.ToLower(os.Getenv(envCache)) == "true" {
+ cache = true
+ }
+
+ if cache {
+ var data []byte
+ data, err = load()
+ if err != nil {
+ log.Println(err)
+ } else {
+ api, err = storage.NewFromCache(data)
+ if err == nil {
+ return
+ } else {
+ log.Println("unable to load from cache:", err)
+ }
+ }
+ } else {
+ os.Remove(cacheFilename)
+ }
+
+ // checking for blank credentials
+ if blank(key) || blank(user) && api != nil {
+ log.Fatal(storage.ErrorBadCredentials)
+ }
+
+ // connecting to the api
+ api = storage.NewAsync(user, key)
+ api.Debug(debug)
+ if err = api.Auth(user, key); err != nil {
+ log.Fatal(err)
+ }
+}
+
+func wrap(callback func(cli.Command)) func(cli.Command) {
+ return func(c cli.Command) {
+ connect(c.Parent())
+ defer func() {
+ if !cache {
+ return
+ }
+ data, _ := api.Dump()
+ if cacheSecure {
+ data = encrypt(data)
+ }
+ f, _ := os.Create(cacheFilename)
+ f.Write(data)
+ }()
+ callback(c)
+ }
+}
+
+// info prints information about storage
+func info(c cli.Command) {
+ var (
+ containerName = container
+ objectName string
+ data interface{}
+ err error
+ arglen = len(c.Args())
+ command = c.Flag("type").String()
+ )
+
+ defer func() {
+ if err != nil {
+ log.Fatal(err)
+ }
+ if blank(containerName) || command == "storage" {
+ data = api.Info()
+ } else {
+ containerApi := api.Container(containerName)
+ if blank(objectName) {
+ data, err = containerApi.Info()
+ } else {
+ data, err = containerApi.Object(objectName).Info()
+ }
+ }
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("%+v\n", data)
+ }()
+
+ if arglen > 0 {
+ if command == "container" {
+ containerName = c.Arg(0).String()
+ return
+ }
+ command = "object"
+ if !blank(containerName) && arglen == 1 {
+ objectName = c.Arg(0).String()
+ return
+ }
+ if arglen == 2 {
+ containerName = c.Arg(0).String()
+ objectName = c.Arg(1).String()
+ return
+ }
+ }
+ if command == "container" && !blank(containerName) {
+ return
+ }
+ if command == "storage" {
+ return
+ }
+ err = errorNotEnough
+}
+
+func remove(c cli.Command) {
+ var (
+ arglen = len(c.Args())
+ object string
+ err error
+ message string
+ objects []storage.ObjectAPI
+ )
+ if arglen == 2 {
+ container = c.Arg(0).String()
+ object = c.Arg(1).String()
+ }
+ if arglen == 1 {
+ if c.Flag("type").String() == "container" {
+ container = c.Arg(0).String()
+ } else {
+ object = c.Arg(0).String()
+ }
+ }
+ if blank(container) {
+ log.Fatal(errorNotEnough)
+ }
+ if blank(object) {
+ containerApi := api.Container(container)
+ err = containerApi.Remove()
+
+ // forced removal of container
+ if err == storage.ErrorConianerNotEmpty && c.Flag("force").Get().(bool) {
+ fmt.Println("removing all objects of", container)
+ objects, err = containerApi.Objects()
+ if err != nil {
+ log.Fatal(err)
+ }
+ for _, object := range objects {
+ err = object.Remove()
+ // skipping NotFound errors as non-critical
+ if err != nil && err != storage.ErrorObjectNotFound {
+ log.Fatal(err)
+ }
+ }
+ err = containerApi.Remove()
+ }
+ message = fmt.Sprintf("container %s removed", container)
+ } else {
+ err = api.Container(container).Object(object).Remove()
+ message = fmt.Sprintf("object %s removed in container %s", object, container)
+ }
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println(message)
+}
+
+func create(c cli.Command) {
+ if len(c.Args()) == 0 {
+ log.Fatal(errorNotEnough)
+ }
+ var name = c.Arg(0).String()
+ if _, err := api.CreateContainer(name, false); err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("created container %s\n", name)
+}
+
+func upload(c cli.Command) {
+ var path string
+ switch len(c.Args()) {
+ case 1:
+ path = c.Arg(0).String()
+ case 2:
+ container = c.Arg(0).String()
+ path = c.Arg(1).String()
+ }
+ if blank(container) || blank(path) {
+ log.Fatal(errorNotEnough)
+ }
+
+ f, err := os.Open(path)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ stat, err := os.Stat(path)
+ if err != nil {
+ log.Fatal(err)
+ }
+ ext := filepath.Ext(path)
+ mimetype := mime.TypeByExtension(ext)
+ bar := pb.New64(stat.Size()).SetUnits(pb.U_BYTES)
+ bar.Start()
+ reader := io.TeeReader(f, bar)
+ if err := api.Container(container).Upload(reader, stat.Name(), mimetype); err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("uploaded to %s\n", container)
+}
+
+func list(c cli.Command) {
+ var (
+ arglen = len(c.Args())
+ table = tablewriter.NewWriter(os.Stdout)
+ )
+ if arglen == 0 && (blank(container) || c.Flag("type").String() == "storage") {
+ containers, err := api.ContainersInfo()
+ if err != nil {
+ log.Fatal(err)
+ }
+ table.SetHeader([]string{"Name", "Objects", "Type"})
+ for _, cont := range containers {
+ v := []string{cont.Name, fmt.Sprint(cont.ObjectCount), cont.Type}
+ table.Append(v)
+ }
+ table.Render()
+ return
+ }
+ if arglen == 1 {
+ container = c.Arg(0).String()
+ }
+ if blank(container) {
+ log.Fatal(errorNotEnough)
+ }
+ objects, err := api.Container(container).ObjectsInfo()
+ if err != nil {
+ log.Fatal(err)
+ }
+ table.SetHeader([]string{"Name", "Size", "Downloaded"})
+ for _, object := range objects {
+ v := []string{object.Name, fmt.Sprint(object.Size), fmt.Sprint(object.Downloaded)}
+ table.Append(v)
+ }
+ table.Render()
+}
+
+func download(c cli.Command) {
+ var (
+ arglen = len(c.Args())
+ objectName string
+ path = c.Flag("path").String()
+ )
+ switch arglen {
+ case 1:
+ objectName = c.Arg(0).String()
+ case 2:
+ objectName = c.Arg(1).String()
+ container = c.Arg(0).String()
+ }
+ if blank(container) || blank(objectName) {
+ log.Fatal(errorNotEnough)
+ }
+ if blank(path) {
+ path = objectName
+ }
+ reader, err := api.Container(container).Object(objectName).GetReader()
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer reader.Close()
+ fmt.Printf("downloading %s->%s from %s\n", objectName, path, container)
+ f, err := os.Create(path)
+ if err != nil {
+ log.Fatal(err)
+ }
+ n, err := io.Copy(f, reader)
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("downloaded %s, %d bytes\n", objectName, n)
+}
+
+func main() {
+ defer func() {
+ if r := recover(); r != nil {
+ fmt.Println("Recovered", r)
+ }
+ }()
+ client.Start()
+}
diff --git a/vendor/github.com/ernado/selectel/storage/storage.go b/vendor/github.com/ernado/selectel/storage/storage.go
new file mode 100644
index 0000000..06f9ed4
--- /dev/null
+++ b/vendor/github.com/ernado/selectel/storage/storage.go
@@ -0,0 +1,393 @@
+package storage
+
+import (
+ "bytes"
+ "encoding/gob"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ "net/http"
+ "net/url"
+ "os"
+ "strconv"
+ "strings"
+ "time"
+)
+
+const (
+ fileLastModifiedLayout = "2006-01-02T15:04:05.999999"
+ queryFormat = "format"
+ queryJSON = "json"
+ headMethod = "HEAD"
+ getMethod = "GET"
+ postMethod = "POST"
+ putMethod = "PUT"
+ deleteMethod = "DELETE"
+ authTokenHeader = "X-Auth-Token"
+ objectCountHeader = "X-Account-Object-Count"
+ bytesUsedHeader = "X-Account-Bytes-Used"
+ containerCountHeader = "X-Account-Container-Count"
+ recievedBytesHeader = "X-Received-Bytes"
+ transferedBytesHeader = "X-Transfered-Bytes"
+ uint64BitSize = 64
+ uint64Base = 10
+ // EnvUser is the environment variable for the selectel api username
+ EnvUser = "SELECTEL_USER"
+ // EnvKey is the environment variable for the selectel api key
+ EnvKey = "SELECTEL_KEY"
+)
+
+var (
+ // ErrorObjectNotFound occurs when server returns 404
+ ErrorObjectNotFound = errors.New("Object not found")
+ // ErrorBadResponce occurs when server returns unexpected code
+ ErrorBadResponce = errors.New("Unable to process api responce")
+ // ErrorBadName occurs when a bad container/object name is provided
+ ErrorBadName = errors.New("Bad container/object name provided")
+ // ErrorBadJSON occurs on an unmarshalling error
+ ErrorBadJSON = errors.New("Unable to parse api responce")
+)
+
+// Client is selectel storage api client
+type Client struct {
+ storageURL *url.URL
+ token string
+ tokenExpire int
+ expireFrom *time.Time
+ user string
+ key string
+ client DoClient
+ file fileMock
+ debug bool
+}
+
+type ClientCredentials struct {
+ Token string
+ Debug bool
+ Expire int
+ ExpireFrom *time.Time
+ URL string
+}
+
+func NewFromCache(data []byte) (API, error) {
+ var (
+ cache = new(ClientCredentials)
+ err error
+ )
+ decorer := gob.NewDecoder(bytes.NewBuffer(data))
+ if err = decorer.Decode(cache); err != nil {
+ return nil, err
+ }
+ c := newClient(new(http.Client))
+ c.token = cache.Token
+ c.tokenExpire = cache.Expire
+ c.debug = cache.Debug
+ c.expireFrom = cache.ExpireFrom
+ c.storageURL, err = url.Parse(cache.URL)
+ if err != nil {
+ return nil, ErrorBadCredentials
+ }
+ return c, nil
+}
+
+func (c *Client) Credentials() (cache ClientCredentials) {
+ cache.URL = c.storageURL.String()
+ cache.Expire = c.tokenExpire
+ cache.ExpireFrom = c.expireFrom
+ cache.Token = c.token
+ cache.Debug = c.debug
+
+ return cache
+}
+
+func (c *Client) Dump() ([]byte, error) {
+ buffer := new(bytes.Buffer)
+ encoder := gob.NewEncoder(buffer)
+ if err := encoder.Encode(c.Credentials()); err != nil {
+ return nil, err
+ }
+ return buffer.Bytes(), nil
+}
+
+// StorageInformation contains some useful metrics about the storage for the current user
+type StorageInformation struct {
+ ObjectCount uint64
+ BytesUsed uint64
+ ContainerCount uint64
+ RecievedBytes uint64
+ TransferedBytes uint64
+}
+
+// API for selectel storage
+type API interface {
+ DoClient
+ Info() StorageInformation
+ Upload(reader io.Reader, container, filename, t string) error
+ UploadFile(filename, container string) error
+ Auth(user, key string) error
+ Debug(debug bool)
+ Token() string
+ C(string) ContainerAPI
+ Container(string) ContainerAPI
+ RemoveObject(container, filename string) error
+ URL(container, filename string) string
+ CreateContainer(name string, private bool) (ContainerAPI, error)
+ RemoveContainer(name string) error
+ // ObjectInfo returns information about object in container
+ ObjectInfo(container, filename string) (f ObjectInfo, err error)
+ ObjectsInfo(container string) ([]ObjectInfo, error)
+ ContainerInfo(name string) (info ContainerInfo, err error)
+ ContainersInfo() ([]ContainerInfo, error)
+ Containers() ([]ContainerAPI, error)
+ Credentials() (cache ClientCredentials)
+ Dump() ([]byte, error)
+}
+
+// DoClient is mock of http.Client
+type DoClient interface {
+ Do(request *http.Request) (*http.Response, error)
+}
+
+// setClient sets client
+func (c *Client) setClient(client DoClient) {
+ c.client = client
+}
+
+func (c *Client) Debug(debug bool) {
+ c.debug = debug
+}
+
+// ContainersInfo returns all container-specific information from the storage
+func (c *Client) ContainersInfo() ([]ContainerInfo, error) {
+ info := []ContainerInfo{}
+ request, err := c.NewRequest(getMethod, nil)
+ if err != nil {
+ return nil, err
+ }
+ query := request.URL.Query()
+ query.Add(queryFormat, queryJSON)
+ request.URL.RawQuery = query.Encode()
+ res, err := c.Do(request)
+ if err != nil {
+ return nil, err
+ }
+ defer res.Body.Close()
+ if res.StatusCode != http.StatusOK {
+ return nil, ErrorBadResponce
+ }
+ decoder := json.NewDecoder(res.Body)
+ if err := decoder.Decode(&info); err != nil {
+ return nil, ErrorBadJSON
+ }
+ return info, nil
+}
+
+// Containers returns all containers from the storage
+func (c *Client) Containers() ([]ContainerAPI, error) {
+ info, err := c.ContainersInfo()
+ if err != nil {
+ return nil, err
+ }
+ containers := []ContainerAPI{}
+ for _, container := range info {
+ containers = append(containers, c.Container(container.Name))
+ }
+ return containers, nil
+}
+
+// ObjectsInfo returns information about all objects in container
+func (c *Client) ObjectsInfo(container string) ([]ObjectInfo, error) {
+ info := []ObjectInfo{}
+ request, err := c.NewRequest(getMethod, nil, container)
+ if err != nil {
+ return nil, err
+ }
+ query := request.URL.Query()
+ query.Add(queryFormat, queryJSON)
+ request.URL.RawQuery = query.Encode()
+ res, err := c.Do(request)
+ if err != nil {
+ return nil, err
+ }
+ defer res.Body.Close()
+ if res.StatusCode == http.StatusNotFound {
+ return nil, ErrorObjectNotFound
+ }
+ if res.StatusCode != http.StatusOK {
+ return nil, ErrorBadResponce
+ }
+ decoder := json.NewDecoder(res.Body)
+ if err := decoder.Decode(&info); err != nil {
+ return nil, ErrorBadJSON
+ }
+ for i, v := range info {
+ info[i].LastModified, err = time.Parse(fileLastModifiedLayout, v.LastModifiedStr)
+ if err != nil {
+ return info, err
+ }
+ }
+ return info, nil
+}
+
+// RemoveObject removes an object from the specified container
+func (c *Client) RemoveObject(container, filename string) error {
+ request, err := c.NewRequest(deleteMethod, nil, container, filename)
+ if err != nil {
+ return err
+ }
+ res, err := c.Do(request)
+ if err != nil {
+ return err
+ }
+ if res.StatusCode == http.StatusNotFound {
+ return ErrorObjectNotFound
+ }
+ if res.StatusCode == http.StatusNoContent {
+ return nil
+ }
+ return ErrorBadResponce
+}
+
+// Info returns StorageInformation for current user
+func (c *Client) Info() (info StorageInformation) {
+ request, err := c.NewRequest(getMethod, nil)
+ if err != nil {
+ return
+ }
+ res, err := c.do(request)
+ if err != nil {
+ return
+ }
+ parse := func(key string) uint64 {
+ v, _ := strconv.ParseUint(res.Header.Get(key), uint64Base, uint64BitSize)
+ return v
+ }
+ info.BytesUsed = parse(bytesUsedHeader)
+ info.ObjectCount = parse(objectCountHeader)
+ info.ContainerCount = parse(containerCountHeader)
+ info.RecievedBytes = parse(recievedBytesHeader)
+ info.TransferedBytes = parse(transferedBytesHeader)
+ return
+}
+
+// URL returns url for file in container
+func (c *Client) URL(container, filename string) string {
+ return c.url(container, filename)
+}
+
+// Do performs request with auth token
+func (c *Client) Do(request *http.Request) (res *http.Response, err error) {
+ return c.do(request)
+}
+
+func (c *Client) do(request *http.Request) (res *http.Response, err error) {
+ // prevent null pointer dereference
+ if request.Header == nil {
+ request.Header = http.Header{}
+ }
+ // check for token expiration / first request with async auth
+ if request.URL.String() != authURL && c.Expired() {
+ log.Println("[selectel]", "token expired, performing auth")
+ if err = c.Auth(c.user, c.key); err != nil {
+ return
+ }
+ // fix hostname of request
+ c.fixURL(request)
+ }
+ // add auth token to headers
+ if !blank(c.token) {
+ request.Header.Add(authTokenHeader, c.token)
+ }
+ if c.debug {
+ // perform request and record time elapsed
+ start := time.Now().Truncate(time.Millisecond)
+ res, err = c.client.Do(request)
+ stop := time.Now().Truncate(time.Millisecond)
+ duration := stop.Sub(start)
+ // log error
+ if err != nil {
+ log.Println(request.Method, request.URL.String(), err, duration)
+ return
+ }
+ // log request
+ log.Println(request.Method, request.URL.String(), res.StatusCode, duration)
+ // check for auth code
+ } else {
+ res, err = c.client.Do(request)
+ if err != nil {
+ return
+ }
+ }
+ if res.StatusCode == http.StatusUnauthorized {
+ c.expireFrom = nil // ensure that next request will force authentication
+ return nil, ErrorAuth
+ }
+ return
+}
+
+func (c *Client) NewRequest(method string, body io.Reader, parms ...string) (*http.Request, error) {
+ var badName bool
+ for i := range parms {
+ // check for length
+ if len(parms[i]) > 256 {
+ badName = true
+ }
+ // todo: check for trailing slash
+ parms[i] = url.QueryEscape(parms[i])
+ }
+ req, err := http.NewRequest(method, c.url(parms...), body)
+ if err != nil || badName {
+ return nil, ErrorBadName
+ }
+ return req, nil
+}
+
+func (c *Client) fixURL(request *http.Request) error {
+ newRequest, err := http.NewRequest(request.Method, c.url(request.URL.Path), request.Body)
+ *request = *newRequest
+ return err
+}
+
+func (c *Client) url(postfix ...string) string {
+ path := strings.Join(postfix, "/")
+ if c.storageURL == nil {
+ return path
+ }
+ return fmt.Sprintf("%s%s", c.storageURL, path)
+}
+
+// New returns a new Selectel storage API client
+func New(user, key string) (API, error) {
+ client := newClient(new(http.Client))
+ return client, client.Auth(user, key)
+}
+
+// NewAsync returns a new API client; auth is performed lazily on the first request
+func NewAsync(user, key string) API {
+ c := newClient(new(http.Client))
+ if blank(user) || blank(key) {
+ panic(ErrorBadCredentials)
+ }
+ c.user = user
+ c.key = key
+ return c
+}
+
+func newClient(client *http.Client) *Client {
+ c := new(Client)
+ c.client = client
+ return c
+}
+
+// NewEnv acts as New, but reads credentials from the environment
+func NewEnv() (API, error) {
+ user := os.Getenv(EnvUser)
+ key := os.Getenv(EnvKey)
+ return New(user, key)
+}
+
+func blank(s string) bool {
+ return len(s) == 0
+}
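For orientation, here is a minimal usage sketch of the client defined above. It is illustrative only and rests on assumptions: that the API interface returned by New/NewEnv exposes the *Client methods shown (ContainersInfo, Info), and that the credentials "account"/"password" are placeholders to be replaced by real ones.

package main

import (
	"log"

	"github.com/ernado/selectel/storage"
)

func main() {
	// New authenticates immediately; NewAsync defers auth to the first request,
	// and NewEnv reads credentials from the EnvUser/EnvKey environment variables.
	api, err := storage.New("account", "password") // placeholder credentials
	if err != nil {
		log.Fatal(err)
	}
	// Enumerate containers via the JSON listing endpoint.
	containers, err := api.ContainersInfo()
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range containers {
		log.Println("container:", c.Name)
	}
	// Account-level counters that Info() parses from the response headers.
	usage := api.Info()
	log.Printf("bytes used: %d, objects: %d, containers: %d",
		usage.BytesUsed, usage.ObjectCount, usage.ContainerCount)
}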
diff --git a/vendor/github.com/ernado/selectel/storage/upload.go b/vendor/github.com/ernado/selectel/storage/upload.go
new file mode 100644
index 0000000..21b33ab
--- /dev/null
+++ b/vendor/github.com/ernado/selectel/storage/upload.go
@@ -0,0 +1,129 @@
+package storage
+
+import (
+ "crypto/md5"
+ "encoding/hex"
+ "io"
+ "io/ioutil"
+ "mime"
+ "net/http"
+ "os"
+ "path/filepath"
+)
+
+const (
+ contentTypeHeader = "Content-Type"
+)
+
+// fileMock is a mock for file operations
+type fileMock interface {
+ Open(name string) (*os.File, error)
+ Stat(name string) (os.FileInfo, error)
+}
+
+// fileErrorMock is a simple mock that returns the specified errors on
+// every function call.
+type fileErrorMock struct {
+ errOpen error
+ errStat error
+}
+
+func (f fileErrorMock) Open(name string) (*os.File, error) {
+ return nil, f.errOpen
+}
+
+func (f fileErrorMock) Stat(name string) (os.FileInfo, error) {
+ return nil, f.errStat
+}
+
+func (c *Client) fileOpen(name string) (*os.File, error) {
+ if c.file != nil {
+ return c.file.Open(name)
+ }
+ return os.Open(name)
+}
+
+func (c *Client) fileSetMockError(errOpen, errStat error) {
+ c.file = &fileErrorMock{errOpen, errStat}
+}
+
+func (c *Client) fileStat(name string) (os.FileInfo, error) {
+ if c.file != nil {
+ return c.file.Stat(name)
+ }
+ return os.Stat(name)
+}
+
+// UploadFile uploads the named file to the container
+func (c *Client) UploadFile(filename, container string) error {
+ f, err := c.fileOpen(filename)
+ if err != nil {
+ return err
+ }
+ stats, err := c.fileStat(filename)
+ if err != nil {
+ return err
+ }
+ ext := filepath.Ext(filename)
+ mimetype := mime.TypeByExtension(ext)
+ return c.Upload(f, container, stats.Name(), mimetype)
+}
+
+func (c *Client) upload(reader io.Reader, container, filename, contentType string, check bool) error {
+ var etag string
+ closer, ok := reader.(io.ReadCloser)
+ if ok {
+ defer closer.Close()
+ }
+
+ if check {
+ f, err := ioutil.TempFile(os.TempDir(), filename)
+ if err != nil {
+ return err
+ }
+ stat, _ := f.Stat()
+ path := stat.Name()
+ hasher := md5.New()
+ writer := io.MultiWriter(f, hasher)
+ _, err = io.Copy(writer, reader)
+ f.Close()
+ if err != nil {
+ return err
+ }
+ etag = hex.EncodeToString(hasher.Sum(nil))
+ reader, err = os.Open(filepath.Join(os.TempDir(), path))
+ defer os.Remove(path)
+ if err != nil {
+ return err
+ }
+ }
+
+ request, err := c.NewRequest(putMethod, reader, container, filename)
+ if err != nil {
+ return err
+ }
+ if !blank(contentType) {
+ request.Header.Add(contentTypeHeader, contentType)
+ }
+
+ if !blank(etag) {
+ request.Header.Add(etagHeader, etag)
+ }
+
+ res, err := c.do(request)
+ if err != nil {
+ return err
+ }
+ defer res.Body.Close()
+
+ if res.StatusCode != http.StatusCreated {
+ return ErrorBadResponce
+ }
+
+ return nil
+}
+
+// Upload reads all data from reader and uploads it to the container with the given filename and content type
+func (c *Client) Upload(reader io.Reader, container, filename, contentType string) error {
+ return c.upload(reader, container, filename, contentType, true)
+}
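Again purely illustrative: a sketch of the upload path, assuming the interface returned by New also exposes UploadFile, URL and RemoveObject. The container name "backups" and the file "report.pdf" are hypothetical.

package main

import (
	"log"

	"github.com/ernado/selectel/storage"
)

func main() {
	api, err := storage.New("account", "password") // placeholder credentials
	if err != nil {
		log.Fatal(err)
	}
	// UploadFile opens the file, picks a content type from its extension via
	// mime.TypeByExtension and delegates to Upload, which spools the body to a
	// temp file, computes its MD5 sum and sends it in the ETag header so the
	// server can verify the payload before answering 201 Created.
	if err := api.UploadFile("report.pdf", "backups"); err != nil {
		log.Fatal(err)
	}
	log.Println("stored at", api.URL("backups", "report.pdf"))

	// RemoveObject deletes the object; a 404 maps to ErrorObjectNotFound.
	if err := api.RemoveObject("backups", "report.pdf"); err != nil {
		log.Fatal(err)
	}
}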
diff --git a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go b/vendor/golang.org/x/crypto/ssh/terminal/terminal.go
deleted file mode 100644
index 741eeb1..0000000
--- a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go
+++ /dev/null
@@ -1,892 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package terminal
-
-import (
- "bytes"
- "io"
- "sync"
- "unicode/utf8"
-)
-
-// EscapeCodes contains escape sequences that can be written to the terminal in
-// order to achieve different styles of text.
-type EscapeCodes struct {
- // Foreground colors
- Black, Red, Green, Yellow, Blue, Magenta, Cyan, White []byte
-
- // Reset all attributes
- Reset []byte
-}
-
-var vt100EscapeCodes = EscapeCodes{
- Black: []byte{keyEscape, '[', '3', '0', 'm'},
- Red: []byte{keyEscape, '[', '3', '1', 'm'},
- Green: []byte{keyEscape, '[', '3', '2', 'm'},
- Yellow: []byte{keyEscape, '[', '3', '3', 'm'},
- Blue: []byte{keyEscape, '[', '3', '4', 'm'},
- Magenta: []byte{keyEscape, '[', '3', '5', 'm'},
- Cyan: []byte{keyEscape, '[', '3', '6', 'm'},
- White: []byte{keyEscape, '[', '3', '7', 'm'},
-
- Reset: []byte{keyEscape, '[', '0', 'm'},
-}
-
-// Terminal contains the state for running a VT100 terminal that is capable of
-// reading lines of input.
-type Terminal struct {
- // AutoCompleteCallback, if non-null, is called for each keypress with
- // the full input line and the current position of the cursor (in
- // bytes, as an index into |line|). If it returns ok=false, the key
- // press is processed normally. Otherwise it returns a replacement line
- // and the new cursor position.
- AutoCompleteCallback func(line string, pos int, key rune) (newLine string, newPos int, ok bool)
-
- // Escape contains a pointer to the escape codes for this terminal.
- // It's always a valid pointer, although the escape codes themselves
- // may be empty if the terminal doesn't support them.
- Escape *EscapeCodes
-
- // lock protects the terminal and the state in this object from
- // concurrent processing of a key press and a Write() call.
- lock sync.Mutex
-
- c io.ReadWriter
- prompt []rune
-
- // line is the current line being entered.
- line []rune
- // pos is the logical position of the cursor in line
- pos int
- // echo is true if local echo is enabled
- echo bool
- // pasteActive is true iff there is a bracketed paste operation in
- // progress.
- pasteActive bool
-
- // cursorX contains the current X value of the cursor where the left
- // edge is 0. cursorY contains the row number where the first row of
- // the current line is 0.
- cursorX, cursorY int
- // maxLine is the greatest value of cursorY so far.
- maxLine int
-
- termWidth, termHeight int
-
- // outBuf contains the terminal data to be sent.
- outBuf []byte
- // remainder contains the remainder of any partial key sequences after
- // a read. It aliases into inBuf.
- remainder []byte
- inBuf [256]byte
-
- // history contains previously entered commands so that they can be
- // accessed with the up and down keys.
- history stRingBuffer
- // historyIndex stores the currently accessed history entry, where zero
- // means the immediately previous entry.
- historyIndex int
- // When navigating up and down the history it's possible to return to
- // the incomplete, initial line. That value is stored in
- // historyPending.
- historyPending string
-}
-
-// NewTerminal runs a VT100 terminal on the given ReadWriter. If the ReadWriter is
-// a local terminal, that terminal must first have been put into raw mode.
-// prompt is a string that is written at the start of each input line (i.e.
-// "> ").
-func NewTerminal(c io.ReadWriter, prompt string) *Terminal {
- return &Terminal{
- Escape: &vt100EscapeCodes,
- c: c,
- prompt: []rune(prompt),
- termWidth: 80,
- termHeight: 24,
- echo: true,
- historyIndex: -1,
- }
-}
-
-const (
- keyCtrlD = 4
- keyCtrlU = 21
- keyEnter = '\r'
- keyEscape = 27
- keyBackspace = 127
- keyUnknown = 0xd800 /* UTF-16 surrogate area */ + iota
- keyUp
- keyDown
- keyLeft
- keyRight
- keyAltLeft
- keyAltRight
- keyHome
- keyEnd
- keyDeleteWord
- keyDeleteLine
- keyClearScreen
- keyPasteStart
- keyPasteEnd
-)
-
-var pasteStart = []byte{keyEscape, '[', '2', '0', '0', '~'}
-var pasteEnd = []byte{keyEscape, '[', '2', '0', '1', '~'}
-
-// bytesToKey tries to parse a key sequence from b. If successful, it returns
-// the key and the remainder of the input. Otherwise it returns utf8.RuneError.
-func bytesToKey(b []byte, pasteActive bool) (rune, []byte) {
- if len(b) == 0 {
- return utf8.RuneError, nil
- }
-
- if !pasteActive {
- switch b[0] {
- case 1: // ^A
- return keyHome, b[1:]
- case 5: // ^E
- return keyEnd, b[1:]
- case 8: // ^H
- return keyBackspace, b[1:]
- case 11: // ^K
- return keyDeleteLine, b[1:]
- case 12: // ^L
- return keyClearScreen, b[1:]
- case 23: // ^W
- return keyDeleteWord, b[1:]
- }
- }
-
- if b[0] != keyEscape {
- if !utf8.FullRune(b) {
- return utf8.RuneError, b
- }
- r, l := utf8.DecodeRune(b)
- return r, b[l:]
- }
-
- if !pasteActive && len(b) >= 3 && b[0] == keyEscape && b[1] == '[' {
- switch b[2] {
- case 'A':
- return keyUp, b[3:]
- case 'B':
- return keyDown, b[3:]
- case 'C':
- return keyRight, b[3:]
- case 'D':
- return keyLeft, b[3:]
- case 'H':
- return keyHome, b[3:]
- case 'F':
- return keyEnd, b[3:]
- }
- }
-
- if !pasteActive && len(b) >= 6 && b[0] == keyEscape && b[1] == '[' && b[2] == '1' && b[3] == ';' && b[4] == '3' {
- switch b[5] {
- case 'C':
- return keyAltRight, b[6:]
- case 'D':
- return keyAltLeft, b[6:]
- }
- }
-
- if !pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteStart) {
- return keyPasteStart, b[6:]
- }
-
- if pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteEnd) {
- return keyPasteEnd, b[6:]
- }
-
- // If we get here then we have a key that we don't recognise, or a
- // partial sequence. It's not clear how one should find the end of a
- // sequence without knowing them all, but it seems that [a-zA-Z~] only
- // appears at the end of a sequence.
- for i, c := range b[0:] {
- if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' || c == '~' {
- return keyUnknown, b[i+1:]
- }
- }
-
- return utf8.RuneError, b
-}
-
-// queue appends data to the end of t.outBuf
-func (t *Terminal) queue(data []rune) {
- t.outBuf = append(t.outBuf, []byte(string(data))...)
-}
-
-var eraseUnderCursor = []rune{' ', keyEscape, '[', 'D'}
-var space = []rune{' '}
-
-func isPrintable(key rune) bool {
- isInSurrogateArea := key >= 0xd800 && key <= 0xdbff
- return key >= 32 && !isInSurrogateArea
-}
-
-// moveCursorToPos appends data to t.outBuf which will move the cursor to the
-// given, logical position in the text.
-func (t *Terminal) moveCursorToPos(pos int) {
- if !t.echo {
- return
- }
-
- x := visualLength(t.prompt) + pos
- y := x / t.termWidth
- x = x % t.termWidth
-
- up := 0
- if y < t.cursorY {
- up = t.cursorY - y
- }
-
- down := 0
- if y > t.cursorY {
- down = y - t.cursorY
- }
-
- left := 0
- if x < t.cursorX {
- left = t.cursorX - x
- }
-
- right := 0
- if x > t.cursorX {
- right = x - t.cursorX
- }
-
- t.cursorX = x
- t.cursorY = y
- t.move(up, down, left, right)
-}
-
-func (t *Terminal) move(up, down, left, right int) {
- movement := make([]rune, 3*(up+down+left+right))
- m := movement
- for i := 0; i < up; i++ {
- m[0] = keyEscape
- m[1] = '['
- m[2] = 'A'
- m = m[3:]
- }
- for i := 0; i < down; i++ {
- m[0] = keyEscape
- m[1] = '['
- m[2] = 'B'
- m = m[3:]
- }
- for i := 0; i < left; i++ {
- m[0] = keyEscape
- m[1] = '['
- m[2] = 'D'
- m = m[3:]
- }
- for i := 0; i < right; i++ {
- m[0] = keyEscape
- m[1] = '['
- m[2] = 'C'
- m = m[3:]
- }
-
- t.queue(movement)
-}
-
-func (t *Terminal) clearLineToRight() {
- op := []rune{keyEscape, '[', 'K'}
- t.queue(op)
-}
-
-const maxLineLength = 4096
-
-func (t *Terminal) setLine(newLine []rune, newPos int) {
- if t.echo {
- t.moveCursorToPos(0)
- t.writeLine(newLine)
- for i := len(newLine); i < len(t.line); i++ {
- t.writeLine(space)
- }
- t.moveCursorToPos(newPos)
- }
- t.line = newLine
- t.pos = newPos
-}
-
-func (t *Terminal) advanceCursor(places int) {
- t.cursorX += places
- t.cursorY += t.cursorX / t.termWidth
- if t.cursorY > t.maxLine {
- t.maxLine = t.cursorY
- }
- t.cursorX = t.cursorX % t.termWidth
-
- if places > 0 && t.cursorX == 0 {
- // Normally terminals will advance the current position
- // when writing a character. But that doesn't happen
- // for the last character in a line. However, when
- // writing a character (except a new line) that causes
- // a line wrap, the position will be advanced two
- // places.
- //
- // So, if we are stopping at the end of a line, we
- // need to write a newline so that our cursor can be
- // advanced to the next line.
- t.outBuf = append(t.outBuf, '\n')
- }
-}
-
-func (t *Terminal) eraseNPreviousChars(n int) {
- if n == 0 {
- return
- }
-
- if t.pos < n {
- n = t.pos
- }
- t.pos -= n
- t.moveCursorToPos(t.pos)
-
- copy(t.line[t.pos:], t.line[n+t.pos:])
- t.line = t.line[:len(t.line)-n]
- if t.echo {
- t.writeLine(t.line[t.pos:])
- for i := 0; i < n; i++ {
- t.queue(space)
- }
- t.advanceCursor(n)
- t.moveCursorToPos(t.pos)
- }
-}
-
-// countToLeftWord returns then number of characters from the cursor to the
-// start of the previous word.
-func (t *Terminal) countToLeftWord() int {
- if t.pos == 0 {
- return 0
- }
-
- pos := t.pos - 1
- for pos > 0 {
- if t.line[pos] != ' ' {
- break
- }
- pos--
- }
- for pos > 0 {
- if t.line[pos] == ' ' {
- pos++
- break
- }
- pos--
- }
-
- return t.pos - pos
-}
-
-// countToRightWord returns then number of characters from the cursor to the
-// start of the next word.
-func (t *Terminal) countToRightWord() int {
- pos := t.pos
- for pos < len(t.line) {
- if t.line[pos] == ' ' {
- break
- }
- pos++
- }
- for pos < len(t.line) {
- if t.line[pos] != ' ' {
- break
- }
- pos++
- }
- return pos - t.pos
-}
-
-// visualLength returns the number of visible glyphs in s.
-func visualLength(runes []rune) int {
- inEscapeSeq := false
- length := 0
-
- for _, r := range runes {
- switch {
- case inEscapeSeq:
- if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') {
- inEscapeSeq = false
- }
- case r == '\x1b':
- inEscapeSeq = true
- default:
- length++
- }
- }
-
- return length
-}
-
-// handleKey processes the given key and, optionally, returns a line of text
-// that the user has entered.
-func (t *Terminal) handleKey(key rune) (line string, ok bool) {
- if t.pasteActive && key != keyEnter {
- t.addKeyToLine(key)
- return
- }
-
- switch key {
- case keyBackspace:
- if t.pos == 0 {
- return
- }
- t.eraseNPreviousChars(1)
- case keyAltLeft:
- // move left by a word.
- t.pos -= t.countToLeftWord()
- t.moveCursorToPos(t.pos)
- case keyAltRight:
- // move right by a word.
- t.pos += t.countToRightWord()
- t.moveCursorToPos(t.pos)
- case keyLeft:
- if t.pos == 0 {
- return
- }
- t.pos--
- t.moveCursorToPos(t.pos)
- case keyRight:
- if t.pos == len(t.line) {
- return
- }
- t.pos++
- t.moveCursorToPos(t.pos)
- case keyHome:
- if t.pos == 0 {
- return
- }
- t.pos = 0
- t.moveCursorToPos(t.pos)
- case keyEnd:
- if t.pos == len(t.line) {
- return
- }
- t.pos = len(t.line)
- t.moveCursorToPos(t.pos)
- case keyUp:
- entry, ok := t.history.NthPreviousEntry(t.historyIndex + 1)
- if !ok {
- return "", false
- }
- if t.historyIndex == -1 {
- t.historyPending = string(t.line)
- }
- t.historyIndex++
- runes := []rune(entry)
- t.setLine(runes, len(runes))
- case keyDown:
- switch t.historyIndex {
- case -1:
- return
- case 0:
- runes := []rune(t.historyPending)
- t.setLine(runes, len(runes))
- t.historyIndex--
- default:
- entry, ok := t.history.NthPreviousEntry(t.historyIndex - 1)
- if ok {
- t.historyIndex--
- runes := []rune(entry)
- t.setLine(runes, len(runes))
- }
- }
- case keyEnter:
- t.moveCursorToPos(len(t.line))
- t.queue([]rune("\r\n"))
- line = string(t.line)
- ok = true
- t.line = t.line[:0]
- t.pos = 0
- t.cursorX = 0
- t.cursorY = 0
- t.maxLine = 0
- case keyDeleteWord:
- // Delete zero or more spaces and then one or more characters.
- t.eraseNPreviousChars(t.countToLeftWord())
- case keyDeleteLine:
- // Delete everything from the current cursor position to the
- // end of line.
- for i := t.pos; i < len(t.line); i++ {
- t.queue(space)
- t.advanceCursor(1)
- }
- t.line = t.line[:t.pos]
- t.moveCursorToPos(t.pos)
- case keyCtrlD:
- // Erase the character under the current position.
- // The EOF case when the line is empty is handled in
- // readLine().
- if t.pos < len(t.line) {
- t.pos++
- t.eraseNPreviousChars(1)
- }
- case keyCtrlU:
- t.eraseNPreviousChars(t.pos)
- case keyClearScreen:
- // Erases the screen and moves the cursor to the home position.
- t.queue([]rune("\x1b[2J\x1b[H"))
- t.queue(t.prompt)
- t.cursorX, t.cursorY = 0, 0
- t.advanceCursor(visualLength(t.prompt))
- t.setLine(t.line, t.pos)
- default:
- if t.AutoCompleteCallback != nil {
- prefix := string(t.line[:t.pos])
- suffix := string(t.line[t.pos:])
-
- t.lock.Unlock()
- newLine, newPos, completeOk := t.AutoCompleteCallback(prefix+suffix, len(prefix), key)
- t.lock.Lock()
-
- if completeOk {
- t.setLine([]rune(newLine), utf8.RuneCount([]byte(newLine)[:newPos]))
- return
- }
- }
- if !isPrintable(key) {
- return
- }
- if len(t.line) == maxLineLength {
- return
- }
- t.addKeyToLine(key)
- }
- return
-}
-
-// addKeyToLine inserts the given key at the current position in the current
-// line.
-func (t *Terminal) addKeyToLine(key rune) {
- if len(t.line) == cap(t.line) {
- newLine := make([]rune, len(t.line), 2*(1+len(t.line)))
- copy(newLine, t.line)
- t.line = newLine
- }
- t.line = t.line[:len(t.line)+1]
- copy(t.line[t.pos+1:], t.line[t.pos:])
- t.line[t.pos] = key
- if t.echo {
- t.writeLine(t.line[t.pos:])
- }
- t.pos++
- t.moveCursorToPos(t.pos)
-}
-
-func (t *Terminal) writeLine(line []rune) {
- for len(line) != 0 {
- remainingOnLine := t.termWidth - t.cursorX
- todo := len(line)
- if todo > remainingOnLine {
- todo = remainingOnLine
- }
- t.queue(line[:todo])
- t.advanceCursor(visualLength(line[:todo]))
- line = line[todo:]
- }
-}
-
-func (t *Terminal) Write(buf []byte) (n int, err error) {
- t.lock.Lock()
- defer t.lock.Unlock()
-
- if t.cursorX == 0 && t.cursorY == 0 {
- // This is the easy case: there's nothing on the screen that we
- // have to move out of the way.
- return t.c.Write(buf)
- }
-
- // We have a prompt and possibly user input on the screen. We
- // have to clear it first.
- t.move(0 /* up */, 0 /* down */, t.cursorX /* left */, 0 /* right */)
- t.cursorX = 0
- t.clearLineToRight()
-
- for t.cursorY > 0 {
- t.move(1 /* up */, 0, 0, 0)
- t.cursorY--
- t.clearLineToRight()
- }
-
- if _, err = t.c.Write(t.outBuf); err != nil {
- return
- }
- t.outBuf = t.outBuf[:0]
-
- if n, err = t.c.Write(buf); err != nil {
- return
- }
-
- t.writeLine(t.prompt)
- if t.echo {
- t.writeLine(t.line)
- }
-
- t.moveCursorToPos(t.pos)
-
- if _, err = t.c.Write(t.outBuf); err != nil {
- return
- }
- t.outBuf = t.outBuf[:0]
- return
-}
-
-// ReadPassword temporarily changes the prompt and reads a password, without
-// echo, from the terminal.
-func (t *Terminal) ReadPassword(prompt string) (line string, err error) {
- t.lock.Lock()
- defer t.lock.Unlock()
-
- oldPrompt := t.prompt
- t.prompt = []rune(prompt)
- t.echo = false
-
- line, err = t.readLine()
-
- t.prompt = oldPrompt
- t.echo = true
-
- return
-}
-
-// ReadLine returns a line of input from the terminal.
-func (t *Terminal) ReadLine() (line string, err error) {
- t.lock.Lock()
- defer t.lock.Unlock()
-
- return t.readLine()
-}
-
-func (t *Terminal) readLine() (line string, err error) {
- // t.lock must be held at this point
-
- if t.cursorX == 0 && t.cursorY == 0 {
- t.writeLine(t.prompt)
- t.c.Write(t.outBuf)
- t.outBuf = t.outBuf[:0]
- }
-
- lineIsPasted := t.pasteActive
-
- for {
- rest := t.remainder
- lineOk := false
- for !lineOk {
- var key rune
- key, rest = bytesToKey(rest, t.pasteActive)
- if key == utf8.RuneError {
- break
- }
- if !t.pasteActive {
- if key == keyCtrlD {
- if len(t.line) == 0 {
- return "", io.EOF
- }
- }
- if key == keyPasteStart {
- t.pasteActive = true
- if len(t.line) == 0 {
- lineIsPasted = true
- }
- continue
- }
- } else if key == keyPasteEnd {
- t.pasteActive = false
- continue
- }
- if !t.pasteActive {
- lineIsPasted = false
- }
- line, lineOk = t.handleKey(key)
- }
- if len(rest) > 0 {
- n := copy(t.inBuf[:], rest)
- t.remainder = t.inBuf[:n]
- } else {
- t.remainder = nil
- }
- t.c.Write(t.outBuf)
- t.outBuf = t.outBuf[:0]
- if lineOk {
- if t.echo {
- t.historyIndex = -1
- t.history.Add(line)
- }
- if lineIsPasted {
- err = ErrPasteIndicator
- }
- return
- }
-
- // t.remainder is a slice at the beginning of t.inBuf
- // containing a partial key sequence
- readBuf := t.inBuf[len(t.remainder):]
- var n int
-
- t.lock.Unlock()
- n, err = t.c.Read(readBuf)
- t.lock.Lock()
-
- if err != nil {
- return
- }
-
- t.remainder = t.inBuf[:n+len(t.remainder)]
- }
-
- panic("unreachable") // for Go 1.0.
-}
-
-// SetPrompt sets the prompt to be used when reading subsequent lines.
-func (t *Terminal) SetPrompt(prompt string) {
- t.lock.Lock()
- defer t.lock.Unlock()
-
- t.prompt = []rune(prompt)
-}
-
-func (t *Terminal) clearAndRepaintLinePlusNPrevious(numPrevLines int) {
- // Move cursor to column zero at the start of the line.
- t.move(t.cursorY, 0, t.cursorX, 0)
- t.cursorX, t.cursorY = 0, 0
- t.clearLineToRight()
- for t.cursorY < numPrevLines {
- // Move down a line
- t.move(0, 1, 0, 0)
- t.cursorY++
- t.clearLineToRight()
- }
- // Move back to beginning.
- t.move(t.cursorY, 0, 0, 0)
- t.cursorX, t.cursorY = 0, 0
-
- t.queue(t.prompt)
- t.advanceCursor(visualLength(t.prompt))
- t.writeLine(t.line)
- t.moveCursorToPos(t.pos)
-}
-
-func (t *Terminal) SetSize(width, height int) error {
- t.lock.Lock()
- defer t.lock.Unlock()
-
- if width == 0 {
- width = 1
- }
-
- oldWidth := t.termWidth
- t.termWidth, t.termHeight = width, height
-
- switch {
- case width == oldWidth:
- // If the width didn't change then nothing else needs to be
- // done.
- return nil
- case len(t.line) == 0 && t.cursorX == 0 && t.cursorY == 0:
- // If there is nothing on current line and no prompt printed,
- // just do nothing
- return nil
- case width < oldWidth:
- // Some terminals (e.g. xterm) will truncate lines that were
- // too long when shinking. Others, (e.g. gnome-terminal) will
- // attempt to wrap them. For the former, repainting t.maxLine
- // works great, but that behaviour goes badly wrong in the case
- // of the latter because they have doubled every full line.
-
- // We assume that we are working on a terminal that wraps lines
- // and adjust the cursor position based on every previous line
- // wrapping and turning into two. This causes the prompt on
- // xterms to move upwards, which isn't great, but it avoids a
- // huge mess with gnome-terminal.
- if t.cursorX >= t.termWidth {
- t.cursorX = t.termWidth - 1
- }
- t.cursorY *= 2
- t.clearAndRepaintLinePlusNPrevious(t.maxLine * 2)
- case width > oldWidth:
- // If the terminal expands then our position calculations will
- // be wrong in the future because we think the cursor is
- // |t.pos| chars into the string, but there will be a gap at
- // the end of any wrapped line.
- //
- // But the position will actually be correct until we move, so
- // we can move back to the beginning and repaint everything.
- t.clearAndRepaintLinePlusNPrevious(t.maxLine)
- }
-
- _, err := t.c.Write(t.outBuf)
- t.outBuf = t.outBuf[:0]
- return err
-}
-
-type pasteIndicatorError struct{}
-
-func (pasteIndicatorError) Error() string {
- return "terminal: ErrPasteIndicator not correctly handled"
-}
-
-// ErrPasteIndicator may be returned from ReadLine as the error, in addition
-// to valid line data. It indicates that bracketed paste mode is enabled and
-// that the returned line consists only of pasted data. Programs may wish to
-// interpret pasted data more literally than typed data.
-var ErrPasteIndicator = pasteIndicatorError{}
-
-// SetBracketedPasteMode requests that the terminal bracket paste operations
-// with markers. Not all terminals support this but, if it is supported, then
-// enabling this mode will stop any autocomplete callback from running due to
-// pastes. Additionally, any lines that are completely pasted will be returned
-// from ReadLine with the error set to ErrPasteIndicator.
-func (t *Terminal) SetBracketedPasteMode(on bool) {
- if on {
- io.WriteString(t.c, "\x1b[?2004h")
- } else {
- io.WriteString(t.c, "\x1b[?2004l")
- }
-}
-
-// stRingBuffer is a ring buffer of strings.
-type stRingBuffer struct {
- // entries contains max elements.
- entries []string
- max int
- // head contains the index of the element most recently added to the ring.
- head int
- // size contains the number of elements in the ring.
- size int
-}
-
-func (s *stRingBuffer) Add(a string) {
- if s.entries == nil {
- const defaultNumEntries = 100
- s.entries = make([]string, defaultNumEntries)
- s.max = defaultNumEntries
- }
-
- s.head = (s.head + 1) % s.max
- s.entries[s.head] = a
- if s.size < s.max {
- s.size++
- }
-}
-
-// NthPreviousEntry returns the value passed to the nth previous call to Add.
-// If n is zero then the immediately prior value is returned, if one, then the
-// next most recent, and so on. If such an element doesn't exist then ok is
-// false.
-func (s *stRingBuffer) NthPreviousEntry(n int) (value string, ok bool) {
- if n >= s.size {
- return "", false
- }
- index := s.head - n
- if index < 0 {
- index += s.max
- }
- return s.entries[index], true
-}
diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util.go b/vendor/golang.org/x/crypto/ssh/terminal/util.go
deleted file mode 100644
index 0763c9a..0000000
--- a/vendor/golang.org/x/crypto/ssh/terminal/util.go
+++ /dev/null
@@ -1,128 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build darwin dragonfly freebsd linux,!appengine netbsd openbsd
-
-// Package terminal provides support functions for dealing with terminals, as
-// commonly found on UNIX systems.
-//
-// Putting a terminal into raw mode is the most common requirement:
-//
-// oldState, err := terminal.MakeRaw(0)
-// if err != nil {
-// panic(err)
-// }
-// defer terminal.Restore(0, oldState)
-package terminal
-
-import (
- "io"
- "syscall"
- "unsafe"
-)
-
-// State contains the state of a terminal.
-type State struct {
- termios syscall.Termios
-}
-
-// IsTerminal returns true if the given file descriptor is a terminal.
-func IsTerminal(fd int) bool {
- var termios syscall.Termios
- _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0)
- return err == 0
-}
-
-// MakeRaw put the terminal connected to the given file descriptor into raw
-// mode and returns the previous state of the terminal so that it can be
-// restored.
-func MakeRaw(fd int) (*State, error) {
- var oldState State
- if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState.termios)), 0, 0, 0); err != 0 {
- return nil, err
- }
-
- newState := oldState.termios
- newState.Iflag &^= syscall.ISTRIP | syscall.INLCR | syscall.ICRNL | syscall.IGNCR | syscall.IXON | syscall.IXOFF
- newState.Lflag &^= syscall.ECHO | syscall.ICANON | syscall.ISIG
- if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&newState)), 0, 0, 0); err != 0 {
- return nil, err
- }
-
- return &oldState, nil
-}
-
-// GetState returns the current state of a terminal which may be useful to
-// restore the terminal after a signal.
-func GetState(fd int) (*State, error) {
- var oldState State
- if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState.termios)), 0, 0, 0); err != 0 {
- return nil, err
- }
-
- return &oldState, nil
-}
-
-// Restore restores the terminal connected to the given file descriptor to a
-// previous state.
-func Restore(fd int, state *State) error {
- _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&state.termios)), 0, 0, 0)
- return err
-}
-
-// GetSize returns the dimensions of the given terminal.
-func GetSize(fd int) (width, height int, err error) {
- var dimensions [4]uint16
-
- if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), uintptr(syscall.TIOCGWINSZ), uintptr(unsafe.Pointer(&dimensions)), 0, 0, 0); err != 0 {
- return -1, -1, err
- }
- return int(dimensions[1]), int(dimensions[0]), nil
-}
-
-// ReadPassword reads a line of input from a terminal without local echo. This
-// is commonly used for inputting passwords and other sensitive data. The slice
-// returned does not include the \n.
-func ReadPassword(fd int) ([]byte, error) {
- var oldState syscall.Termios
- if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState)), 0, 0, 0); err != 0 {
- return nil, err
- }
-
- newState := oldState
- newState.Lflag &^= syscall.ECHO
- newState.Lflag |= syscall.ICANON | syscall.ISIG
- newState.Iflag |= syscall.ICRNL
- if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&newState)), 0, 0, 0); err != 0 {
- return nil, err
- }
-
- defer func() {
- syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&oldState)), 0, 0, 0)
- }()
-
- var buf [16]byte
- var ret []byte
- for {
- n, err := syscall.Read(fd, buf[:])
- if err != nil {
- return nil, err
- }
- if n == 0 {
- if len(ret) == 0 {
- return nil, io.EOF
- }
- break
- }
- if buf[n-1] == '\n' {
- n--
- }
- ret = append(ret, buf[:n]...)
- if n < len(buf) {
- break
- }
- }
-
- return ret, nil
-}
diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_bsd.go b/vendor/golang.org/x/crypto/ssh/terminal/util_bsd.go
deleted file mode 100644
index 9c1ffd1..0000000
--- a/vendor/golang.org/x/crypto/ssh/terminal/util_bsd.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build darwin dragonfly freebsd netbsd openbsd
-
-package terminal
-
-import "syscall"
-
-const ioctlReadTermios = syscall.TIOCGETA
-const ioctlWriteTermios = syscall.TIOCSETA
diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_linux.go b/vendor/golang.org/x/crypto/ssh/terminal/util_linux.go
deleted file mode 100644
index 5883b22..0000000
--- a/vendor/golang.org/x/crypto/ssh/terminal/util_linux.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package terminal
-
-// These constants are declared here, rather than importing
-// them from the syscall package as some syscall packages, even
-// on linux, for example gccgo, do not declare them.
-const ioctlReadTermios = 0x5401 // syscall.TCGETS
-const ioctlWriteTermios = 0x5402 // syscall.TCSETS
diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go b/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go
deleted file mode 100644
index 2dd6c3d..0000000
--- a/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go
+++ /dev/null
@@ -1,174 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build windows
-
-// Package terminal provides support functions for dealing with terminals, as
-// commonly found on UNIX systems.
-//
-// Putting a terminal into raw mode is the most common requirement:
-//
-// oldState, err := terminal.MakeRaw(0)
-// if err != nil {
-// panic(err)
-// }
-// defer terminal.Restore(0, oldState)
-package terminal
-
-import (
- "io"
- "syscall"
- "unsafe"
-)
-
-const (
- enableLineInput = 2
- enableEchoInput = 4
- enableProcessedInput = 1
- enableWindowInput = 8
- enableMouseInput = 16
- enableInsertMode = 32
- enableQuickEditMode = 64
- enableExtendedFlags = 128
- enableAutoPosition = 256
- enableProcessedOutput = 1
- enableWrapAtEolOutput = 2
-)
-
-var kernel32 = syscall.NewLazyDLL("kernel32.dll")
-
-var (
- procGetConsoleMode = kernel32.NewProc("GetConsoleMode")
- procSetConsoleMode = kernel32.NewProc("SetConsoleMode")
- procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
-)
-
-type (
- short int16
- word uint16
-
- coord struct {
- x short
- y short
- }
- smallRect struct {
- left short
- top short
- right short
- bottom short
- }
- consoleScreenBufferInfo struct {
- size coord
- cursorPosition coord
- attributes word
- window smallRect
- maximumWindowSize coord
- }
-)
-
-type State struct {
- mode uint32
-}
-
-// IsTerminal returns true if the given file descriptor is a terminal.
-func IsTerminal(fd int) bool {
- var st uint32
- r, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0)
- return r != 0 && e == 0
-}
-
-// MakeRaw put the terminal connected to the given file descriptor into raw
-// mode and returns the previous state of the terminal so that it can be
-// restored.
-func MakeRaw(fd int) (*State, error) {
- var st uint32
- _, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0)
- if e != 0 {
- return nil, error(e)
- }
- st &^= (enableEchoInput | enableProcessedInput | enableLineInput | enableProcessedOutput)
- _, _, e = syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(st), 0)
- if e != 0 {
- return nil, error(e)
- }
- return &State{st}, nil
-}
-
-// GetState returns the current state of a terminal which may be useful to
-// restore the terminal after a signal.
-func GetState(fd int) (*State, error) {
- var st uint32
- _, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0)
- if e != 0 {
- return nil, error(e)
- }
- return &State{st}, nil
-}
-
-// Restore restores the terminal connected to the given file descriptor to a
-// previous state.
-func Restore(fd int, state *State) error {
- _, _, err := syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(state.mode), 0)
- return err
-}
-
-// GetSize returns the dimensions of the given terminal.
-func GetSize(fd int) (width, height int, err error) {
- var info consoleScreenBufferInfo
- _, _, e := syscall.Syscall(procGetConsoleScreenBufferInfo.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&info)), 0)
- if e != 0 {
- return 0, 0, error(e)
- }
- return int(info.size.x), int(info.size.y), nil
-}
-
-// ReadPassword reads a line of input from a terminal without local echo. This
-// is commonly used for inputting passwords and other sensitive data. The slice
-// returned does not include the \n.
-func ReadPassword(fd int) ([]byte, error) {
- var st uint32
- _, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0)
- if e != 0 {
- return nil, error(e)
- }
- old := st
-
- st &^= (enableEchoInput)
- st |= (enableProcessedInput | enableLineInput | enableProcessedOutput)
- _, _, e = syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(st), 0)
- if e != 0 {
- return nil, error(e)
- }
-
- defer func() {
- syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(old), 0)
- }()
-
- var buf [16]byte
- var ret []byte
- for {
- n, err := syscall.Read(syscall.Handle(fd), buf[:])
- if err != nil {
- return nil, err
- }
- if n == 0 {
- if len(ret) == 0 {
- return nil, io.EOF
- }
- break
- }
- if buf[n-1] == '\n' {
- n--
- }
- if n > 0 && buf[n-1] == '\r' {
- n--
- }
- ret = append(ret, buf[:n]...)
- if n < len(buf) {
- break
- }
- }
-
- return ret, nil
-}
diff --git a/vendor/google.golang.org/api/bigquery/v2/bigquery-api.json b/vendor/google.golang.org/api/bigquery/v2/bigquery-api.json
deleted file mode 100644
index 4058ba6..0000000
--- a/vendor/google.golang.org/api/bigquery/v2/bigquery-api.json
+++ /dev/null
@@ -1,2209 +0,0 @@
-{
- "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/n2LVhGPabQO3DmbKxkomJprJEEo\"",
- "discoveryVersion": "v1",
- "id": "bigquery:v2",
- "name": "bigquery",
- "version": "v2",
- "revision": "20141112",
- "title": "BigQuery API",
- "description": "A data platform for customers to create, manage, share and query data.",
- "ownerDomain": "google.com",
- "ownerName": "Google",
- "icons": {
- "x16": "https://www.google.com/images/icons/product/search-16.gif",
- "x32": "https://www.google.com/images/icons/product/search-32.gif"
- },
- "documentationLink": "https://cloud.google.com/bigquery/",
- "protocol": "rest",
- "baseUrl": "https://www.googleapis.com/bigquery/v2/",
- "basePath": "/bigquery/v2/",
- "rootUrl": "https://www.googleapis.com/",
- "servicePath": "bigquery/v2/",
- "batchPath": "batch",
- "parameters": {
- "alt": {
- "type": "string",
- "description": "Data format for the response.",
- "default": "json",
- "enum": [
- "csv",
- "json"
- ],
- "enumDescriptions": [
- "Responses with Content-Type of text/csv",
- "Responses with Content-Type of application/json"
- ],
- "location": "query"
- },
- "fields": {
- "type": "string",
- "description": "Selector specifying which fields to include in a partial response.",
- "location": "query"
- },
- "key": {
- "type": "string",
- "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
- "location": "query"
- },
- "oauth_token": {
- "type": "string",
- "description": "OAuth 2.0 token for the current user.",
- "location": "query"
- },
- "prettyPrint": {
- "type": "boolean",
- "description": "Returns response with indentations and line breaks.",
- "default": "true",
- "location": "query"
- },
- "quotaUser": {
- "type": "string",
- "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.",
- "location": "query"
- },
- "userIp": {
- "type": "string",
- "description": "IP address of the site where the request originates. Use this if you want to enforce per-user limits.",
- "location": "query"
- }
- },
- "auth": {
- "oauth2": {
- "scopes": {
- "https://www.googleapis.com/auth/bigquery": {
- "description": "View and manage your data in Google BigQuery"
- },
- "https://www.googleapis.com/auth/bigquery.insertdata": {
- "description": "Insert data into Google BigQuery"
- },
- "https://www.googleapis.com/auth/cloud-platform": {
- "description": "View and manage your data across Google Cloud Platform services"
- },
- "https://www.googleapis.com/auth/devstorage.full_control": {
- "description": "Manage your data and permissions in Google Cloud Storage"
- },
- "https://www.googleapis.com/auth/devstorage.read_only": {
- "description": "View your data in Google Cloud Storage"
- },
- "https://www.googleapis.com/auth/devstorage.read_write": {
- "description": "Manage your data in Google Cloud Storage"
- }
- }
- }
- },
- "schemas": {
- "CsvOptions": {
- "id": "CsvOptions",
- "type": "object",
- "properties": {
- "allowJaggedRows": {
- "type": "boolean",
- "description": "[Optional] Indicates if BigQuery should accept rows that are missing trailing optional columns. If true, BigQuery treats missing trailing columns as null values. If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false."
- },
- "allowQuotedNewlines": {
- "type": "boolean",
- "description": "[Optional] Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false."
- },
- "encoding": {
- "type": "string",
- "description": "[Optional] The character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split using the values of the quote and fieldDelimiter properties."
- },
- "fieldDelimiter": {
- "type": "string",
- "description": "[Optional] The separator for fields in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. BigQuery also supports the escape sequence \"\\t\" to specify a tab separator. The default value is a comma (',')."
- },
- "quote": {
- "type": "string",
- "description": "[Optional] The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. The default value is a double-quote ('\"'). If your data does not contain quoted sections, set the property value to an empty string. If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true."
- },
- "skipLeadingRows": {
- "type": "integer",
- "description": "[Optional] The number of rows at the top of a CSV file that BigQuery will skip when reading the data. The default value is 0. This property is useful if you have header rows in the file that should be skipped.",
- "format": "int32"
- }
- }
- },
- "Dataset": {
- "id": "Dataset",
- "type": "object",
- "properties": {
- "access": {
- "type": "array",
- "description": "[Optional] An array of objects that define dataset access for one or more entities. You can set this property when inserting or updating a dataset in order to control who is allowed to access the data. If unspecified at dataset creation time, BigQuery adds default dataset access for the following entities: access.specialGroup: projectReaders; access.role: READER; access.specialGroup: projectWriters; access.role: WRITER; access.specialGroup: projectOwners; access.role: OWNER; access.userByEmail: [dataset creator email]; access.role: OWNER;",
- "items": {
- "type": "object",
- "properties": {
- "domain": {
- "type": "string",
- "description": "[Pick one] A domain to grant access to. Any users signed in with the domain specified will be granted the specified access. Example: \"example.com\"."
- },
- "groupByEmail": {
- "type": "string",
- "description": "[Pick one] An email address of a Google Group to grant access to."
- },
- "role": {
- "type": "string",
- "description": "[Required] Describes the rights granted to the user specified by the other member of the access object. The following string values are supported: READER, WRITER, OWNER."
- },
- "specialGroup": {
- "type": "string",
- "description": "[Pick one] A special group to grant access to. Possible values include: projectOwners: Owners of the enclosing project. projectReaders: Readers of the enclosing project. projectWriters: Writers of the enclosing project. allAuthenticatedUsers: All authenticated BigQuery users."
- },
- "userByEmail": {
- "type": "string",
- "description": "[Pick one] An email address of a user to grant access to. For example: fred@example.com."
- },
- "view": {
- "$ref": "TableReference",
- "description": "[Pick one] A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation."
- }
- }
- }
- },
- "creationTime": {
- "type": "string",
- "description": "[Output-only] The time when this dataset was created, in milliseconds since the epoch.",
- "format": "int64"
- },
- "datasetReference": {
- "$ref": "DatasetReference",
- "description": "[Required] A reference that identifies the dataset."
- },
- "defaultTableExpirationMs": {
- "type": "string",
- "description": "[Experimental] The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.",
- "format": "int64"
- },
- "description": {
- "type": "string",
- "description": "[Optional] A user-friendly description of the dataset."
- },
- "etag": {
- "type": "string",
- "description": "[Output-only] A hash of the resource."
- },
- "friendlyName": {
- "type": "string",
- "description": "[Optional] A descriptive name for the dataset."
- },
- "id": {
- "type": "string",
- "description": "[Output-only] The fully-qualified unique name of the dataset in the format projectId:datasetId. The dataset name without the project name is given in the datasetId field. When creating a new dataset, leave this field blank, and instead specify the datasetId field."
- },
- "kind": {
- "type": "string",
- "description": "[Output-only] The resource type.",
- "default": "bigquery#dataset"
- },
- "lastModifiedTime": {
- "type": "string",
- "description": "[Output-only] The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.",
- "format": "int64"
- },
- "location": {
- "type": "string",
- "description": "[Experimental] The location where the data resides. If not present, the data will be stored in the US."
- },
- "selfLink": {
- "type": "string",
- "description": "[Output-only] A URL that can be used to access the resource again. You can use this URL in Get or Update requests to the resource."
- }
- }
- },
- "DatasetList": {
- "id": "DatasetList",
- "type": "object",
- "properties": {
- "datasets": {
- "type": "array",
- "description": "An array of the dataset resources in the project. Each resource contains basic information. For full information about a particular dataset resource, use the Datasets: get method. This property is omitted when there are no datasets in the project.",
- "items": {
- "type": "object",
- "properties": {
- "datasetReference": {
- "$ref": "DatasetReference",
- "description": "The dataset reference. Use this property to access specific parts of the dataset's ID, such as project ID or dataset ID."
- },
- "friendlyName": {
- "type": "string",
- "description": "A descriptive name for the dataset, if one exists."
- },
- "id": {
- "type": "string",
- "description": "The fully-qualified, unique, opaque ID of the dataset."
- },
- "kind": {
- "type": "string",
- "description": "The resource type. This property always returns the value \"bigquery#dataset\".",
- "default": "bigquery#dataset"
- }
- }
- }
- },
- "etag": {
- "type": "string",
- "description": "A hash value of the results page. You can use this property to determine if the page has changed since the last request."
- },
- "kind": {
- "type": "string",
- "description": "The list type. This property always returns the value \"bigquery#datasetList\".",
- "default": "bigquery#datasetList"
- },
- "nextPageToken": {
- "type": "string",
- "description": "A token that can be used to request the next results page. This property is omitted on the final results page."
- }
- }
- },
- "DatasetReference": {
- "id": "DatasetReference",
- "type": "object",
- "properties": {
- "datasetId": {
- "type": "string",
- "description": "[Required] A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.",
- "annotations": {
- "required": [
- "bigquery.datasets.update"
- ]
- }
- },
- "projectId": {
- "type": "string",
- "description": "[Optional] The ID of the project containing this dataset.",
- "annotations": {
- "required": [
- "bigquery.datasets.update"
- ]
- }
- }
- }
- },
- "ErrorProto": {
- "id": "ErrorProto",
- "type": "object",
- "properties": {
- "debugInfo": {
- "type": "string",
- "description": "Debugging information. This property is internal to Google and should not be used."
- },
- "location": {
- "type": "string",
- "description": "Specifies where the error occurred, if present."
- },
- "message": {
- "type": "string",
- "description": "A human-readable description of the error."
- },
- "reason": {
- "type": "string",
- "description": "A short error code that summarizes the error."
- }
- }
- },
- "ExternalDataConfiguration": {
- "id": "ExternalDataConfiguration",
- "type": "object",
- "properties": {
- "compression": {
- "type": "string",
- "description": "[Optional] The compression type of the data source. Possible values include GZIP and NONE. The default value is NONE."
- },
- "csvOptions": {
- "$ref": "CsvOptions",
- "description": "Additional properties to set if sourceFormat is set to CSV."
- },
- "ignoreUnknownValues": {
- "type": "boolean",
- "description": "[Optional] Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. The sourceFormat property determines what BigQuery treats as an extra value: CSV: Trailing columns"
- },
- "maxBadRecords": {
- "type": "integer",
- "description": "[Optional] The maximum number of bad records that BigQuery can ignore when reading data. If the number of bad records exceeds this value, an invalid error is returned in the job result. The default value is 0, which requires that all records are valid.",
- "format": "int32"
- },
- "schema": {
- "$ref": "TableSchema",
- "description": "[Required] The schema for the data."
- },
- "sourceFormat": {
- "type": "string",
- "description": "[Optional] The data format. External data sources must be in CSV format. The default value is CSV."
- },
- "sourceUris": {
- "type": "array",
- "description": "[Required] The fully-qualified URIs that point to your data in Google Cloud Storage. Each URI can contain one '*' wildcard character and it must come after the 'bucket' name. CSV limits related to load jobs apply to external data sources, plus an additional limit of 10 GB maximum size across all URIs.",
- "items": {
- "type": "string"
- }
- }
- }
- },
- "GetQueryResultsResponse": {
- "id": "GetQueryResultsResponse",
- "type": "object",
- "properties": {
- "cacheHit": {
- "type": "boolean",
- "description": "Whether the query result was fetched from the query cache."
- },
- "etag": {
- "type": "string",
- "description": "A hash of this response."
- },
- "jobComplete": {
- "type": "boolean",
- "description": "Whether the query has completed or not. If rows or totalRows are present, this will always be true. If this is false, totalRows will not be available."
- },
- "jobReference": {
- "$ref": "JobReference",
- "description": "Reference to the BigQuery Job that was created to run the query. This field will be present even if the original request timed out, in which case GetQueryResults can be used to read the results once the query has completed. Since this API only returns the first page of results, subsequent pages can be fetched via the same mechanism (GetQueryResults)."
- },
- "kind": {
- "type": "string",
- "description": "The resource type of the response.",
- "default": "bigquery#getQueryResultsResponse"
- },
- "pageToken": {
- "type": "string",
- "description": "A token used for paging results."
- },
- "rows": {
- "type": "array",
- "description": "An object with as many results as can be contained within the maximum permitted reply size. To get any additional rows, you can call GetQueryResults and specify the jobReference returned above. Present only when the query completes successfully.",
- "items": {
- "$ref": "TableRow"
- }
- },
- "schema": {
- "$ref": "TableSchema",
- "description": "The schema of the results. Present only when the query completes successfully."
- },
- "totalBytesProcessed": {
- "type": "string",
- "description": "The total number of bytes processed for this query.",
- "format": "int64"
- },
- "totalRows": {
- "type": "string",
- "description": "The total number of rows in the complete query result set, which can be more than the number of rows in this single page of results. Present only when the query completes successfully.",
- "format": "uint64"
- }
- }
- },
- "Job": {
- "id": "Job",
- "type": "object",
- "properties": {
- "configuration": {
- "$ref": "JobConfiguration",
- "description": "[Required] Describes the job configuration."
- },
- "etag": {
- "type": "string",
- "description": "[Output-only] A hash of this resource."
- },
- "id": {
- "type": "string",
- "description": "[Output-only] Opaque ID field of the job"
- },
- "jobReference": {
- "$ref": "JobReference",
- "description": "[Optional] Reference describing the unique-per-user name of the job."
- },
- "kind": {
- "type": "string",
- "description": "[Output-only] The type of the resource.",
- "default": "bigquery#job"
- },
- "selfLink": {
- "type": "string",
- "description": "[Output-only] A URL that can be used to access this resource again."
- },
- "statistics": {
- "$ref": "JobStatistics",
- "description": "[Output-only] Information about the job, including starting time and ending time of the job."
- },
- "status": {
- "$ref": "JobStatus",
- "description": "[Output-only] The status of this job. Examine this value when polling an asynchronous job to see if the job is complete."
- },
- "user_email": {
- "type": "string",
- "description": "[Output-only] Email address of the user who ran the job."
- }
- }
- },
- "JobConfiguration": {
- "id": "JobConfiguration",
- "type": "object",
- "properties": {
- "copy": {
- "$ref": "JobConfigurationTableCopy",
- "description": "[Pick one] Copies a table."
- },
- "dryRun": {
- "type": "boolean",
- "description": "[Optional] If set, don't actually run this job. A valid query will return a mostly empty response with some processing statistics, while an invalid query will return the same error it would if it wasn't a dry run. Behavior of non-query jobs is undefined."
- },
- "extract": {
- "$ref": "JobConfigurationExtract",
- "description": "[Pick one] Configures an extract job."
- },
- "link": {
- "$ref": "JobConfigurationLink",
- "description": "[Pick one] Configures a link job."
- },
- "load": {
- "$ref": "JobConfigurationLoad",
- "description": "[Pick one] Configures a load job."
- },
- "query": {
- "$ref": "JobConfigurationQuery",
- "description": "[Pick one] Configures a query job."
- }
- }
- },
- "JobConfigurationExtract": {
- "id": "JobConfigurationExtract",
- "type": "object",
- "properties": {
- "compression": {
- "type": "string",
- "description": "[Optional] The compression type to use for exported files. Possible values include GZIP and NONE. The default value is NONE."
- },
- "destinationFormat": {
- "type": "string",
- "description": "[Optional] The exported file format. Possible values include CSV, NEWLINE_DELIMITED_JSON and AVRO. The default value is CSV. Tables with nested or repeated fields cannot be exported as CSV."
- },
- "destinationUri": {
- "type": "string",
- "description": "[Pick one] DEPRECATED: Use destinationUris instead, passing only one URI as necessary. The fully-qualified Google Cloud Storage URI where the extracted table should be written."
- },
- "destinationUris": {
- "type": "array",
- "description": "[Pick one] A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written.",
- "items": {
- "type": "string"
- }
- },
- "fieldDelimiter": {
- "type": "string",
- "description": "[Optional] Delimiter to use between fields in the exported data. Default is ','"
- },
- "printHeader": {
- "type": "boolean",
- "description": "[Optional] Whether to print out a header row in the results. Default is true."
- },
- "sourceTable": {
- "$ref": "TableReference",
- "description": "[Required] A reference to the table being exported."
- }
- }
- },
- "JobConfigurationLink": {
- "id": "JobConfigurationLink",
- "type": "object",
- "properties": {
- "createDisposition": {
- "type": "string",
- "description": "[Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion."
- },
- "destinationTable": {
- "$ref": "TableReference",
- "description": "[Required] The destination table of the link job."
- },
- "sourceUri": {
- "type": "array",
- "description": "[Required] URI of source table to link.",
- "items": {
- "type": "string"
- }
- },
- "writeDisposition": {
- "type": "string",
- "description": "[Optional] Specifies the action that occurs if the destination table already exists. The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion."
- }
- }
- },
- "JobConfigurationLoad": {
- "id": "JobConfigurationLoad",
- "type": "object",
- "properties": {
- "allowJaggedRows": {
- "type": "boolean",
- "description": "[Optional] Accept rows that are missing trailing optional columns. The missing values are treated as nulls. If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats."
- },
- "allowQuotedNewlines": {
- "type": "boolean",
- "description": "Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false."
- },
- "createDisposition": {
- "type": "string",
- "description": "[Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion."
- },
- "destinationTable": {
- "$ref": "TableReference",
- "description": "[Required] The destination table to load the data into."
- },
- "encoding": {
- "type": "string",
- "description": "[Optional] The character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split using the values of the quote and fieldDelimiter properties."
- },
- "fieldDelimiter": {
- "type": "string",
- "description": "[Optional] The separator for fields in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. BigQuery also supports the escape sequence \"\\t\" to specify a tab separator. The default value is a comma (',')."
- },
- "ignoreUnknownValues": {
- "type": "boolean",
- "description": "[Optional] Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. The sourceFormat property determines what BigQuery treats as an extra value: CSV: Trailing columns JSON: Named values that don't match any column names"
- },
- "maxBadRecords": {
- "type": "integer",
- "description": "[Optional] The maximum number of bad records that BigQuery can ignore when running the job. If the number of bad records exceeds this value, an invalid error is returned in the job result. The default value is 0, which requires that all records are valid.",
- "format": "int32"
- },
- "projectionFields": {
- "type": "array",
- "description": "[Experimental] If sourceFormat is set to \"DATASTORE_BACKUP\", indicates which entity properties to load into BigQuery from a Cloud Datastore backup. Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties. If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result.",
- "items": {
- "type": "string"
- }
- },
- "quote": {
- "type": "string",
- "description": "[Optional] The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. The default value is a double-quote ('\"'). If your data does not contain quoted sections, set the property value to an empty string. If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true."
- },
- "schema": {
- "$ref": "TableSchema",
- "description": "[Optional] The schema for the destination table. The schema can be omitted if the destination table already exists or if the schema can be inferred from the loaded data."
- },
- "schemaInline": {
- "type": "string",
- "description": "[Deprecated] The inline schema. For CSV schemas, specify as \"Field1:Type1[,Field2:Type2]*\". For example, \"foo:STRING, bar:INTEGER, baz:FLOAT\"."
- },
- "schemaInlineFormat": {
- "type": "string",
- "description": "[Deprecated] The format of the schemaInline property."
- },
- "skipLeadingRows": {
- "type": "integer",
- "description": "[Optional] The number of rows at the top of a CSV file that BigQuery will skip when loading the data. The default value is 0. This property is useful if you have header rows in the file that should be skipped.",
- "format": "int32"
- },
- "sourceFormat": {
- "type": "string",
- "description": "[Optional] The format of the data files. For CSV files, specify \"CSV\". For datastore backups, specify \"DATASTORE_BACKUP\". For newline-delimited JSON, specify \"NEWLINE_DELIMITED_JSON\". The default value is CSV."
- },
- "sourceUris": {
- "type": "array",
- "description": "[Required] The fully-qualified URIs that point to your data in Google Cloud Storage. Each URI can contain one '*' wildcard character and it must come after the 'bucket' name.",
- "items": {
- "type": "string"
- }
- },
- "writeDisposition": {
- "type": "string",
- "description": "[Optional] Specifies the action that occurs if the destination table already exists. The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion."
- }
- }
- },
- "JobConfigurationQuery": {
- "id": "JobConfigurationQuery",
- "type": "object",
- "properties": {
- "allowLargeResults": {
- "type": "boolean",
- "description": "If true, allows the query to produce arbitrarily large result tables at a slight cost in performance. Requires destinationTable to be set."
- },
- "createDisposition": {
- "type": "string",
- "description": "[Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion."
- },
- "defaultDataset": {
- "$ref": "DatasetReference",
- "description": "[Optional] Specifies the default dataset to use for unqualified table names in the query."
- },
- "destinationTable": {
- "$ref": "TableReference",
- "description": "[Optional] Describes the table where the query results should be stored. If not present, a new table will be created to store the results."
- },
- "flattenResults": {
- "type": "boolean",
- "description": "[Optional] Flattens all nested and repeated fields in the query results. The default value is true. allowLargeResults must be true if this is set to false."
- },
- "preserveNulls": {
- "type": "boolean",
- "description": "[Deprecated] This property is deprecated."
- },
- "priority": {
- "type": "string",
- "description": "[Optional] Specifies a priority for the query. Possible values include INTERACTIVE and BATCH. The default value is INTERACTIVE."
- },
- "query": {
- "type": "string",
- "description": "[Required] BigQuery SQL query to execute."
- },
- "tableDefinitions": {
- "type": "object",
- "description": "[Experimental] If querying an external data source outside of BigQuery, describes the data format, location and other properties of the data source. By defining these properties, the data source can then be queried as if it were a standard BigQuery table.",
- "additionalProperties": {
- "$ref": "ExternalDataConfiguration"
- }
- },
- "useQueryCache": {
- "type": "boolean",
- "description": "[Optional] Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever tables in the query are modified. Moreover, the query cache is only available when a query does not have a destination table specified."
- },
- "writeDisposition": {
- "type": "string",
- "description": "[Optional] Specifies the action that occurs if the destination table already exists. The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion."
- }
- }
- },
- "JobConfigurationTableCopy": {
- "id": "JobConfigurationTableCopy",
- "type": "object",
- "properties": {
- "createDisposition": {
- "type": "string",
- "description": "[Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion."
- },
- "destinationTable": {
- "$ref": "TableReference",
- "description": "[Required] The destination table"
- },
- "sourceTable": {
- "$ref": "TableReference",
- "description": "[Pick one] Source table to copy."
- },
- "sourceTables": {
- "type": "array",
- "description": "[Pick one] Source tables to copy.",
- "items": {
- "$ref": "TableReference"
- }
- },
- "writeDisposition": {
- "type": "string",
- "description": "[Optional] Specifies the action that occurs if the destination table already exists. The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion."
- }
- }
- },
- "JobList": {
- "id": "JobList",
- "type": "object",
- "properties": {
- "etag": {
- "type": "string",
- "description": "A hash of this page of results."
- },
- "jobs": {
- "type": "array",
- "description": "List of jobs that were requested.",
- "items": {
- "type": "object",
- "properties": {
- "configuration": {
- "$ref": "JobConfiguration",
- "description": "[Full-projection-only] Specifies the job configuration."
- },
- "errorResult": {
- "$ref": "ErrorProto",
- "description": "A result object that will be present only if the job has failed."
- },
- "id": {
- "type": "string",
- "description": "Unique opaque ID of the job."
- },
- "jobReference": {
- "$ref": "JobReference",
- "description": "Job reference uniquely identifying the job."
- },
- "kind": {
- "type": "string",
- "description": "The resource type.",
- "default": "bigquery#job"
- },
- "state": {
- "type": "string",
- "description": "Running state of the job. When the state is DONE, errorResult can be checked to determine whether the job succeeded or failed."
- },
- "statistics": {
- "$ref": "JobStatistics",
- "description": "[Output-only] Information about the job, including starting time and ending time of the job."
- },
- "status": {
- "$ref": "JobStatus",
- "description": "[Full-projection-only] Describes the state of the job."
- },
- "user_email": {
- "type": "string",
- "description": "[Full-projection-only] Email address of the user who ran the job."
- }
- }
- }
- },
- "kind": {
- "type": "string",
- "description": "The resource type of the response.",
- "default": "bigquery#jobList"
- },
- "nextPageToken": {
- "type": "string",
- "description": "A token to request the next page of results."
- },
- "totalItems": {
- "type": "integer",
- "description": "Total number of jobs in this collection.",
- "format": "int32"
- }
- }
- },
- "JobReference": {
- "id": "JobReference",
- "type": "object",
- "properties": {
- "jobId": {
- "type": "string",
- "description": "[Required] The ID of the job. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters.",
- "annotations": {
- "required": [
- "bigquery.jobs.getQueryResults"
- ]
- }
- },
- "projectId": {
- "type": "string",
- "description": "[Required] The ID of the project containing this job.",
- "annotations": {
- "required": [
- "bigquery.jobs.getQueryResults"
- ]
- }
- }
- }
- },
- "JobStatistics": {
- "id": "JobStatistics",
- "type": "object",
- "properties": {
- "creationTime": {
- "type": "string",
- "description": "[Output-only] Creation time of this job, in milliseconds since the epoch. This field will be present on all jobs.",
- "format": "int64"
- },
- "endTime": {
- "type": "string",
- "description": "[Output-only] End time of this job, in milliseconds since the epoch. This field will be present whenever a job is in the DONE state.",
- "format": "int64"
- },
- "extract": {
- "$ref": "JobStatistics4",
- "description": "[Output-only] Statistics for an extract job."
- },
- "load": {
- "$ref": "JobStatistics3",
- "description": "[Output-only] Statistics for a load job."
- },
- "query": {
- "$ref": "JobStatistics2",
- "description": "[Output-only] Statistics for a query job."
- },
- "startTime": {
- "type": "string",
- "description": "[Output-only] Start time of this job, in milliseconds since the epoch. This field will be present when the job transitions from the PENDING state to either RUNNING or DONE.",
- "format": "int64"
- },
- "totalBytesProcessed": {
- "type": "string",
- "description": "[Output-only] [Deprecated] Use the bytes processed in the query statistics instead.",
- "format": "int64"
- }
- }
- },
- "JobStatistics2": {
- "id": "JobStatistics2",
- "type": "object",
- "properties": {
- "cacheHit": {
- "type": "boolean",
- "description": "[Output-only] Whether the query result was fetched from the query cache."
- },
- "totalBytesProcessed": {
- "type": "string",
- "description": "[Output-only] Total bytes processed for this job.",
- "format": "int64"
- }
- }
- },
- "JobStatistics3": {
- "id": "JobStatistics3",
- "type": "object",
- "properties": {
- "inputFileBytes": {
- "type": "string",
- "description": "[Output-only] Number of bytes of source data in a joad job.",
- "format": "int64"
- },
- "inputFiles": {
- "type": "string",
- "description": "[Output-only] Number of source files in a load job.",
- "format": "int64"
- },
- "outputBytes": {
- "type": "string",
- "description": "[Output-only] Size of the loaded data in bytes. Note that while an import job is in the running state, this value may change.",
- "format": "int64"
- },
- "outputRows": {
- "type": "string",
- "description": "[Output-only] Number of rows imported in a load job. Note that while an import job is in the running state, this value may change.",
- "format": "int64"
- }
- }
- },
- "JobStatistics4": {
- "id": "JobStatistics4",
- "type": "object",
- "properties": {
- "destinationUriFileCounts": {
- "type": "array",
- "description": "[Experimental] Number of files per destination URI or URI pattern specified in the extract configuration. These values will be in the same order as the URIs specified in the 'destinationUris' field.",
- "items": {
- "type": "string",
- "format": "int64"
- }
- }
- }
- },
- "JobStatus": {
- "id": "JobStatus",
- "type": "object",
- "properties": {
- "errorResult": {
- "$ref": "ErrorProto",
- "description": "[Output-only] Final error result of the job. If present, indicates that the job has completed and was unsuccessful."
- },
- "errors": {
- "type": "array",
- "description": "[Output-only] All errors encountered during the running of the job. Errors here do not necessarily mean that the job has completed or was unsuccessful.",
- "items": {
- "$ref": "ErrorProto"
- }
- },
- "state": {
- "type": "string",
- "description": "[Output-only] Running state of the job."
- }
- }
- },
- "JsonObject": {
- "id": "JsonObject",
- "type": "object",
- "description": "Represents a single JSON object.",
- "additionalProperties": {
- "$ref": "JsonValue"
- }
- },
- "JsonValue": {
- "id": "JsonValue",
- "type": "any"
- },
- "ProjectList": {
- "id": "ProjectList",
- "type": "object",
- "properties": {
- "etag": {
- "type": "string",
- "description": "A hash of the page of results"
- },
- "kind": {
- "type": "string",
- "description": "The type of list.",
- "default": "bigquery#projectList"
- },
- "nextPageToken": {
- "type": "string",
- "description": "A token to request the next page of results."
- },
- "projects": {
- "type": "array",
- "description": "Projects to which you have at least READ access.",
- "items": {
- "type": "object",
- "properties": {
- "friendlyName": {
- "type": "string",
- "description": "A descriptive name for this project."
- },
- "id": {
- "type": "string",
- "description": "An opaque ID of this project."
- },
- "kind": {
- "type": "string",
- "description": "The resource type.",
- "default": "bigquery#project"
- },
- "numericId": {
- "type": "string",
- "description": "The numeric ID of this project.",
- "format": "uint64"
- },
- "projectReference": {
- "$ref": "ProjectReference",
- "description": "A unique reference to this project."
- }
- }
- }
- },
- "totalItems": {
- "type": "integer",
- "description": "The total number of projects in the list.",
- "format": "int32"
- }
- }
- },
- "ProjectReference": {
- "id": "ProjectReference",
- "type": "object",
- "properties": {
- "projectId": {
- "type": "string",
- "description": "[Required] ID of the project. Can be either the numeric ID or the assigned ID of the project."
- }
- }
- },
- "QueryRequest": {
- "id": "QueryRequest",
- "type": "object",
- "properties": {
- "defaultDataset": {
- "$ref": "DatasetReference",
- "description": "[Optional] Specifies the default datasetId and projectId to assume for any unqualified table names in the query. If not set, all table names in the query string must be qualified in the format 'datasetId.tableId'."
- },
- "dryRun": {
- "type": "boolean",
- "description": "[Optional] If set, don't actually run this job. A valid query will return a mostly empty response with some processing statistics, while an invalid query will return the same error it would if it wasn't a dry run."
- },
- "kind": {
- "type": "string",
- "description": "The resource type of the request.",
- "default": "bigquery#queryRequest"
- },
- "maxResults": {
- "type": "integer",
- "description": "[Optional] The maximum number of rows of data to return per page of results. Setting this flag to a small value such as 1000 and then paging through results might improve reliability when the query result set is large. In addition to this limit, responses are also limited to 10 MB. By default, there is no maximum row count, and only the byte limit applies.",
- "format": "uint32"
- },
- "preserveNulls": {
- "type": "boolean",
- "description": "[Deprecated] This property is deprecated."
- },
- "query": {
- "type": "string",
- "description": "[Required] A query string, following the BigQuery query syntax, of the query to execute. Example: \"SELECT count(f1) FROM [myProjectId:myDatasetId.myTableId]\".",
- "annotations": {
- "required": [
- "bigquery.jobs.query"
- ]
- }
- },
- "timeoutMs": {
- "type": "integer",
- "description": "[Optional] How long to wait for the query to complete, in milliseconds, before the request times out and returns. Note that this is only a timeout for the request, not the query. If the query takes longer to run than the timeout value, the call returns without any results and with the 'jobComplete' flag set to false. You can call GetQueryResults() to wait for the query to complete and read the results. The default value is 10000 milliseconds (10 seconds).",
- "format": "uint32"
- },
- "useQueryCache": {
- "type": "boolean",
- "description": "[Optional] Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever tables in the query are modified. The default value is true."
- }
- }
- },
- "QueryResponse": {
- "id": "QueryResponse",
- "type": "object",
- "properties": {
- "cacheHit": {
- "type": "boolean",
- "description": "Whether the query result was fetched from the query cache."
- },
- "jobComplete": {
- "type": "boolean",
- "description": "Whether the query has completed or not. If rows or totalRows are present, this will always be true. If this is false, totalRows will not be available."
- },
- "jobReference": {
- "$ref": "JobReference",
- "description": "Reference to the Job that was created to run the query. This field will be present even if the original request timed out, in which case GetQueryResults can be used to read the results once the query has completed. Since this API only returns the first page of results, subsequent pages can be fetched via the same mechanism (GetQueryResults)."
- },
- "kind": {
- "type": "string",
- "description": "The resource type.",
- "default": "bigquery#queryResponse"
- },
- "pageToken": {
- "type": "string",
- "description": "A token used for paging results."
- },
- "rows": {
- "type": "array",
- "description": "An object with as many results as can be contained within the maximum permitted reply size. To get any additional rows, you can call GetQueryResults and specify the jobReference returned above.",
- "items": {
- "$ref": "TableRow"
- }
- },
- "schema": {
- "$ref": "TableSchema",
- "description": "The schema of the results. Present only when the query completes successfully."
- },
- "totalBytesProcessed": {
- "type": "string",
- "description": "The total number of bytes processed for this query. If this query was a dry run, this is the number of bytes that would be processed if the query were run.",
- "format": "int64"
- },
- "totalRows": {
- "type": "string",
- "description": "The total number of rows in the complete query result set, which can be more than the number of rows in this single page of results.",
- "format": "uint64"
- }
- }
- },
- "Table": {
- "id": "Table",
- "type": "object",
- "properties": {
- "creationTime": {
- "type": "string",
- "description": "[Output-only] The time when this table was created, in milliseconds since the epoch.",
- "format": "int64"
- },
- "description": {
- "type": "string",
- "description": "[Optional] A user-friendly description of this table."
- },
- "etag": {
- "type": "string",
- "description": "[Output-only] A hash of this resource."
- },
- "expirationTime": {
- "type": "string",
- "description": "[Optional] The time when this table expires, in milliseconds since the epoch. If not present, the table will persist indefinitely. Expired tables will be deleted and their storage reclaimed.",
- "format": "int64"
- },
- "friendlyName": {
- "type": "string",
- "description": "[Optional] A descriptive name for this table."
- },
- "id": {
- "type": "string",
- "description": "[Output-only] An opaque ID uniquely identifying the table."
- },
- "kind": {
- "type": "string",
- "description": "[Output-only] The type of the resource.",
- "default": "bigquery#table"
- },
- "lastModifiedTime": {
- "type": "string",
- "description": "[Output-only] The time when this table was last modified, in milliseconds since the epoch.",
- "format": "uint64"
- },
- "numBytes": {
- "type": "string",
- "description": "[Output-only] The size of the table in bytes. This property is unavailable for tables that are actively receiving streaming inserts.",
- "format": "int64"
- },
- "numRows": {
- "type": "string",
- "description": "[Output-only] The number of rows of data in this table. This property is unavailable for tables that are actively receiving streaming inserts.",
- "format": "uint64"
- },
- "schema": {
- "$ref": "TableSchema",
- "description": "[Optional] Describes the schema of this table."
- },
- "selfLink": {
- "type": "string",
- "description": "[Output-only] A URL that can be used to access this resource again."
- },
- "tableReference": {
- "$ref": "TableReference",
- "description": "[Required] Reference describing the ID of this table."
- },
- "type": {
- "type": "string",
- "description": "[Output-only] Describes the table type. The following values are supported: TABLE: A normal BigQuery table. VIEW: A virtual table defined by a SQL query. The default value is TABLE."
- },
- "view": {
- "$ref": "ViewDefinition",
- "description": "[Optional] The view definition."
- }
- }
- },
- "TableCell": {
- "id": "TableCell",
- "type": "object",
- "description": "Represents a single cell in the result set. Users of the java client can detect whether their value result is null by calling 'com.google.api.client.util.Data.isNull(cell.getV())'.",
- "properties": {
- "v": {
- "type": "any"
- }
- }
- },
- "TableDataInsertAllRequest": {
- "id": "TableDataInsertAllRequest",
- "type": "object",
- "properties": {
- "ignoreUnknownValues": {
- "type": "boolean",
- "description": "[Optional] Accept rows that contain values that do not match the schema. The unknown values are ignored. Default is false, which treats unknown values as errors."
- },
- "kind": {
- "type": "string",
- "description": "The resource type of the response.",
- "default": "bigquery#tableDataInsertAllRequest"
- },
- "rows": {
- "type": "array",
- "description": "The rows to insert.",
- "items": {
- "type": "object",
- "properties": {
- "insertId": {
- "type": "string",
- "description": "[Optional] A unique ID for each row. BigQuery uses this property to detect duplicate insertion requests on a best-effort basis."
- },
- "json": {
- "$ref": "JsonObject",
- "description": "[Required] A JSON object that contains a row of data. The object's properties and values must match the destination table's schema."
- }
- }
- }
- },
- "skipInvalidRows": {
- "type": "boolean",
- "description": "[Optional] Insert all valid rows of a request, even if invalid rows exist. The default value is false, which causes the entire request to fail if any invalid rows exist."
- }
- }
- },
- "TableDataInsertAllResponse": {
- "id": "TableDataInsertAllResponse",
- "type": "object",
- "properties": {
- "insertErrors": {
- "type": "array",
- "description": "An array of errors for rows that were not inserted.",
- "items": {
- "type": "object",
- "properties": {
- "errors": {
- "type": "array",
- "description": "Error information for the row indicated by the index property.",
- "items": {
- "$ref": "ErrorProto"
- }
- },
- "index": {
- "type": "integer",
- "description": "The index of the row that error applies to.",
- "format": "uint32"
- }
- }
- }
- },
- "kind": {
- "type": "string",
- "description": "The resource type of the response.",
- "default": "bigquery#tableDataInsertAllResponse"
- }
- }
- },
- "TableDataList": {
- "id": "TableDataList",
- "type": "object",
- "properties": {
- "etag": {
- "type": "string",
- "description": "A hash of this page of results."
- },
- "kind": {
- "type": "string",
- "description": "The resource type of the response.",
- "default": "bigquery#tableDataList"
- },
- "pageToken": {
- "type": "string",
- "description": "A token used for paging results. Providing this token instead of the startIndex parameter can help you retrieve stable results when an underlying table is changing."
- },
- "rows": {
- "type": "array",
- "description": "Rows of results.",
- "items": {
- "$ref": "TableRow"
- }
- },
- "totalRows": {
- "type": "string",
- "description": "The total number of rows in the complete table.",
- "format": "int64"
- }
- }
- },
- "TableFieldSchema": {
- "id": "TableFieldSchema",
- "type": "object",
- "properties": {
- "description": {
- "type": "string",
- "description": "[Optional] The field description. The maximum length is 16K characters."
- },
- "fields": {
- "type": "array",
- "description": "[Optional] Describes the nested schema fields if the type property is set to RECORD.",
- "items": {
- "$ref": "TableFieldSchema"
- }
- },
- "mode": {
- "type": "string",
- "description": "[Optional] The field mode. Possible values include NULLABLE, REQUIRED and REPEATED. The default value is NULLABLE."
- },
- "name": {
- "type": "string",
- "description": "[Required] The field name. The name must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_), and must start with a letter or underscore. The maximum length is 128 characters."
- },
- "type": {
- "type": "string",
- "description": "[Required] The field data type. Possible values include STRING, INTEGER, FLOAT, BOOLEAN, TIMESTAMP or RECORD (where RECORD indicates that the field contains a nested schema)."
- }
- }
- },
- "TableList": {
- "id": "TableList",
- "type": "object",
- "properties": {
- "etag": {
- "type": "string",
- "description": "A hash of this page of results."
- },
- "kind": {
- "type": "string",
- "description": "The type of list.",
- "default": "bigquery#tableList"
- },
- "nextPageToken": {
- "type": "string",
- "description": "A token to request the next page of results."
- },
- "tables": {
- "type": "array",
- "description": "Tables in the requested dataset.",
- "items": {
- "type": "object",
- "properties": {
- "friendlyName": {
- "type": "string",
- "description": "The user-friendly name for this table."
- },
- "id": {
- "type": "string",
- "description": "An opaque ID of the table"
- },
- "kind": {
- "type": "string",
- "description": "The resource type.",
- "default": "bigquery#table"
- },
- "tableReference": {
- "$ref": "TableReference",
- "description": "A reference uniquely identifying the table."
- },
- "type": {
- "type": "string",
- "description": "The type of table. Possible values are: TABLE, VIEW."
- }
- }
- }
- },
- "totalItems": {
- "type": "integer",
- "description": "The total number of tables in the dataset.",
- "format": "int32"
- }
- }
- },
- "TableReference": {
- "id": "TableReference",
- "type": "object",
- "properties": {
- "datasetId": {
- "type": "string",
- "description": "[Required] The ID of the dataset containing this table.",
- "annotations": {
- "required": [
- "bigquery.tables.update"
- ]
- }
- },
- "projectId": {
- "type": "string",
- "description": "[Required] The ID of the project containing this table.",
- "annotations": {
- "required": [
- "bigquery.tables.update"
- ]
- }
- },
- "tableId": {
- "type": "string",
- "description": "[Required] The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.",
- "annotations": {
- "required": [
- "bigquery.tables.update"
- ]
- }
- }
- }
- },
- "TableRow": {
- "id": "TableRow",
- "type": "object",
- "description": "Represents a single row in the result set, consisting of one or more fields.",
- "properties": {
- "f": {
- "type": "array",
- "items": {
- "$ref": "TableCell"
- }
- }
- }
- },
- "TableSchema": {
- "id": "TableSchema",
- "type": "object",
- "properties": {
- "fields": {
- "type": "array",
- "description": "Describes the fields in a table.",
- "items": {
- "$ref": "TableFieldSchema"
- }
- }
- }
- },
- "ViewDefinition": {
- "id": "ViewDefinition",
- "type": "object",
- "properties": {
- "query": {
- "type": "string",
- "description": "[Required] A query that BigQuery executes when the view is referenced."
- }
- }
- }
- },
- "resources": {
- "datasets": {
- "methods": {
- "delete": {
- "id": "bigquery.datasets.delete",
- "path": "projects/{projectId}/datasets/{datasetId}",
- "httpMethod": "DELETE",
- "description": "Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of dataset being deleted",
- "required": true,
- "location": "path"
- },
- "deleteContents": {
- "type": "boolean",
- "description": "If True, delete all the tables in the dataset. If False and the dataset contains tables, the request will fail. Default is False",
- "location": "query"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the dataset being deleted",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId"
- ],
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "get": {
- "id": "bigquery.datasets.get",
- "path": "projects/{projectId}/datasets/{datasetId}",
- "httpMethod": "GET",
- "description": "Returns the dataset specified by datasetID.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the requested dataset",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the requested dataset",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId"
- ],
- "response": {
- "$ref": "Dataset"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "insert": {
- "id": "bigquery.datasets.insert",
- "path": "projects/{projectId}/datasets",
- "httpMethod": "POST",
- "description": "Creates a new empty dataset.",
- "parameters": {
- "projectId": {
- "type": "string",
- "description": "Project ID of the new dataset",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId"
- ],
- "request": {
- "$ref": "Dataset"
- },
- "response": {
- "$ref": "Dataset"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "list": {
- "id": "bigquery.datasets.list",
- "path": "projects/{projectId}/datasets",
- "httpMethod": "GET",
- "description": "Lists all datasets in the specified project to which you have been granted the READER dataset role.",
- "parameters": {
- "all": {
- "type": "boolean",
- "description": "Whether to list all datasets, including hidden ones",
- "location": "query"
- },
- "maxResults": {
- "type": "integer",
- "description": "The maximum number of results to return",
- "format": "uint32",
- "location": "query"
- },
- "pageToken": {
- "type": "string",
- "description": "Page token, returned by a previous call, to request the next page of results",
- "location": "query"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the datasets to be listed",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId"
- ],
- "response": {
- "$ref": "DatasetList"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "patch": {
- "id": "bigquery.datasets.patch",
- "path": "projects/{projectId}/datasets/{datasetId}",
- "httpMethod": "PATCH",
- "description": "Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. This method supports patch semantics.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the dataset being updated",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the dataset being updated",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId"
- ],
- "request": {
- "$ref": "Dataset"
- },
- "response": {
- "$ref": "Dataset"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "update": {
- "id": "bigquery.datasets.update",
- "path": "projects/{projectId}/datasets/{datasetId}",
- "httpMethod": "PUT",
- "description": "Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the dataset being updated",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the dataset being updated",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId"
- ],
- "request": {
- "$ref": "Dataset"
- },
- "response": {
- "$ref": "Dataset"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- }
- }
- },
- "jobs": {
- "methods": {
- "get": {
- "id": "bigquery.jobs.get",
- "path": "projects/{projectId}/jobs/{jobId}",
- "httpMethod": "GET",
- "description": "Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role.",
- "parameters": {
- "jobId": {
- "type": "string",
- "description": "Job ID of the requested job",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the requested job",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "jobId"
- ],
- "response": {
- "$ref": "Job"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "getQueryResults": {
- "id": "bigquery.jobs.getQueryResults",
- "path": "projects/{projectId}/queries/{jobId}",
- "httpMethod": "GET",
- "description": "Retrieves the results of a query job.",
- "parameters": {
- "jobId": {
- "type": "string",
- "description": "Job ID of the query job",
- "required": true,
- "location": "path"
- },
- "maxResults": {
- "type": "integer",
- "description": "Maximum number of results to read",
- "format": "uint32",
- "location": "query"
- },
- "pageToken": {
- "type": "string",
- "description": "Page token, returned by a previous call, to request the next page of results",
- "location": "query"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the query job",
- "required": true,
- "location": "path"
- },
- "startIndex": {
- "type": "string",
- "description": "Zero-based index of the starting row",
- "format": "uint64",
- "location": "query"
- },
- "timeoutMs": {
- "type": "integer",
- "description": "How long to wait for the query to complete, in milliseconds, before returning. Default is to return immediately. If the timeout passes before the job completes, the request will fail with a TIMEOUT error",
- "format": "uint32",
- "location": "query"
- }
- },
- "parameterOrder": [
- "projectId",
- "jobId"
- ],
- "response": {
- "$ref": "GetQueryResultsResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "insert": {
- "id": "bigquery.jobs.insert",
- "path": "projects/{projectId}/jobs",
- "httpMethod": "POST",
- "description": "Starts a new asynchronous job. Requires the Can View project role.",
- "parameters": {
- "projectId": {
- "type": "string",
- "description": "Project ID of the project that will be billed for the job",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId"
- ],
- "request": {
- "$ref": "Job"
- },
- "response": {
- "$ref": "Job"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/devstorage.full_control",
- "https://www.googleapis.com/auth/devstorage.read_only",
- "https://www.googleapis.com/auth/devstorage.read_write"
- ],
- "supportsMediaUpload": true,
- "mediaUpload": {
- "accept": [
- "*/*"
- ],
- "protocols": {
- "simple": {
- "multipart": true,
- "path": "/upload/bigquery/v2/projects/{projectId}/jobs"
- },
- "resumable": {
- "multipart": true,
- "path": "/resumable/upload/bigquery/v2/projects/{projectId}/jobs"
- }
- }
- }
- },
- "list": {
- "id": "bigquery.jobs.list",
- "path": "projects/{projectId}/jobs",
- "httpMethod": "GET",
- "description": "Lists all jobs that you started in the specified project. The job list returns in reverse chronological order of when the jobs were created, starting with the most recent job created. Requires the Can View project role, or the Is Owner project role if you set the allUsers property.",
- "parameters": {
- "allUsers": {
- "type": "boolean",
- "description": "Whether to display jobs owned by all users in the project. Default false",
- "location": "query"
- },
- "maxResults": {
- "type": "integer",
- "description": "Maximum number of results to return",
- "format": "uint32",
- "location": "query"
- },
- "pageToken": {
- "type": "string",
- "description": "Page token, returned by a previous call, to request the next page of results",
- "location": "query"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the jobs to list",
- "required": true,
- "location": "path"
- },
- "projection": {
- "type": "string",
- "description": "Restrict information returned to a set of selected fields",
- "enum": [
- "full",
- "minimal"
- ],
- "enumDescriptions": [
- "Includes all job data",
- "Does not include the job configuration"
- ],
- "location": "query"
- },
- "stateFilter": {
- "type": "string",
- "description": "Filter for job state",
- "enum": [
- "done",
- "pending",
- "running"
- ],
- "enumDescriptions": [
- "Finished jobs",
- "Pending jobs",
- "Running jobs"
- ],
- "repeated": true,
- "location": "query"
- }
- },
- "parameterOrder": [
- "projectId"
- ],
- "response": {
- "$ref": "JobList"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "query": {
- "id": "bigquery.jobs.query",
- "path": "projects/{projectId}/queries",
- "httpMethod": "POST",
- "description": "Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.",
- "parameters": {
- "projectId": {
- "type": "string",
- "description": "Project ID of the project billed for the query",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId"
- ],
- "request": {
- "$ref": "QueryRequest"
- },
- "response": {
- "$ref": "QueryResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- }
- }
- },
- "projects": {
- "methods": {
- "list": {
- "id": "bigquery.projects.list",
- "path": "projects",
- "httpMethod": "GET",
- "description": "Lists all projects to which you have been granted any project role.",
- "parameters": {
- "maxResults": {
- "type": "integer",
- "description": "Maximum number of results to return",
- "format": "uint32",
- "location": "query"
- },
- "pageToken": {
- "type": "string",
- "description": "Page token, returned by a previous call, to request the next page of results",
- "location": "query"
- }
- },
- "response": {
- "$ref": "ProjectList"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- }
- }
- },
- "tabledata": {
- "methods": {
- "insertAll": {
- "id": "bigquery.tabledata.insertAll",
- "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}/insertAll",
- "httpMethod": "POST",
- "description": "Streams data into BigQuery one record at a time without needing to run a load job. Requires the WRITER dataset role.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the destination table.",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the destination table.",
- "required": true,
- "location": "path"
- },
- "tableId": {
- "type": "string",
- "description": "Table ID of the destination table.",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId",
- "tableId"
- ],
- "request": {
- "$ref": "TableDataInsertAllRequest"
- },
- "response": {
- "$ref": "TableDataInsertAllResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/bigquery.insertdata",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "list": {
- "id": "bigquery.tabledata.list",
- "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}/data",
- "httpMethod": "GET",
- "description": "Retrieves table data from a specified set of rows. Requires the READER dataset role.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the table to read",
- "required": true,
- "location": "path"
- },
- "maxResults": {
- "type": "integer",
- "description": "Maximum number of results to return",
- "format": "uint32",
- "location": "query"
- },
- "pageToken": {
- "type": "string",
- "description": "Page token, returned by a previous call, identifying the result set",
- "location": "query"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the table to read",
- "required": true,
- "location": "path"
- },
- "startIndex": {
- "type": "string",
- "description": "Zero-based index of the starting row to read",
- "format": "uint64",
- "location": "query"
- },
- "tableId": {
- "type": "string",
- "description": "Table ID of the table to read",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId",
- "tableId"
- ],
- "response": {
- "$ref": "TableDataList"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- }
- }
- },
- "tables": {
- "methods": {
- "delete": {
- "id": "bigquery.tables.delete",
- "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
- "httpMethod": "DELETE",
- "description": "Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the table to delete",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the table to delete",
- "required": true,
- "location": "path"
- },
- "tableId": {
- "type": "string",
- "description": "Table ID of the table to delete",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId",
- "tableId"
- ],
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "get": {
- "id": "bigquery.tables.get",
- "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
- "httpMethod": "GET",
- "description": "Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the requested table",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the requested table",
- "required": true,
- "location": "path"
- },
- "tableId": {
- "type": "string",
- "description": "Table ID of the requested table",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId",
- "tableId"
- ],
- "response": {
- "$ref": "Table"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "insert": {
- "id": "bigquery.tables.insert",
- "path": "projects/{projectId}/datasets/{datasetId}/tables",
- "httpMethod": "POST",
- "description": "Creates a new, empty table in the dataset.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the new table",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the new table",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId"
- ],
- "request": {
- "$ref": "Table"
- },
- "response": {
- "$ref": "Table"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "list": {
- "id": "bigquery.tables.list",
- "path": "projects/{projectId}/datasets/{datasetId}/tables",
- "httpMethod": "GET",
- "description": "Lists all tables in the specified dataset. Requires the READER dataset role.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the tables to list",
- "required": true,
- "location": "path"
- },
- "maxResults": {
- "type": "integer",
- "description": "Maximum number of results to return",
- "format": "uint32",
- "location": "query"
- },
- "pageToken": {
- "type": "string",
- "description": "Page token, returned by a previous call, to request the next page of results",
- "location": "query"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the tables to list",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId"
- ],
- "response": {
- "$ref": "TableList"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "patch": {
- "id": "bigquery.tables.patch",
- "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
- "httpMethod": "PATCH",
- "description": "Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource. This method supports patch semantics.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the table to update",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the table to update",
- "required": true,
- "location": "path"
- },
- "tableId": {
- "type": "string",
- "description": "Table ID of the table to update",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId",
- "tableId"
- ],
- "request": {
- "$ref": "Table"
- },
- "response": {
- "$ref": "Table"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "update": {
- "id": "bigquery.tables.update",
- "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
- "httpMethod": "PUT",
- "description": "Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource.",
- "parameters": {
- "datasetId": {
- "type": "string",
- "description": "Dataset ID of the table to update",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "Project ID of the table to update",
- "required": true,
- "location": "path"
- },
- "tableId": {
- "type": "string",
- "description": "Table ID of the table to update",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "datasetId",
- "tableId"
- ],
- "request": {
- "$ref": "Table"
- },
- "response": {
- "$ref": "Table"
- },
- "scopes": [
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- }
- }
- }
- }
-}
diff --git a/vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go b/vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go
deleted file mode 100644
index 4c6873d..0000000
--- a/vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go
+++ /dev/null
@@ -1,3531 +0,0 @@
-// Package bigquery provides access to the BigQuery API.
-//
-// See https://cloud.google.com/bigquery/
-//
-// Usage example:
-//
-// import "google.golang.org/api/bigquery/v2"
-// ...
-// bigqueryService, err := bigquery.New(oauthHttpClient)
-package bigquery
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
- "golang.org/x/net/context"
- "google.golang.org/api/googleapi"
- "io"
- "net/http"
- "net/url"
- "strconv"
- "strings"
-)
-
-// Always reference these packages, just in case the auto-generated code
-// below doesn't.
-var _ = bytes.NewBuffer
-var _ = strconv.Itoa
-var _ = fmt.Sprintf
-var _ = json.NewDecoder
-var _ = io.Copy
-var _ = url.Parse
-var _ = googleapi.Version
-var _ = errors.New
-var _ = strings.Replace
-var _ = context.Background
-
-const apiId = "bigquery:v2"
-const apiName = "bigquery"
-const apiVersion = "v2"
-const basePath = "https://www.googleapis.com/bigquery/v2/"
-
-// OAuth2 scopes used by this API.
-const (
- // View and manage your data in Google BigQuery
- BigqueryScope = "https://www.googleapis.com/auth/bigquery"
-
- // Insert data into Google BigQuery
- BigqueryInsertdataScope = "https://www.googleapis.com/auth/bigquery.insertdata"
-
- // View and manage your data across Google Cloud Platform services
- CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
-
- // Manage your data and permissions in Google Cloud Storage
- DevstorageFullControlScope = "https://www.googleapis.com/auth/devstorage.full_control"
-
- // View your data in Google Cloud Storage
- DevstorageReadOnlyScope = "https://www.googleapis.com/auth/devstorage.read_only"
-
- // Manage your data in Google Cloud Storage
- DevstorageReadWriteScope = "https://www.googleapis.com/auth/devstorage.read_write"
-)
-
-func New(client *http.Client) (*Service, error) {
- if client == nil {
- return nil, errors.New("client is nil")
- }
- s := &Service{client: client, BasePath: basePath}
- s.Datasets = NewDatasetsService(s)
- s.Jobs = NewJobsService(s)
- s.Projects = NewProjectsService(s)
- s.Tabledata = NewTabledataService(s)
- s.Tables = NewTablesService(s)
- return s, nil
-}
-
-type Service struct {
- client *http.Client
- BasePath string // API endpoint base URL
- UserAgent string // optional additional User-Agent fragment
-
- Datasets *DatasetsService
-
- Jobs *JobsService
-
- Projects *ProjectsService
-
- Tabledata *TabledataService
-
- Tables *TablesService
-}
-
-func (s *Service) userAgent() string {
- if s.UserAgent == "" {
- return googleapi.UserAgent
- }
- return googleapi.UserAgent + " " + s.UserAgent
-}
-
-func NewDatasetsService(s *Service) *DatasetsService {
- rs := &DatasetsService{s: s}
- return rs
-}
-
-type DatasetsService struct {
- s *Service
-}
-
-func NewJobsService(s *Service) *JobsService {
- rs := &JobsService{s: s}
- return rs
-}
-
-type JobsService struct {
- s *Service
-}
-
-func NewProjectsService(s *Service) *ProjectsService {
- rs := &ProjectsService{s: s}
- return rs
-}
-
-type ProjectsService struct {
- s *Service
-}
-
-func NewTabledataService(s *Service) *TabledataService {
- rs := &TabledataService{s: s}
- return rs
-}
-
-type TabledataService struct {
- s *Service
-}
-
-func NewTablesService(s *Service) *TablesService {
- rs := &TablesService{s: s}
- return rs
-}
-
-type TablesService struct {
- s *Service
-}
-
-type CsvOptions struct {
- // AllowJaggedRows: [Optional] Indicates if BigQuery should accept rows
- // that are missing trailing optional columns. If true, BigQuery treats
- // missing trailing columns as null values. If false, records with
- // missing trailing columns are treated as bad records, and if there are
- // too many bad records, an invalid error is returned in the job result.
- // The default value is false.
- AllowJaggedRows bool `json:"allowJaggedRows,omitempty"`
-
- // AllowQuotedNewlines: [Optional] Indicates if BigQuery should allow
- // quoted data sections that contain newline characters in a CSV file.
- // The default value is false.
- AllowQuotedNewlines bool `json:"allowQuotedNewlines,omitempty"`
-
- // Encoding: [Optional] The character encoding of the data. The
- // supported values are UTF-8 or ISO-8859-1. The default value is UTF-8.
- // BigQuery decodes the data after the raw, binary data has been split
- // using the values of the quote and fieldDelimiter properties.
- Encoding string `json:"encoding,omitempty"`
-
- // FieldDelimiter: [Optional] The separator for fields in a CSV file.
- // BigQuery converts the string to ISO-8859-1 encoding, and then uses
- // the first byte of the encoded string to split the data in its raw,
- // binary state. BigQuery also supports the escape sequence "\t" to
- // specify a tab separator. The default value is a comma (',').
- FieldDelimiter string `json:"fieldDelimiter,omitempty"`
-
- // Quote: [Optional] The value that is used to quote data sections in a
- // CSV file. BigQuery converts the string to ISO-8859-1 encoding, and
- // then uses the first byte of the encoded string to split the data in
- // its raw, binary state. The default value is a double-quote ('"'). If
- // your data does not contain quoted sections, set the property value to
- // an empty string. If your data contains quoted newline characters, you
- // must also set the allowQuotedNewlines property to true.
- Quote string `json:"quote,omitempty"`
-
- // SkipLeadingRows: [Optional] The number of rows at the top of a CSV
- // file that BigQuery will skip when reading the data. The default value
- // is 0. This property is useful if you have header rows in the file
- // that should be skipped.
- SkipLeadingRows int64 `json:"skipLeadingRows,omitempty"`
-}
-
-type Dataset struct {
- // Access: [Optional] An array of objects that define dataset access for
- // one or more entities. You can set this property when inserting or
- // updating a dataset in order to control who is allowed to access the
- // data. If unspecified at dataset creation time, BigQuery adds default
- // dataset access for the following entities: access.specialGroup:
- // projectReaders; access.role: READER; access.specialGroup:
- // projectWriters; access.role: WRITER; access.specialGroup:
- // projectOwners; access.role: OWNER; access.userByEmail: [dataset
- // creator email]; access.role: OWNER;
- Access []*DatasetAccess `json:"access,omitempty"`
-
- // CreationTime: [Output-only] The time when this dataset was created,
- // in milliseconds since the epoch.
- CreationTime int64 `json:"creationTime,omitempty,string"`
-
- // DatasetReference: [Required] A reference that identifies the dataset.
- DatasetReference *DatasetReference `json:"datasetReference,omitempty"`
-
- // DefaultTableExpirationMs: [Experimental] The default lifetime of all
- // tables in the dataset, in milliseconds. The minimum value is 3600000
- // milliseconds (one hour). Once this property is set, all newly-created
- // tables in the dataset will have an expirationTime property set to the
- // creation time plus the value in this property, and changing the value
- // will only affect new tables, not existing ones. When the
- // expirationTime for a given table is reached, that table will be
- // deleted automatically. If a table's expirationTime is modified or
- // removed before the table expires, or if you provide an explicit
- // expirationTime when creating a table, that value takes precedence
- // over the default expiration time indicated by this property.
- DefaultTableExpirationMs int64 `json:"defaultTableExpirationMs,omitempty,string"`
-
- // Description: [Optional] A user-friendly description of the dataset.
- Description string `json:"description,omitempty"`
-
- // Etag: [Output-only] A hash of the resource.
- Etag string `json:"etag,omitempty"`
-
- // FriendlyName: [Optional] A descriptive name for the dataset.
- FriendlyName string `json:"friendlyName,omitempty"`
-
- // Id: [Output-only] The fully-qualified unique name of the dataset in
- // the format projectId:datasetId. The dataset name without the project
- // name is given in the datasetId field. When creating a new dataset,
- // leave this field blank, and instead specify the datasetId field.
- Id string `json:"id,omitempty"`
-
- // Kind: [Output-only] The resource type.
- Kind string `json:"kind,omitempty"`
-
- // LastModifiedTime: [Output-only] The date when this dataset or any of
- // its tables was last modified, in milliseconds since the epoch.
- LastModifiedTime int64 `json:"lastModifiedTime,omitempty,string"`
-
- // Location: [Experimental] The location where the data resides. If not
- // present, the data will be stored in the US.
- Location string `json:"location,omitempty"`
-
- // SelfLink: [Output-only] A URL that can be used to access the resource
- // again. You can use this URL in Get or Update requests to the
- // resource.
- SelfLink string `json:"selfLink,omitempty"`
-}
-
-type DatasetAccess struct {
- // Domain: [Pick one] A domain to grant access to. Any users signed in
- // with the domain specified will be granted the specified access.
- // Example: "example.com".
- Domain string `json:"domain,omitempty"`
-
- // GroupByEmail: [Pick one] An email address of a Google Group to grant
- // access to.
- GroupByEmail string `json:"groupByEmail,omitempty"`
-
- // Role: [Required] Describes the rights granted to the user specified
- // by the other member of the access object. The following string values
- // are supported: READER, WRITER, OWNER.
- Role string `json:"role,omitempty"`
-
- // SpecialGroup: [Pick one] A special group to grant access to. Possible
- // values include: projectOwners: Owners of the enclosing project.
- // projectReaders: Readers of the enclosing project. projectWriters:
- // Writers of the enclosing project. allAuthenticatedUsers: All
- // authenticated BigQuery users.
- SpecialGroup string `json:"specialGroup,omitempty"`
-
- // UserByEmail: [Pick one] An email address of a user to grant access
- // to. For example: fred@example.com.
- UserByEmail string `json:"userByEmail,omitempty"`
-
- // View: [Pick one] A view from a different dataset to grant access to.
- // Queries executed against that view will have read access to tables in
- // this dataset. The role field is not required when this field is set.
- // If that view is updated by any user, access to the view needs to be
- // granted again via an update operation.
- View *TableReference `json:"view,omitempty"`
-}
-
-type DatasetList struct {
- // Datasets: An array of the dataset resources in the project. Each
- // resource contains basic information. For full information about a
- // particular dataset resource, use the Datasets: get method. This
- // property is omitted when there are no datasets in the project.
- Datasets []*DatasetListDatasets `json:"datasets,omitempty"`
-
- // Etag: A hash value of the results page. You can use this property to
- // determine if the page has changed since the last request.
- Etag string `json:"etag,omitempty"`
-
- // Kind: The list type. This property always returns the value
- // "bigquery#datasetList".
- Kind string `json:"kind,omitempty"`
-
- // NextPageToken: A token that can be used to request the next results
- // page. This property is omitted on the final results page.
- NextPageToken string `json:"nextPageToken,omitempty"`
-}
-
-type DatasetListDatasets struct {
- // DatasetReference: The dataset reference. Use this property to access
- // specific parts of the dataset's ID, such as project ID or dataset ID.
- DatasetReference *DatasetReference `json:"datasetReference,omitempty"`
-
- // FriendlyName: A descriptive name for the dataset, if one exists.
- FriendlyName string `json:"friendlyName,omitempty"`
-
- // Id: The fully-qualified, unique, opaque ID of the dataset.
- Id string `json:"id,omitempty"`
-
- // Kind: The resource type. This property always returns the value
- // "bigquery#dataset".
- Kind string `json:"kind,omitempty"`
-}
-
-type DatasetReference struct {
- // DatasetId: [Required] A unique ID for this dataset, without the
- // project name. The ID must contain only letters (a-z, A-Z), numbers
- // (0-9), or underscores (_). The maximum length is 1,024 characters.
- DatasetId string `json:"datasetId,omitempty"`
-
- // ProjectId: [Optional] The ID of the project containing this dataset.
- ProjectId string `json:"projectId,omitempty"`
-}
-
-type ErrorProto struct {
- // DebugInfo: Debugging information. This property is internal to Google
- // and should not be used.
- DebugInfo string `json:"debugInfo,omitempty"`
-
- // Location: Specifies where the error occurred, if present.
- Location string `json:"location,omitempty"`
-
- // Message: A human-readable description of the error.
- Message string `json:"message,omitempty"`
-
- // Reason: A short error code that summarizes the error.
- Reason string `json:"reason,omitempty"`
-}
-
-type ExternalDataConfiguration struct {
- // Compression: [Optional] The compression type of the data source.
- // Possible values include GZIP and NONE. The default value is NONE.
- Compression string `json:"compression,omitempty"`
-
- // CsvOptions: Additional properties to set if sourceFormat is set to
- // CSV.
- CsvOptions *CsvOptions `json:"csvOptions,omitempty"`
-
- // IgnoreUnknownValues: [Optional] Indicates if BigQuery should allow
- // extra values that are not represented in the table schema. If true,
- // the extra values are ignored. If false, records with extra columns
- // are treated as bad records, and if there are too many bad records, an
- // invalid error is returned in the job result. The default value is
- // false. The sourceFormat property determines what BigQuery treats as
- // an extra value: CSV: Trailing columns
- IgnoreUnknownValues bool `json:"ignoreUnknownValues,omitempty"`
-
- // MaxBadRecords: [Optional] The maximum number of bad records that
- // BigQuery can ignore when reading data. If the number of bad records
- // exceeds this value, an invalid error is returned in the job result.
- // The default value is 0, which requires that all records are valid.
- MaxBadRecords int64 `json:"maxBadRecords,omitempty"`
-
- // Schema: [Required] The schema for the data.
- Schema *TableSchema `json:"schema,omitempty"`
-
- // SourceFormat: [Optional] The data format. External data sources must
- // be in CSV format. The default value is CSV.
- SourceFormat string `json:"sourceFormat,omitempty"`
-
- // SourceUris: [Required] The fully-qualified URIs that point to your
- // data in Google Cloud Storage. Each URI can contain one '*' wildcard
- // character and it must come after the 'bucket' name. CSV limits
- // related to load jobs apply to external data sources, plus an
- // additional limit of 10 GB maximum size across all URIs.
- SourceUris []string `json:"sourceUris,omitempty"`
-}
-
-type GetQueryResultsResponse struct {
- // CacheHit: Whether the query result was fetched from the query cache.
- CacheHit bool `json:"cacheHit,omitempty"`
-
- // Etag: A hash of this response.
- Etag string `json:"etag,omitempty"`
-
- // JobComplete: Whether the query has completed or not. If rows or
- // totalRows are present, this will always be true. If this is false,
- // totalRows will not be available.
- JobComplete bool `json:"jobComplete,omitempty"`
-
- // JobReference: Reference to the BigQuery Job that was created to run
- // the query. This field will be present even if the original request
- // timed out, in which case GetQueryResults can be used to read the
- // results once the query has completed. Since this API only returns the
- // first page of results, subsequent pages can be fetched via the same
- // mechanism (GetQueryResults).
- JobReference *JobReference `json:"jobReference,omitempty"`
-
- // Kind: The resource type of the response.
- Kind string `json:"kind,omitempty"`
-
- // PageToken: A token used for paging results.
- PageToken string `json:"pageToken,omitempty"`
-
- // Rows: An object with as many results as can be contained within the
- // maximum permitted reply size. To get any additional rows, you can
- // call GetQueryResults and specify the jobReference returned above.
- // Present only when the query completes successfully.
- Rows []*TableRow `json:"rows,omitempty"`
-
- // Schema: The schema of the results. Present only when the query
- // completes successfully.
- Schema *TableSchema `json:"schema,omitempty"`
-
- // TotalBytesProcessed: The total number of bytes processed for this
- // query.
- TotalBytesProcessed int64 `json:"totalBytesProcessed,omitempty,string"`
-
- // TotalRows: The total number of rows in the complete query result set,
- // which can be more than the number of rows in this single page of
- // results. Present only when the query completes successfully.
- TotalRows uint64 `json:"totalRows,omitempty,string"`
-}
-
-type Job struct {
- // Configuration: [Required] Describes the job configuration.
- Configuration *JobConfiguration `json:"configuration,omitempty"`
-
- // Etag: [Output-only] A hash of this resource.
- Etag string `json:"etag,omitempty"`
-
- // Id: [Output-only] Opaque ID field of the job
- Id string `json:"id,omitempty"`
-
- // JobReference: [Optional] Reference describing the unique-per-user
- // name of the job.
- JobReference *JobReference `json:"jobReference,omitempty"`
-
- // Kind: [Output-only] The type of the resource.
- Kind string `json:"kind,omitempty"`
-
- // SelfLink: [Output-only] A URL that can be used to access this
- // resource again.
- SelfLink string `json:"selfLink,omitempty"`
-
- // Statistics: [Output-only] Information about the job, including
- // starting time and ending time of the job.
- Statistics *JobStatistics `json:"statistics,omitempty"`
-
- // Status: [Output-only] The status of this job. Examine this value when
- // polling an asynchronous job to see if the job is complete.
- Status *JobStatus `json:"status,omitempty"`
-
- // UserEmail: [Output-only] Email address of the user who ran the job.
- UserEmail string `json:"user_email,omitempty"`
-}
-
-type JobConfiguration struct {
- // Copy: [Pick one] Copies a table.
- Copy *JobConfigurationTableCopy `json:"copy,omitempty"`
-
- // DryRun: [Optional] If set, don't actually run this job. A valid query
- // will return a mostly empty response with some processing statistics,
- // while an invalid query will return the same error it would if it
- // wasn't a dry run. Behavior of non-query jobs is undefined.
- DryRun bool `json:"dryRun,omitempty"`
-
- // Extract: [Pick one] Configures an extract job.
- Extract *JobConfigurationExtract `json:"extract,omitempty"`
-
- // Link: [Pick one] Configures a link job.
- Link *JobConfigurationLink `json:"link,omitempty"`
-
- // Load: [Pick one] Configures a load job.
- Load *JobConfigurationLoad `json:"load,omitempty"`
-
- // Query: [Pick one] Configures a query job.
- Query *JobConfigurationQuery `json:"query,omitempty"`
-}
-
-type JobConfigurationExtract struct {
- // Compression: [Optional] The compression type to use for exported
- // files. Possible values include GZIP and NONE. The default value is
- // NONE.
- Compression string `json:"compression,omitempty"`
-
- // DestinationFormat: [Optional] The exported file format. Possible
- // values include CSV, NEWLINE_DELIMITED_JSON and AVRO. The default
- // value is CSV. Tables with nested or repeated fields cannot be
- // exported as CSV.
- DestinationFormat string `json:"destinationFormat,omitempty"`
-
- // DestinationUri: [Pick one] DEPRECATED: Use destinationUris instead,
- // passing only one URI as necessary. The fully-qualified Google Cloud
- // Storage URI where the extracted table should be written.
- DestinationUri string `json:"destinationUri,omitempty"`
-
- // DestinationUris: [Pick one] A list of fully-qualified Google Cloud
- // Storage URIs where the extracted table should be written.
- DestinationUris []string `json:"destinationUris,omitempty"`
-
- // FieldDelimiter: [Optional] Delimiter to use between fields in the
- // exported data. Default is ','
- FieldDelimiter string `json:"fieldDelimiter,omitempty"`
-
- // PrintHeader: [Optional] Whether to print out a header row in the
- // results. Default is true.
- PrintHeader bool `json:"printHeader,omitempty"`
-
- // SourceTable: [Required] A reference to the table being exported.
- SourceTable *TableReference `json:"sourceTable,omitempty"`
-}
-
-type JobConfigurationLink struct {
- // CreateDisposition: [Optional] Specifies whether the job is allowed to
- // create new tables. The following values are supported:
- // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the
- // table. CREATE_NEVER: The table must already exist. If it does not, a
- // 'notFound' error is returned in the job result. The default value is
- // CREATE_IF_NEEDED. Creation, truncation and append actions occur as
- // one atomic update upon job completion.
- CreateDisposition string `json:"createDisposition,omitempty"`
-
- // DestinationTable: [Required] The destination table of the link job.
- DestinationTable *TableReference `json:"destinationTable,omitempty"`
-
- // SourceUri: [Required] URI of source table to link.
- SourceUri []string `json:"sourceUri,omitempty"`
-
- // WriteDisposition: [Optional] Specifies the action that occurs if the
- // destination table already exists. The following values are supported:
- // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
- // table data. WRITE_APPEND: If the table already exists, BigQuery
- // appends the data to the table. WRITE_EMPTY: If the table already
- // exists and contains data, a 'duplicate' error is returned in the job
- // result. The default value is WRITE_EMPTY. Each action is atomic and
- // only occurs if BigQuery is able to complete the job successfully.
- // Creation, truncation and append actions occur as one atomic update
- // upon job completion.
- WriteDisposition string `json:"writeDisposition,omitempty"`
-}
-
-type JobConfigurationLoad struct {
- // AllowJaggedRows: [Optional] Accept rows that are missing trailing
- // optional columns. The missing values are treated as nulls. If false,
- // records with missing trailing columns are treated as bad records, and
- // if there are too many bad records, an invalid error is returned in
- // the job result. The default value is false. Only applicable to CSV,
- // ignored for other formats.
- AllowJaggedRows bool `json:"allowJaggedRows,omitempty"`
-
- // AllowQuotedNewlines: Indicates if BigQuery should allow quoted data
- // sections that contain newline characters in a CSV file. The default
- // value is false.
- AllowQuotedNewlines bool `json:"allowQuotedNewlines,omitempty"`
-
- // CreateDisposition: [Optional] Specifies whether the job is allowed to
- // create new tables. The following values are supported:
- // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the
- // table. CREATE_NEVER: The table must already exist. If it does not, a
- // 'notFound' error is returned in the job result. The default value is
- // CREATE_IF_NEEDED. Creation, truncation and append actions occur as
- // one atomic update upon job completion.
- CreateDisposition string `json:"createDisposition,omitempty"`
-
- // DestinationTable: [Required] The destination table to load the data
- // into.
- DestinationTable *TableReference `json:"destinationTable,omitempty"`
-
- // Encoding: [Optional] The character encoding of the data. The
- // supported values are UTF-8 or ISO-8859-1. The default value is UTF-8.
- // BigQuery decodes the data after the raw, binary data has been split
- // using the values of the quote and fieldDelimiter properties.
- Encoding string `json:"encoding,omitempty"`
-
- // FieldDelimiter: [Optional] The separator for fields in a CSV file.
- // BigQuery converts the string to ISO-8859-1 encoding, and then uses
- // the first byte of the encoded string to split the data in its raw,
- // binary state. BigQuery also supports the escape sequence "\t" to
- // specify a tab separator. The default value is a comma (',').
- FieldDelimiter string `json:"fieldDelimiter,omitempty"`
-
- // IgnoreUnknownValues: [Optional] Indicates if BigQuery should allow
- // extra values that are not represented in the table schema. If true,
- // the extra values are ignored. If false, records with extra columns
- // are treated as bad records, and if there are too many bad records, an
- // invalid error is returned in the job result. The default value is
- // false. The sourceFormat property determines what BigQuery treats as
- // an extra value: CSV: Trailing columns JSON: Named values that don't
- // match any column names
- IgnoreUnknownValues bool `json:"ignoreUnknownValues,omitempty"`
-
- // MaxBadRecords: [Optional] The maximum number of bad records that
- // BigQuery can ignore when running the job. If the number of bad
- // records exceeds this value, an invalid error is returned in the job
- // result. The default value is 0, which requires that all records are
- // valid.
- MaxBadRecords int64 `json:"maxBadRecords,omitempty"`
-
- // ProjectionFields: [Experimental] If sourceFormat is set to
- // "DATASTORE_BACKUP", indicates which entity properties to load into
- // BigQuery from a Cloud Datastore backup. Property names are case
- // sensitive and must be top-level properties. If no properties are
- // specified, BigQuery loads all properties. If any named property isn't
- // found in the Cloud Datastore backup, an invalid error is returned in
- // the job result.
- ProjectionFields []string `json:"projectionFields,omitempty"`
-
- // Quote: [Optional] The value that is used to quote data sections in a
- // CSV file. BigQuery converts the string to ISO-8859-1 encoding, and
- // then uses the first byte of the encoded string to split the data in
- // its raw, binary state. The default value is a double-quote ('"'). If
- // your data does not contain quoted sections, set the property value to
- // an empty string. If your data contains quoted newline characters, you
- // must also set the allowQuotedNewlines property to true.
- Quote string `json:"quote,omitempty"`
-
- // Schema: [Optional] The schema for the destination table. The schema
- // can be omitted if the destination table already exists or if the
- // schema can be inferred from the loaded data.
- Schema *TableSchema `json:"schema,omitempty"`
-
- // SchemaInline: [Deprecated] The inline schema. For CSV schemas,
- // specify as "Field1:Type1[,Field2:Type2]*". For example, "foo:STRING,
- // bar:INTEGER, baz:FLOAT".
- SchemaInline string `json:"schemaInline,omitempty"`
-
- // SchemaInlineFormat: [Deprecated] The format of the schemaInline
- // property.
- SchemaInlineFormat string `json:"schemaInlineFormat,omitempty"`
-
- // SkipLeadingRows: [Optional] The number of rows at the top of a CSV
- // file that BigQuery will skip when loading the data. The default value
- // is 0. This property is useful if you have header rows in the file
- // that should be skipped.
- SkipLeadingRows int64 `json:"skipLeadingRows,omitempty"`
-
- // SourceFormat: [Optional] The format of the data files. For CSV files,
- // specify "CSV". For datastore backups, specify "DATASTORE_BACKUP". For
- // newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". The default
- // value is CSV.
- SourceFormat string `json:"sourceFormat,omitempty"`
-
- // SourceUris: [Required] The fully-qualified URIs that point to your
- // data in Google Cloud Storage. Each URI can contain one '*' wildcard
- // character and it must come after the 'bucket' name.
- SourceUris []string `json:"sourceUris,omitempty"`
-
- // WriteDisposition: [Optional] Specifies the action that occurs if the
- // destination table already exists. The following values are supported:
- // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
- // table data. WRITE_APPEND: If the table already exists, BigQuery
- // appends the data to the table. WRITE_EMPTY: If the table already
- // exists and contains data, a 'duplicate' error is returned in the job
- // result. The default value is WRITE_EMPTY. Each action is atomic and
- // only occurs if BigQuery is able to complete the job successfully.
- // Creation, truncation and append actions occur as one atomic update
- // upon job completion.
- WriteDisposition string `json:"writeDisposition,omitempty"`
-}
-
-type JobConfigurationQuery struct {
- // AllowLargeResults: If true, allows the query to produce arbitrarily
- // large result tables at a slight cost in performance. Requires
- // destinationTable to be set.
- AllowLargeResults bool `json:"allowLargeResults,omitempty"`
-
- // CreateDisposition: [Optional] Specifies whether the job is allowed to
- // create new tables. The following values are supported:
- // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the
- // table. CREATE_NEVER: The table must already exist. If it does not, a
- // 'notFound' error is returned in the job result. The default value is
- // CREATE_IF_NEEDED. Creation, truncation and append actions occur as
- // one atomic update upon job completion.
- CreateDisposition string `json:"createDisposition,omitempty"`
-
- // DefaultDataset: [Optional] Specifies the default dataset to use for
- // unqualified table names in the query.
- DefaultDataset *DatasetReference `json:"defaultDataset,omitempty"`
-
- // DestinationTable: [Optional] Describes the table where the query
- // results should be stored. If not present, a new table will be created
- // to store the results.
- DestinationTable *TableReference `json:"destinationTable,omitempty"`
-
- // FlattenResults: [Optional] Flattens all nested and repeated fields in
- // the query results. The default value is true. allowLargeResults must
- // be true if this is set to false.
- FlattenResults bool `json:"flattenResults,omitempty"`
-
- // PreserveNulls: [Deprecated] This property is deprecated.
- PreserveNulls bool `json:"preserveNulls,omitempty"`
-
- // Priority: [Optional] Specifies a priority for the query. Possible
- // values include INTERACTIVE and BATCH. The default value is
- // INTERACTIVE.
- Priority string `json:"priority,omitempty"`
-
- // Query: [Required] BigQuery SQL query to execute.
- Query string `json:"query,omitempty"`
-
- // TableDefinitions: [Experimental] If querying an external data source
- // outside of BigQuery, describes the data format, location and other
- // properties of the data source. By defining these properties, the data
- // source can then be queried as if it were a standard BigQuery table.
- TableDefinitions map[string]ExternalDataConfiguration `json:"tableDefinitions,omitempty"`
-
- // UseQueryCache: [Optional] Whether to look for the result in the query
- // cache. The query cache is a best-effort cache that will be flushed
- // whenever tables in the query are modified. Moreover, the query cache
- // is only available when a query does not have a destination table
- // specified.
- UseQueryCache bool `json:"useQueryCache,omitempty"`
-
- // WriteDisposition: [Optional] Specifies the action that occurs if the
- // destination table already exists. The following values are supported:
- // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
- // table data. WRITE_APPEND: If the table already exists, BigQuery
- // appends the data to the table. WRITE_EMPTY: If the table already
- // exists and contains data, a 'duplicate' error is returned in the job
- // result. The default value is WRITE_EMPTY. Each action is atomic and
- // only occurs if BigQuery is able to complete the job successfully.
- // Creation, truncation and append actions occur as one atomic update
- // upon job completion.
- WriteDisposition string `json:"writeDisposition,omitempty"`
-}
-
-type JobConfigurationTableCopy struct {
- // CreateDisposition: [Optional] Specifies whether the job is allowed to
- // create new tables. The following values are supported:
- // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the
- // table. CREATE_NEVER: The table must already exist. If it does not, a
- // 'notFound' error is returned in the job result. The default value is
- // CREATE_IF_NEEDED. Creation, truncation and append actions occur as
- // one atomic update upon job completion.
- CreateDisposition string `json:"createDisposition,omitempty"`
-
- // DestinationTable: [Required] The destination table
- DestinationTable *TableReference `json:"destinationTable,omitempty"`
-
- // SourceTable: [Pick one] Source table to copy.
- SourceTable *TableReference `json:"sourceTable,omitempty"`
-
- // SourceTables: [Pick one] Source tables to copy.
- SourceTables []*TableReference `json:"sourceTables,omitempty"`
-
- // WriteDisposition: [Optional] Specifies the action that occurs if the
- // destination table already exists. The following values are supported:
- // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
- // table data. WRITE_APPEND: If the table already exists, BigQuery
- // appends the data to the table. WRITE_EMPTY: If the table already
- // exists and contains data, a 'duplicate' error is returned in the job
- // result. The default value is WRITE_EMPTY. Each action is atomic and
- // only occurs if BigQuery is able to complete the job successfully.
- // Creation, truncation and append actions occur as one atomic update
- // upon job completion.
- WriteDisposition string `json:"writeDisposition,omitempty"`
-}
-
-type JobList struct {
- // Etag: A hash of this page of results.
- Etag string `json:"etag,omitempty"`
-
- // Jobs: List of jobs that were requested.
- Jobs []*JobListJobs `json:"jobs,omitempty"`
-
- // Kind: The resource type of the response.
- Kind string `json:"kind,omitempty"`
-
- // NextPageToken: A token to request the next page of results.
- NextPageToken string `json:"nextPageToken,omitempty"`
-
- // TotalItems: Total number of jobs in this collection.
- TotalItems int64 `json:"totalItems,omitempty"`
-}
-
-type JobListJobs struct {
- // Configuration: [Full-projection-only] Specifies the job
- // configuration.
- Configuration *JobConfiguration `json:"configuration,omitempty"`
-
- // ErrorResult: A result object that will be present only if the job has
- // failed.
- ErrorResult *ErrorProto `json:"errorResult,omitempty"`
-
- // Id: Unique opaque ID of the job.
- Id string `json:"id,omitempty"`
-
- // JobReference: Job reference uniquely identifying the job.
- JobReference *JobReference `json:"jobReference,omitempty"`
-
- // Kind: The resource type.
- Kind string `json:"kind,omitempty"`
-
- // State: Running state of the job. When the state is DONE, errorResult
- // can be checked to determine whether the job succeeded or failed.
- State string `json:"state,omitempty"`
-
- // Statistics: [Output-only] Information about the job, including
- // starting time and ending time of the job.
- Statistics *JobStatistics `json:"statistics,omitempty"`
-
- // Status: [Full-projection-only] Describes the state of the job.
- Status *JobStatus `json:"status,omitempty"`
-
- // UserEmail: [Full-projection-only] Email address of the user who ran
- // the job.
- UserEmail string `json:"user_email,omitempty"`
-}
-
-type JobReference struct {
- // JobId: [Required] The ID of the job. The ID must contain only letters
- // (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The
- // maximum length is 1,024 characters.
- JobId string `json:"jobId,omitempty"`
-
- // ProjectId: [Required] The ID of the project containing this job.
- ProjectId string `json:"projectId,omitempty"`
-}
-
-type JobStatistics struct {
- // CreationTime: [Output-only] Creation time of this job, in
- // milliseconds since the epoch. This field will be present on all jobs.
- CreationTime int64 `json:"creationTime,omitempty,string"`
-
- // EndTime: [Output-only] End time of this job, in milliseconds since
- // the epoch. This field will be present whenever a job is in the DONE
- // state.
- EndTime int64 `json:"endTime,omitempty,string"`
-
- // Extract: [Output-only] Statistics for an extract job.
- Extract *JobStatistics4 `json:"extract,omitempty"`
-
- // Load: [Output-only] Statistics for a load job.
- Load *JobStatistics3 `json:"load,omitempty"`
-
- // Query: [Output-only] Statistics for a query job.
- Query *JobStatistics2 `json:"query,omitempty"`
-
- // StartTime: [Output-only] Start time of this job, in milliseconds
- // since the epoch. This field will be present when the job transitions
- // from the PENDING state to either RUNNING or DONE.
- StartTime int64 `json:"startTime,omitempty,string"`
-
- // TotalBytesProcessed: [Output-only] [Deprecated] Use the bytes
- // processed in the query statistics instead.
- TotalBytesProcessed int64 `json:"totalBytesProcessed,omitempty,string"`
-}
-
-type JobStatistics2 struct {
- // CacheHit: [Output-only] Whether the query result was fetched from the
- // query cache.
- CacheHit bool `json:"cacheHit,omitempty"`
-
- // TotalBytesProcessed: [Output-only] Total bytes processed for this
- // job.
- TotalBytesProcessed int64 `json:"totalBytesProcessed,omitempty,string"`
-}
-
-type JobStatistics3 struct {
- // InputFileBytes: [Output-only] Number of bytes of source data in a
-	// load job.
- InputFileBytes int64 `json:"inputFileBytes,omitempty,string"`
-
- // InputFiles: [Output-only] Number of source files in a load job.
- InputFiles int64 `json:"inputFiles,omitempty,string"`
-
- // OutputBytes: [Output-only] Size of the loaded data in bytes. Note
- // that while an import job is in the running state, this value may
- // change.
- OutputBytes int64 `json:"outputBytes,omitempty,string"`
-
- // OutputRows: [Output-only] Number of rows imported in a load job. Note
- // that while an import job is in the running state, this value may
- // change.
- OutputRows int64 `json:"outputRows,omitempty,string"`
-}
-
-type JobStatistics4 struct {
- // DestinationUriFileCounts: [Experimental] Number of files per
- // destination URI or URI pattern specified in the extract
- // configuration. These values will be in the same order as the URIs
- // specified in the 'destinationUris' field.
- DestinationUriFileCounts googleapi.Int64s `json:"destinationUriFileCounts,omitempty"`
-}
-
-type JobStatus struct {
- // ErrorResult: [Output-only] Final error result of the job. If present,
- // indicates that the job has completed and was unsuccessful.
- ErrorResult *ErrorProto `json:"errorResult,omitempty"`
-
- // Errors: [Output-only] All errors encountered during the running of
- // the job. Errors here do not necessarily mean that the job has
- // completed or was unsuccessful.
- Errors []*ErrorProto `json:"errors,omitempty"`
-
- // State: [Output-only] Running state of the job.
- State string `json:"state,omitempty"`
-}
-
-type JsonValue interface{}
-
-type ProjectList struct {
- // Etag: A hash of the page of results
- Etag string `json:"etag,omitempty"`
-
- // Kind: The type of list.
- Kind string `json:"kind,omitempty"`
-
- // NextPageToken: A token to request the next page of results.
- NextPageToken string `json:"nextPageToken,omitempty"`
-
- // Projects: Projects to which you have at least READ access.
- Projects []*ProjectListProjects `json:"projects,omitempty"`
-
- // TotalItems: The total number of projects in the list.
- TotalItems int64 `json:"totalItems,omitempty"`
-}
-
-type ProjectListProjects struct {
- // FriendlyName: A descriptive name for this project.
- FriendlyName string `json:"friendlyName,omitempty"`
-
- // Id: An opaque ID of this project.
- Id string `json:"id,omitempty"`
-
- // Kind: The resource type.
- Kind string `json:"kind,omitempty"`
-
- // NumericId: The numeric ID of this project.
- NumericId uint64 `json:"numericId,omitempty,string"`
-
- // ProjectReference: A unique reference to this project.
- ProjectReference *ProjectReference `json:"projectReference,omitempty"`
-}
-
-type ProjectReference struct {
- // ProjectId: [Required] ID of the project. Can be either the numeric ID
- // or the assigned ID of the project.
- ProjectId string `json:"projectId,omitempty"`
-}
-
-type QueryRequest struct {
- // DefaultDataset: [Optional] Specifies the default datasetId and
- // projectId to assume for any unqualified table names in the query. If
- // not set, all table names in the query string must be qualified in the
- // format 'datasetId.tableId'.
- DefaultDataset *DatasetReference `json:"defaultDataset,omitempty"`
-
- // DryRun: [Optional] If set, don't actually run this job. A valid query
- // will return a mostly empty response with some processing statistics,
- // while an invalid query will return the same error it would if it
- // wasn't a dry run.
- DryRun bool `json:"dryRun,omitempty"`
-
- // Kind: The resource type of the request.
- Kind string `json:"kind,omitempty"`
-
- // MaxResults: [Optional] The maximum number of rows of data to return
- // per page of results. Setting this flag to a small value such as 1000
- // and then paging through results might improve reliability when the
- // query result set is large. In addition to this limit, responses are
- // also limited to 10 MB. By default, there is no maximum row count, and
- // only the byte limit applies.
- MaxResults int64 `json:"maxResults,omitempty"`
-
- // PreserveNulls: [Deprecated] This property is deprecated.
- PreserveNulls bool `json:"preserveNulls,omitempty"`
-
- // Query: [Required] A query string, following the BigQuery query
- // syntax, of the query to execute. Example: "SELECT count(f1) FROM
- // [myProjectId:myDatasetId.myTableId]".
- Query string `json:"query,omitempty"`
-
- // TimeoutMs: [Optional] How long to wait for the query to complete, in
- // milliseconds, before the request times out and returns. Note that
- // this is only a timeout for the request, not the query. If the query
- // takes longer to run than the timeout value, the call returns without
- // any results and with the 'jobComplete' flag set to false. You can
- // call GetQueryResults() to wait for the query to complete and read the
- // results. The default value is 10000 milliseconds (10 seconds).
- TimeoutMs int64 `json:"timeoutMs,omitempty"`
-
- // UseQueryCache: [Optional] Whether to look for the result in the query
- // cache. The query cache is a best-effort cache that will be flushed
- // whenever tables in the query are modified. The default value is true.
- UseQueryCache bool `json:"useQueryCache,omitempty"`
-}
-
-type QueryResponse struct {
- // CacheHit: Whether the query result was fetched from the query cache.
- CacheHit bool `json:"cacheHit,omitempty"`
-
- // JobComplete: Whether the query has completed or not. If rows or
- // totalRows are present, this will always be true. If this is false,
- // totalRows will not be available.
- JobComplete bool `json:"jobComplete,omitempty"`
-
- // JobReference: Reference to the Job that was created to run the query.
- // This field will be present even if the original request timed out, in
- // which case GetQueryResults can be used to read the results once the
- // query has completed. Since this API only returns the first page of
- // results, subsequent pages can be fetched via the same mechanism
- // (GetQueryResults).
- JobReference *JobReference `json:"jobReference,omitempty"`
-
- // Kind: The resource type.
- Kind string `json:"kind,omitempty"`
-
- // PageToken: A token used for paging results.
- PageToken string `json:"pageToken,omitempty"`
-
- // Rows: An object with as many results as can be contained within the
- // maximum permitted reply size. To get any additional rows, you can
- // call GetQueryResults and specify the jobReference returned above.
- Rows []*TableRow `json:"rows,omitempty"`
-
- // Schema: The schema of the results. Present only when the query
- // completes successfully.
- Schema *TableSchema `json:"schema,omitempty"`
-
- // TotalBytesProcessed: The total number of bytes processed for this
- // query. If this query was a dry run, this is the number of bytes that
- // would be processed if the query were run.
- TotalBytesProcessed int64 `json:"totalBytesProcessed,omitempty,string"`
-
- // TotalRows: The total number of rows in the complete query result set,
- // which can be more than the number of rows in this single page of
- // results.
- TotalRows uint64 `json:"totalRows,omitempty,string"`
-}
-
-type Table struct {
- // CreationTime: [Output-only] The time when this table was created, in
- // milliseconds since the epoch.
- CreationTime int64 `json:"creationTime,omitempty,string"`
-
- // Description: [Optional] A user-friendly description of this table.
- Description string `json:"description,omitempty"`
-
- // Etag: [Output-only] A hash of this resource.
- Etag string `json:"etag,omitempty"`
-
- // ExpirationTime: [Optional] The time when this table expires, in
- // milliseconds since the epoch. If not present, the table will persist
- // indefinitely. Expired tables will be deleted and their storage
- // reclaimed.
- ExpirationTime int64 `json:"expirationTime,omitempty,string"`
-
- // FriendlyName: [Optional] A descriptive name for this table.
- FriendlyName string `json:"friendlyName,omitempty"`
-
- // Id: [Output-only] An opaque ID uniquely identifying the table.
- Id string `json:"id,omitempty"`
-
- // Kind: [Output-only] The type of the resource.
- Kind string `json:"kind,omitempty"`
-
- // LastModifiedTime: [Output-only] The time when this table was last
- // modified, in milliseconds since the epoch.
- LastModifiedTime uint64 `json:"lastModifiedTime,omitempty,string"`
-
- // NumBytes: [Output-only] The size of the table in bytes. This property
- // is unavailable for tables that are actively receiving streaming
- // inserts.
- NumBytes int64 `json:"numBytes,omitempty,string"`
-
- // NumRows: [Output-only] The number of rows of data in this table. This
- // property is unavailable for tables that are actively receiving
- // streaming inserts.
- NumRows uint64 `json:"numRows,omitempty,string"`
-
- // Schema: [Optional] Describes the schema of this table.
- Schema *TableSchema `json:"schema,omitempty"`
-
- // SelfLink: [Output-only] A URL that can be used to access this
- // resource again.
- SelfLink string `json:"selfLink,omitempty"`
-
- // TableReference: [Required] Reference describing the ID of this table.
- TableReference *TableReference `json:"tableReference,omitempty"`
-
- // Type: [Output-only] Describes the table type. The following values
- // are supported: TABLE: A normal BigQuery table. VIEW: A virtual table
- // defined by a SQL query. The default value is TABLE.
- Type string `json:"type,omitempty"`
-
- // View: [Optional] The view definition.
- View *ViewDefinition `json:"view,omitempty"`
-}
-
-type TableCell struct {
- V interface{} `json:"v,omitempty"`
-}
-
-type TableDataInsertAllRequest struct {
- // IgnoreUnknownValues: [Optional] Accept rows that contain values that
- // do not match the schema. The unknown values are ignored. Default is
- // false, which treats unknown values as errors.
- IgnoreUnknownValues bool `json:"ignoreUnknownValues,omitempty"`
-
- // Kind: The resource type of the response.
- Kind string `json:"kind,omitempty"`
-
- // Rows: The rows to insert.
- Rows []*TableDataInsertAllRequestRows `json:"rows,omitempty"`
-
- // SkipInvalidRows: [Optional] Insert all valid rows of a request, even
- // if invalid rows exist. The default value is false, which causes the
- // entire request to fail if any invalid rows exist.
- SkipInvalidRows bool `json:"skipInvalidRows,omitempty"`
-}
-
-type TableDataInsertAllRequestRows struct {
- // InsertId: [Optional] A unique ID for each row. BigQuery uses this
- // property to detect duplicate insertion requests on a best-effort
- // basis.
- InsertId string `json:"insertId,omitempty"`
-
- // Json: [Required] A JSON object that contains a row of data. The
- // object's properties and values must match the destination table's
- // schema.
- Json map[string]JsonValue `json:"json,omitempty"`
-}
-
-type TableDataInsertAllResponse struct {
- // InsertErrors: An array of errors for rows that were not inserted.
- InsertErrors []*TableDataInsertAllResponseInsertErrors `json:"insertErrors,omitempty"`
-
- // Kind: The resource type of the response.
- Kind string `json:"kind,omitempty"`
-}
-
-type TableDataInsertAllResponseInsertErrors struct {
- // Errors: Error information for the row indicated by the index
- // property.
- Errors []*ErrorProto `json:"errors,omitempty"`
-
- // Index: The index of the row that error applies to.
- Index int64 `json:"index,omitempty"`
-}
-
-type TableDataList struct {
- // Etag: A hash of this page of results.
- Etag string `json:"etag,omitempty"`
-
- // Kind: The resource type of the response.
- Kind string `json:"kind,omitempty"`
-
- // PageToken: A token used for paging results. Providing this token
- // instead of the startIndex parameter can help you retrieve stable
- // results when an underlying table is changing.
- PageToken string `json:"pageToken,omitempty"`
-
- // Rows: Rows of results.
- Rows []*TableRow `json:"rows,omitempty"`
-
- // TotalRows: The total number of rows in the complete table.
- TotalRows int64 `json:"totalRows,omitempty,string"`
-}
-
-type TableFieldSchema struct {
- // Description: [Optional] The field description. The maximum length is
- // 16K characters.
- Description string `json:"description,omitempty"`
-
- // Fields: [Optional] Describes the nested schema fields if the type
- // property is set to RECORD.
- Fields []*TableFieldSchema `json:"fields,omitempty"`
-
- // Mode: [Optional] The field mode. Possible values include NULLABLE,
- // REQUIRED and REPEATED. The default value is NULLABLE.
- Mode string `json:"mode,omitempty"`
-
- // Name: [Required] The field name. The name must contain only letters
- // (a-z, A-Z), numbers (0-9), or underscores (_), and must start with a
- // letter or underscore. The maximum length is 128 characters.
- Name string `json:"name,omitempty"`
-
- // Type: [Required] The field data type. Possible values include STRING,
- // INTEGER, FLOAT, BOOLEAN, TIMESTAMP or RECORD (where RECORD indicates
- // that the field contains a nested schema).
- Type string `json:"type,omitempty"`
-}
-
-type TableList struct {
- // Etag: A hash of this page of results.
- Etag string `json:"etag,omitempty"`
-
- // Kind: The type of list.
- Kind string `json:"kind,omitempty"`
-
- // NextPageToken: A token to request the next page of results.
- NextPageToken string `json:"nextPageToken,omitempty"`
-
- // Tables: Tables in the requested dataset.
- Tables []*TableListTables `json:"tables,omitempty"`
-
- // TotalItems: The total number of tables in the dataset.
- TotalItems int64 `json:"totalItems,omitempty"`
-}
-
-type TableListTables struct {
- // FriendlyName: The user-friendly name for this table.
- FriendlyName string `json:"friendlyName,omitempty"`
-
- // Id: An opaque ID of the table
- Id string `json:"id,omitempty"`
-
- // Kind: The resource type.
- Kind string `json:"kind,omitempty"`
-
- // TableReference: A reference uniquely identifying the table.
- TableReference *TableReference `json:"tableReference,omitempty"`
-
- // Type: The type of table. Possible values are: TABLE, VIEW.
- Type string `json:"type,omitempty"`
-}
-
-type TableReference struct {
- // DatasetId: [Required] The ID of the dataset containing this table.
- DatasetId string `json:"datasetId,omitempty"`
-
- // ProjectId: [Required] The ID of the project containing this table.
- ProjectId string `json:"projectId,omitempty"`
-
- // TableId: [Required] The ID of the table. The ID must contain only
- // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum
- // length is 1,024 characters.
- TableId string `json:"tableId,omitempty"`
-}
-
-type TableRow struct {
- F []*TableCell `json:"f,omitempty"`
-}
-
-type TableSchema struct {
- // Fields: Describes the fields in a table.
- Fields []*TableFieldSchema `json:"fields,omitempty"`
-}
-
-type ViewDefinition struct {
- // Query: [Required] A query that BigQuery executes when the view is
- // referenced.
- Query string `json:"query,omitempty"`
-}
-
-// method id "bigquery.datasets.delete":
-
-type DatasetsDeleteCall struct {
- s *Service
- projectId string
- datasetId string
- opt_ map[string]interface{}
-}
-
-// Delete: Deletes the dataset specified by the datasetId value. Before
-// you can delete a dataset, you must delete all its tables, either
-// manually or by specifying deleteContents. Immediately after deletion,
-// you can create another dataset with the same name.
-func (r *DatasetsService) Delete(projectId string, datasetId string) *DatasetsDeleteCall {
- c := &DatasetsDeleteCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- return c
-}
-
-// DeleteContents sets the optional parameter "deleteContents": If True,
-// delete all the tables in the dataset. If False and the dataset
-// contains tables, the request will fail. Default is False
-func (c *DatasetsDeleteCall) DeleteContents(deleteContents bool) *DatasetsDeleteCall {
- c.opt_["deleteContents"] = deleteContents
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *DatasetsDeleteCall) Fields(s ...googleapi.Field) *DatasetsDeleteCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *DatasetsDeleteCall) Do() error {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["deleteContents"]; ok {
- params.Set("deleteContents", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("DELETE", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return err
- }
- return nil
- // {
- // "description": "Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name.",
- // "httpMethod": "DELETE",
- // "id": "bigquery.datasets.delete",
- // "parameterOrder": [
- // "projectId",
- // "datasetId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of dataset being deleted",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "deleteContents": {
- // "description": "If True, delete all the tables in the dataset. If False and the dataset contains tables, the request will fail. Default is False",
- // "location": "query",
- // "type": "boolean"
- // },
- // "projectId": {
- // "description": "Project ID of the dataset being deleted",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}",
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.datasets.get":
-
-type DatasetsGetCall struct {
- s *Service
- projectId string
- datasetId string
- opt_ map[string]interface{}
-}
-
-// Get: Returns the dataset specified by datasetID.
-func (r *DatasetsService) Get(projectId string, datasetId string) *DatasetsGetCall {
- c := &DatasetsGetCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *DatasetsGetCall) Fields(s ...googleapi.Field) *DatasetsGetCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *DatasetsGetCall) Do() (*Dataset, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Dataset
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Returns the dataset specified by datasetID.",
- // "httpMethod": "GET",
- // "id": "bigquery.datasets.get",
- // "parameterOrder": [
- // "projectId",
- // "datasetId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the requested dataset",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the requested dataset",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}",
- // "response": {
- // "$ref": "Dataset"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.datasets.insert":
-
-type DatasetsInsertCall struct {
- s *Service
- projectId string
- dataset *Dataset
- opt_ map[string]interface{}
-}
-
-// Insert: Creates a new empty dataset.
-func (r *DatasetsService) Insert(projectId string, dataset *Dataset) *DatasetsInsertCall {
- c := &DatasetsInsertCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.dataset = dataset
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *DatasetsInsertCall) Fields(s ...googleapi.Field) *DatasetsInsertCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *DatasetsInsertCall) Do() (*Dataset, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.dataset)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Dataset
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Creates a new empty dataset.",
- // "httpMethod": "POST",
- // "id": "bigquery.datasets.insert",
- // "parameterOrder": [
- // "projectId"
- // ],
- // "parameters": {
- // "projectId": {
- // "description": "Project ID of the new dataset",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets",
- // "request": {
- // "$ref": "Dataset"
- // },
- // "response": {
- // "$ref": "Dataset"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.datasets.list":
-
-type DatasetsListCall struct {
- s *Service
- projectId string
- opt_ map[string]interface{}
-}
-
-// List: Lists all datasets in the specified project to which you have
-// been granted the READER dataset role.
-func (r *DatasetsService) List(projectId string) *DatasetsListCall {
- c := &DatasetsListCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- return c
-}
-
-// All sets the optional parameter "all": Whether to list all datasets,
-// including hidden ones
-func (c *DatasetsListCall) All(all bool) *DatasetsListCall {
- c.opt_["all"] = all
- return c
-}
-
-// MaxResults sets the optional parameter "maxResults": The maximum
-// number of results to return
-func (c *DatasetsListCall) MaxResults(maxResults int64) *DatasetsListCall {
- c.opt_["maxResults"] = maxResults
- return c
-}
-
-// PageToken sets the optional parameter "pageToken": Page token,
-// returned by a previous call, to request the next page of results
-func (c *DatasetsListCall) PageToken(pageToken string) *DatasetsListCall {
- c.opt_["pageToken"] = pageToken
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *DatasetsListCall) Fields(s ...googleapi.Field) *DatasetsListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *DatasetsListCall) Do() (*DatasetList, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["all"]; ok {
- params.Set("all", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["maxResults"]; ok {
- params.Set("maxResults", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["pageToken"]; ok {
- params.Set("pageToken", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *DatasetList
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists all datasets in the specified project to which you have been granted the READER dataset role.",
- // "httpMethod": "GET",
- // "id": "bigquery.datasets.list",
- // "parameterOrder": [
- // "projectId"
- // ],
- // "parameters": {
- // "all": {
- // "description": "Whether to list all datasets, including hidden ones",
- // "location": "query",
- // "type": "boolean"
- // },
- // "maxResults": {
- // "description": "The maximum number of results to return",
- // "format": "uint32",
- // "location": "query",
- // "type": "integer"
- // },
- // "pageToken": {
- // "description": "Page token, returned by a previous call, to request the next page of results",
- // "location": "query",
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the datasets to be listed",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets",
- // "response": {
- // "$ref": "DatasetList"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.datasets.patch":
-
-type DatasetsPatchCall struct {
- s *Service
- projectId string
- datasetId string
- dataset *Dataset
- opt_ map[string]interface{}
-}
-
-// Patch: Updates information in an existing dataset. The update method
-// replaces the entire dataset resource, whereas the patch method only
-// replaces fields that are provided in the submitted dataset resource.
-// This method supports patch semantics.
-func (r *DatasetsService) Patch(projectId string, datasetId string, dataset *Dataset) *DatasetsPatchCall {
- c := &DatasetsPatchCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- c.dataset = dataset
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *DatasetsPatchCall) Fields(s ...googleapi.Field) *DatasetsPatchCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *DatasetsPatchCall) Do() (*Dataset, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.dataset)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("PATCH", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Dataset
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. This method supports patch semantics.",
- // "httpMethod": "PATCH",
- // "id": "bigquery.datasets.patch",
- // "parameterOrder": [
- // "projectId",
- // "datasetId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the dataset being updated",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the dataset being updated",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}",
- // "request": {
- // "$ref": "Dataset"
- // },
- // "response": {
- // "$ref": "Dataset"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.datasets.update":
-
-type DatasetsUpdateCall struct {
- s *Service
- projectId string
- datasetId string
- dataset *Dataset
- opt_ map[string]interface{}
-}
-
-// Update: Updates information in an existing dataset. The update method
-// replaces the entire dataset resource, whereas the patch method only
-// replaces fields that are provided in the submitted dataset resource.
-func (r *DatasetsService) Update(projectId string, datasetId string, dataset *Dataset) *DatasetsUpdateCall {
- c := &DatasetsUpdateCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- c.dataset = dataset
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *DatasetsUpdateCall) Fields(s ...googleapi.Field) *DatasetsUpdateCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *DatasetsUpdateCall) Do() (*Dataset, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.dataset)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("PUT", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Dataset
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource.",
- // "httpMethod": "PUT",
- // "id": "bigquery.datasets.update",
- // "parameterOrder": [
- // "projectId",
- // "datasetId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the dataset being updated",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the dataset being updated",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}",
- // "request": {
- // "$ref": "Dataset"
- // },
- // "response": {
- // "$ref": "Dataset"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.jobs.get":
-
-type JobsGetCall struct {
- s *Service
- projectId string
- jobId string
- opt_ map[string]interface{}
-}
-
-// Get: Returns information about a specific job. Job information is
-// available for a six month period after creation. Requires that you're
-// the person who ran the job, or have the Is Owner project role.
-func (r *JobsService) Get(projectId string, jobId string) *JobsGetCall {
- c := &JobsGetCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.jobId = jobId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *JobsGetCall) Fields(s ...googleapi.Field) *JobsGetCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *JobsGetCall) Do() (*Job, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/jobs/{jobId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "jobId": c.jobId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Job
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role.",
- // "httpMethod": "GET",
- // "id": "bigquery.jobs.get",
- // "parameterOrder": [
- // "projectId",
- // "jobId"
- // ],
- // "parameters": {
- // "jobId": {
- // "description": "Job ID of the requested job",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the requested job",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/jobs/{jobId}",
- // "response": {
- // "$ref": "Job"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.jobs.getQueryResults":
-
-type JobsGetQueryResultsCall struct {
- s *Service
- projectId string
- jobId string
- opt_ map[string]interface{}
-}
-
-// GetQueryResults: Retrieves the results of a query job.
-func (r *JobsService) GetQueryResults(projectId string, jobId string) *JobsGetQueryResultsCall {
- c := &JobsGetQueryResultsCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.jobId = jobId
- return c
-}
-
-// MaxResults sets the optional parameter "maxResults": Maximum number
-// of results to read
-func (c *JobsGetQueryResultsCall) MaxResults(maxResults int64) *JobsGetQueryResultsCall {
- c.opt_["maxResults"] = maxResults
- return c
-}
-
-// PageToken sets the optional parameter "pageToken": Page token,
-// returned by a previous call, to request the next page of results
-func (c *JobsGetQueryResultsCall) PageToken(pageToken string) *JobsGetQueryResultsCall {
- c.opt_["pageToken"] = pageToken
- return c
-}
-
-// StartIndex sets the optional parameter "startIndex": Zero-based index
-// of the starting row
-func (c *JobsGetQueryResultsCall) StartIndex(startIndex uint64) *JobsGetQueryResultsCall {
- c.opt_["startIndex"] = startIndex
- return c
-}
-
-// TimeoutMs sets the optional parameter "timeoutMs": How long to wait
-// for the query to complete, in milliseconds, before returning. Default
-// is to return immediately. If the timeout passes before the job
-// completes, the request will fail with a TIMEOUT error
-func (c *JobsGetQueryResultsCall) TimeoutMs(timeoutMs int64) *JobsGetQueryResultsCall {
- c.opt_["timeoutMs"] = timeoutMs
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *JobsGetQueryResultsCall) Fields(s ...googleapi.Field) *JobsGetQueryResultsCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *JobsGetQueryResultsCall) Do() (*GetQueryResultsResponse, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["maxResults"]; ok {
- params.Set("maxResults", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["pageToken"]; ok {
- params.Set("pageToken", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["startIndex"]; ok {
- params.Set("startIndex", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["timeoutMs"]; ok {
- params.Set("timeoutMs", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/queries/{jobId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "jobId": c.jobId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *GetQueryResultsResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Retrieves the results of a query job.",
- // "httpMethod": "GET",
- // "id": "bigquery.jobs.getQueryResults",
- // "parameterOrder": [
- // "projectId",
- // "jobId"
- // ],
- // "parameters": {
- // "jobId": {
- // "description": "Job ID of the query job",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "maxResults": {
- // "description": "Maximum number of results to read",
- // "format": "uint32",
- // "location": "query",
- // "type": "integer"
- // },
- // "pageToken": {
- // "description": "Page token, returned by a previous call, to request the next page of results",
- // "location": "query",
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the query job",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "startIndex": {
- // "description": "Zero-based index of the starting row",
- // "format": "uint64",
- // "location": "query",
- // "type": "string"
- // },
- // "timeoutMs": {
- // "description": "How long to wait for the query to complete, in milliseconds, before returning. Default is to return immediately. If the timeout passes before the job completes, the request will fail with a TIMEOUT error",
- // "format": "uint32",
- // "location": "query",
- // "type": "integer"
- // }
- // },
- // "path": "projects/{projectId}/queries/{jobId}",
- // "response": {
- // "$ref": "GetQueryResultsResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.jobs.insert":
-
-type JobsInsertCall struct {
- s *Service
- projectId string
- job *Job
- opt_ map[string]interface{}
- media_ io.Reader
- resumable_ googleapi.SizeReaderAt
- mediaType_ string
- ctx_ context.Context
- protocol_ string
-}
-
-// Insert: Starts a new asynchronous job. Requires the Can View project
-// role.
-func (r *JobsService) Insert(projectId string, job *Job) *JobsInsertCall {
- c := &JobsInsertCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.job = job
- return c
-}
-
-// Media specifies the media to upload in a single chunk.
-// At most one of Media and ResumableMedia may be set.
-func (c *JobsInsertCall) Media(r io.Reader) *JobsInsertCall {
- c.media_ = r
- c.protocol_ = "multipart"
- return c
-}
-
-// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
-// At most one of Media and ResumableMedia may be set.
-// mediaType identifies the MIME media type of the upload, such as "image/png".
-// If mediaType is "", it will be auto-detected.
-func (c *JobsInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *JobsInsertCall {
- c.ctx_ = ctx
- c.resumable_ = io.NewSectionReader(r, 0, size)
- c.mediaType_ = mediaType
- c.protocol_ = "resumable"
- return c
-}
-
-// ProgressUpdater provides a callback function that will be called after every chunk.
-// It should be a low-latency function in order to not slow down the upload operation.
-// This should only be called when using ResumableMedia (as opposed to Media).
-func (c *JobsInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *JobsInsertCall {
- c.opt_["progressUpdater"] = pu
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *JobsInsertCall) Fields(s ...googleapi.Field) *JobsInsertCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *JobsInsertCall) Do() (*Job, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.job)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/jobs")
- var progressUpdater_ googleapi.ProgressUpdater
- if v, ok := c.opt_["progressUpdater"]; ok {
- if pu, ok := v.(googleapi.ProgressUpdater); ok {
- progressUpdater_ = pu
- }
- }
- if c.media_ != nil || c.resumable_ != nil {
- urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
- params.Set("uploadType", c.protocol_)
- }
- urls += "?" + params.Encode()
- if c.protocol_ != "resumable" {
- var cancel func()
- cancel, _ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
- if cancel != nil {
- defer cancel()
- }
- }
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- })
- if c.protocol_ == "resumable" {
- req.ContentLength = 0
- if c.mediaType_ == "" {
- c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
- }
- req.Header.Set("X-Upload-Content-Type", c.mediaType_)
- req.Body = nil
- } else {
- req.Header.Set("Content-Type", ctype)
- }
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- if c.protocol_ == "resumable" {
- loc := res.Header.Get("Location")
- rx := &googleapi.ResumableUpload{
- Client: c.s.client,
- UserAgent: c.s.userAgent(),
- URI: loc,
- Media: c.resumable_,
- MediaType: c.mediaType_,
- ContentLength: c.resumable_.Size(),
- Callback: progressUpdater_,
- }
- res, err = rx.Upload(c.ctx_)
- if err != nil {
- return nil, err
- }
- defer res.Body.Close()
- }
- var ret *Job
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Starts a new asynchronous job. Requires the Can View project role.",
- // "httpMethod": "POST",
- // "id": "bigquery.jobs.insert",
- // "mediaUpload": {
- // "accept": [
- // "*/*"
- // ],
- // "protocols": {
- // "resumable": {
- // "multipart": true,
- // "path": "/resumable/upload/bigquery/v2/projects/{projectId}/jobs"
- // },
- // "simple": {
- // "multipart": true,
- // "path": "/upload/bigquery/v2/projects/{projectId}/jobs"
- // }
- // }
- // },
- // "parameterOrder": [
- // "projectId"
- // ],
- // "parameters": {
- // "projectId": {
- // "description": "Project ID of the project that will be billed for the job",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/jobs",
- // "request": {
- // "$ref": "Job"
- // },
- // "response": {
- // "$ref": "Job"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/devstorage.full_control",
- // "https://www.googleapis.com/auth/devstorage.read_only",
- // "https://www.googleapis.com/auth/devstorage.read_write"
- // ],
- // "supportsMediaUpload": true
- // }
-
-}
-
-// method id "bigquery.jobs.list":
-
-type JobsListCall struct {
- s *Service
- projectId string
- opt_ map[string]interface{}
-}
-
-// List: Lists all jobs that you started in the specified project. The
-// job list returns in reverse chronological order of when the jobs were
-// created, starting with the most recent job created. Requires the Can
-// View project role, or the Is Owner project role if you set the
-// allUsers property.
-func (r *JobsService) List(projectId string) *JobsListCall {
- c := &JobsListCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- return c
-}
-
-// AllUsers sets the optional parameter "allUsers": Whether to display
-// jobs owned by all users in the project. Default false
-func (c *JobsListCall) AllUsers(allUsers bool) *JobsListCall {
- c.opt_["allUsers"] = allUsers
- return c
-}
-
-// MaxResults sets the optional parameter "maxResults": Maximum number
-// of results to return
-func (c *JobsListCall) MaxResults(maxResults int64) *JobsListCall {
- c.opt_["maxResults"] = maxResults
- return c
-}
-
-// PageToken sets the optional parameter "pageToken": Page token,
-// returned by a previous call, to request the next page of results
-func (c *JobsListCall) PageToken(pageToken string) *JobsListCall {
- c.opt_["pageToken"] = pageToken
- return c
-}
-
-// Projection sets the optional parameter "projection": Restrict
-// information returned to a set of selected fields
-//
-// Possible values:
-// "full" - Includes all job data
-// "minimal" - Does not include the job configuration
-func (c *JobsListCall) Projection(projection string) *JobsListCall {
- c.opt_["projection"] = projection
- return c
-}
-
-// StateFilter sets the optional parameter "stateFilter": Filter for job
-// state
-//
-// Possible values:
-// "done" - Finished jobs
-// "pending" - Pending jobs
-// "running" - Running jobs
-func (c *JobsListCall) StateFilter(stateFilter string) *JobsListCall {
- c.opt_["stateFilter"] = stateFilter
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *JobsListCall) Fields(s ...googleapi.Field) *JobsListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *JobsListCall) Do() (*JobList, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["allUsers"]; ok {
- params.Set("allUsers", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["maxResults"]; ok {
- params.Set("maxResults", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["pageToken"]; ok {
- params.Set("pageToken", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["projection"]; ok {
- params.Set("projection", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["stateFilter"]; ok {
- params.Set("stateFilter", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/jobs")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *JobList
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists all jobs that you started in the specified project. The job list returns in reverse chronological order of when the jobs were created, starting with the most recent job created. Requires the Can View project role, or the Is Owner project role if you set the allUsers property.",
- // "httpMethod": "GET",
- // "id": "bigquery.jobs.list",
- // "parameterOrder": [
- // "projectId"
- // ],
- // "parameters": {
- // "allUsers": {
- // "description": "Whether to display jobs owned by all users in the project. Default false",
- // "location": "query",
- // "type": "boolean"
- // },
- // "maxResults": {
- // "description": "Maximum number of results to return",
- // "format": "uint32",
- // "location": "query",
- // "type": "integer"
- // },
- // "pageToken": {
- // "description": "Page token, returned by a previous call, to request the next page of results",
- // "location": "query",
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the jobs to list",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projection": {
- // "description": "Restrict information returned to a set of selected fields",
- // "enum": [
- // "full",
- // "minimal"
- // ],
- // "enumDescriptions": [
- // "Includes all job data",
- // "Does not include the job configuration"
- // ],
- // "location": "query",
- // "type": "string"
- // },
- // "stateFilter": {
- // "description": "Filter for job state",
- // "enum": [
- // "done",
- // "pending",
- // "running"
- // ],
- // "enumDescriptions": [
- // "Finished jobs",
- // "Pending jobs",
- // "Running jobs"
- // ],
- // "location": "query",
- // "repeated": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/jobs",
- // "response": {
- // "$ref": "JobList"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.jobs.query":
-
-type JobsQueryCall struct {
- s *Service
- projectId string
- queryrequest *QueryRequest
- opt_ map[string]interface{}
-}
-
-// Query: Runs a BigQuery SQL query synchronously and returns query
-// results if the query completes within a specified timeout.
-func (r *JobsService) Query(projectId string, queryrequest *QueryRequest) *JobsQueryCall {
- c := &JobsQueryCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.queryrequest = queryrequest
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *JobsQueryCall) Fields(s ...googleapi.Field) *JobsQueryCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *JobsQueryCall) Do() (*QueryResponse, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.queryrequest)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/queries")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *QueryResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.",
- // "httpMethod": "POST",
- // "id": "bigquery.jobs.query",
- // "parameterOrder": [
- // "projectId"
- // ],
- // "parameters": {
- // "projectId": {
- // "description": "Project ID of the project billed for the query",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/queries",
- // "request": {
- // "$ref": "QueryRequest"
- // },
- // "response": {
- // "$ref": "QueryResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.projects.list":
-
-type ProjectsListCall struct {
- s *Service
- opt_ map[string]interface{}
-}
-
-// List: Lists all projects to which you have been granted any project
-// role.
-func (r *ProjectsService) List() *ProjectsListCall {
- c := &ProjectsListCall{s: r.s, opt_: make(map[string]interface{})}
- return c
-}
-
-// MaxResults sets the optional parameter "maxResults": Maximum number
-// of results to return
-func (c *ProjectsListCall) MaxResults(maxResults int64) *ProjectsListCall {
- c.opt_["maxResults"] = maxResults
- return c
-}
-
-// PageToken sets the optional parameter "pageToken": Page token,
-// returned by a previous call, to request the next page of results
-func (c *ProjectsListCall) PageToken(pageToken string) *ProjectsListCall {
- c.opt_["pageToken"] = pageToken
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsListCall) Fields(s ...googleapi.Field) *ProjectsListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsListCall) Do() (*ProjectList, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["maxResults"]; ok {
- params.Set("maxResults", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["pageToken"]; ok {
- params.Set("pageToken", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.SetOpaque(req.URL)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *ProjectList
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists all projects to which you have been granted any project role.",
- // "httpMethod": "GET",
- // "id": "bigquery.projects.list",
- // "parameters": {
- // "maxResults": {
- // "description": "Maximum number of results to return",
- // "format": "uint32",
- // "location": "query",
- // "type": "integer"
- // },
- // "pageToken": {
- // "description": "Page token, returned by a previous call, to request the next page of results",
- // "location": "query",
- // "type": "string"
- // }
- // },
- // "path": "projects",
- // "response": {
- // "$ref": "ProjectList"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.tabledata.insertAll":
-
-type TabledataInsertAllCall struct {
- s *Service
- projectId string
- datasetId string
- tableId string
- tabledatainsertallrequest *TableDataInsertAllRequest
- opt_ map[string]interface{}
-}
-
-// InsertAll: Streams data into BigQuery one record at a time without
-// needing to run a load job. Requires the WRITER dataset role.
-func (r *TabledataService) InsertAll(projectId string, datasetId string, tableId string, tabledatainsertallrequest *TableDataInsertAllRequest) *TabledataInsertAllCall {
- c := &TabledataInsertAllCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- c.tableId = tableId
- c.tabledatainsertallrequest = tabledatainsertallrequest
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *TabledataInsertAllCall) Fields(s ...googleapi.Field) *TabledataInsertAllCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *TabledataInsertAllCall) Do() (*TableDataInsertAllResponse, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.tabledatainsertallrequest)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}/tables/{tableId}/insertAll")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- "tableId": c.tableId,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *TableDataInsertAllResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Streams data into BigQuery one record at a time without needing to run a load job. Requires the WRITER dataset role.",
- // "httpMethod": "POST",
- // "id": "bigquery.tabledata.insertAll",
- // "parameterOrder": [
- // "projectId",
- // "datasetId",
- // "tableId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the destination table.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the destination table.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "tableId": {
- // "description": "Table ID of the destination table.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}/insertAll",
- // "request": {
- // "$ref": "TableDataInsertAllRequest"
- // },
- // "response": {
- // "$ref": "TableDataInsertAllResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/bigquery.insertdata",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.tabledata.list":
-
-type TabledataListCall struct {
- s *Service
- projectId string
- datasetId string
- tableId string
- opt_ map[string]interface{}
-}
-
-// List: Retrieves table data from a specified set of rows. Requires the
-// READER dataset role.
-func (r *TabledataService) List(projectId string, datasetId string, tableId string) *TabledataListCall {
- c := &TabledataListCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- c.tableId = tableId
- return c
-}
-
-// MaxResults sets the optional parameter "maxResults": Maximum number
-// of results to return
-func (c *TabledataListCall) MaxResults(maxResults int64) *TabledataListCall {
- c.opt_["maxResults"] = maxResults
- return c
-}
-
-// PageToken sets the optional parameter "pageToken": Page token,
-// returned by a previous call, identifying the result set
-func (c *TabledataListCall) PageToken(pageToken string) *TabledataListCall {
- c.opt_["pageToken"] = pageToken
- return c
-}
-
-// StartIndex sets the optional parameter "startIndex": Zero-based index
-// of the starting row to read
-func (c *TabledataListCall) StartIndex(startIndex uint64) *TabledataListCall {
- c.opt_["startIndex"] = startIndex
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *TabledataListCall) Fields(s ...googleapi.Field) *TabledataListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *TabledataListCall) Do() (*TableDataList, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["maxResults"]; ok {
- params.Set("maxResults", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["pageToken"]; ok {
- params.Set("pageToken", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["startIndex"]; ok {
- params.Set("startIndex", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}/tables/{tableId}/data")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- "tableId": c.tableId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *TableDataList
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Retrieves table data from a specified set of rows. Requires the READER dataset role.",
- // "httpMethod": "GET",
- // "id": "bigquery.tabledata.list",
- // "parameterOrder": [
- // "projectId",
- // "datasetId",
- // "tableId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the table to read",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "maxResults": {
- // "description": "Maximum number of results to return",
- // "format": "uint32",
- // "location": "query",
- // "type": "integer"
- // },
- // "pageToken": {
- // "description": "Page token, returned by a previous call, identifying the result set",
- // "location": "query",
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the table to read",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "startIndex": {
- // "description": "Zero-based index of the starting row to read",
- // "format": "uint64",
- // "location": "query",
- // "type": "string"
- // },
- // "tableId": {
- // "description": "Table ID of the table to read",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}/data",
- // "response": {
- // "$ref": "TableDataList"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.tables.delete":
-
-type TablesDeleteCall struct {
- s *Service
- projectId string
- datasetId string
- tableId string
- opt_ map[string]interface{}
-}
-
-// Delete: Deletes the table specified by tableId from the dataset. If
-// the table contains data, all the data will be deleted.
-func (r *TablesService) Delete(projectId string, datasetId string, tableId string) *TablesDeleteCall {
- c := &TablesDeleteCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- c.tableId = tableId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *TablesDeleteCall) Fields(s ...googleapi.Field) *TablesDeleteCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *TablesDeleteCall) Do() error {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}/tables/{tableId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("DELETE", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- "tableId": c.tableId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return err
- }
- return nil
- // {
- // "description": "Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.",
- // "httpMethod": "DELETE",
- // "id": "bigquery.tables.delete",
- // "parameterOrder": [
- // "projectId",
- // "datasetId",
- // "tableId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the table to delete",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the table to delete",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "tableId": {
- // "description": "Table ID of the table to delete",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.tables.get":
-
-type TablesGetCall struct {
- s *Service
- projectId string
- datasetId string
- tableId string
- opt_ map[string]interface{}
-}
-
-// Get: Gets the specified table resource by table ID. This method does
-// not return the data in the table, it only returns the table resource,
-// which describes the structure of this table.
-func (r *TablesService) Get(projectId string, datasetId string, tableId string) *TablesGetCall {
- c := &TablesGetCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- c.tableId = tableId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *TablesGetCall) Fields(s ...googleapi.Field) *TablesGetCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *TablesGetCall) Do() (*Table, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}/tables/{tableId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- "tableId": c.tableId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Table
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.",
- // "httpMethod": "GET",
- // "id": "bigquery.tables.get",
- // "parameterOrder": [
- // "projectId",
- // "datasetId",
- // "tableId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the requested table",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the requested table",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "tableId": {
- // "description": "Table ID of the requested table",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
- // "response": {
- // "$ref": "Table"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.tables.insert":
-
-type TablesInsertCall struct {
- s *Service
- projectId string
- datasetId string
- table *Table
- opt_ map[string]interface{}
-}
-
-// Insert: Creates a new, empty table in the dataset.
-func (r *TablesService) Insert(projectId string, datasetId string, table *Table) *TablesInsertCall {
- c := &TablesInsertCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- c.table = table
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *TablesInsertCall) Fields(s ...googleapi.Field) *TablesInsertCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *TablesInsertCall) Do() (*Table, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.table)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}/tables")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Table
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Creates a new, empty table in the dataset.",
- // "httpMethod": "POST",
- // "id": "bigquery.tables.insert",
- // "parameterOrder": [
- // "projectId",
- // "datasetId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the new table",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the new table",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}/tables",
- // "request": {
- // "$ref": "Table"
- // },
- // "response": {
- // "$ref": "Table"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.tables.list":
-
-type TablesListCall struct {
- s *Service
- projectId string
- datasetId string
- opt_ map[string]interface{}
-}
-
-// List: Lists all tables in the specified dataset. Requires the READER
-// dataset role.
-func (r *TablesService) List(projectId string, datasetId string) *TablesListCall {
- c := &TablesListCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- return c
-}
-
-// MaxResults sets the optional parameter "maxResults": Maximum number
-// of results to return
-func (c *TablesListCall) MaxResults(maxResults int64) *TablesListCall {
- c.opt_["maxResults"] = maxResults
- return c
-}
-
-// PageToken sets the optional parameter "pageToken": Page token,
-// returned by a previous call, to request the next page of results
-func (c *TablesListCall) PageToken(pageToken string) *TablesListCall {
- c.opt_["pageToken"] = pageToken
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *TablesListCall) Fields(s ...googleapi.Field) *TablesListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *TablesListCall) Do() (*TableList, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["maxResults"]; ok {
- params.Set("maxResults", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["pageToken"]; ok {
- params.Set("pageToken", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}/tables")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *TableList
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists all tables in the specified dataset. Requires the READER dataset role.",
- // "httpMethod": "GET",
- // "id": "bigquery.tables.list",
- // "parameterOrder": [
- // "projectId",
- // "datasetId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the tables to list",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "maxResults": {
- // "description": "Maximum number of results to return",
- // "format": "uint32",
- // "location": "query",
- // "type": "integer"
- // },
- // "pageToken": {
- // "description": "Page token, returned by a previous call, to request the next page of results",
- // "location": "query",
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the tables to list",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}/tables",
- // "response": {
- // "$ref": "TableList"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.tables.patch":
-
-type TablesPatchCall struct {
- s *Service
- projectId string
- datasetId string
- tableId string
- table *Table
- opt_ map[string]interface{}
-}
-
-// Patch: Updates information in an existing table. The update method
-// replaces the entire table resource, whereas the patch method only
-// replaces fields that are provided in the submitted table resource.
-// This method supports patch semantics.
-func (r *TablesService) Patch(projectId string, datasetId string, tableId string, table *Table) *TablesPatchCall {
- c := &TablesPatchCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- c.tableId = tableId
- c.table = table
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *TablesPatchCall) Fields(s ...googleapi.Field) *TablesPatchCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *TablesPatchCall) Do() (*Table, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.table)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}/tables/{tableId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("PATCH", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- "tableId": c.tableId,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Table
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource. This method supports patch semantics.",
- // "httpMethod": "PATCH",
- // "id": "bigquery.tables.patch",
- // "parameterOrder": [
- // "projectId",
- // "datasetId",
- // "tableId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the table to update",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the table to update",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "tableId": {
- // "description": "Table ID of the table to update",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
- // "request": {
- // "$ref": "Table"
- // },
- // "response": {
- // "$ref": "Table"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "bigquery.tables.update":
-
-type TablesUpdateCall struct {
- s *Service
- projectId string
- datasetId string
- tableId string
- table *Table
- opt_ map[string]interface{}
-}
-
-// Update: Updates information in an existing table. The update method
-// replaces the entire table resource, whereas the patch method only
-// replaces fields that are provided in the submitted table resource.
-func (r *TablesService) Update(projectId string, datasetId string, tableId string, table *Table) *TablesUpdateCall {
- c := &TablesUpdateCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.datasetId = datasetId
- c.tableId = tableId
- c.table = table
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *TablesUpdateCall) Fields(s ...googleapi.Field) *TablesUpdateCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *TablesUpdateCall) Do() (*Table, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.table)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/datasets/{datasetId}/tables/{tableId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("PUT", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "datasetId": c.datasetId,
- "tableId": c.tableId,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Table
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource.",
- // "httpMethod": "PUT",
- // "id": "bigquery.tables.update",
- // "parameterOrder": [
- // "projectId",
- // "datasetId",
- // "tableId"
- // ],
- // "parameters": {
- // "datasetId": {
- // "description": "Dataset ID of the table to update",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "Project ID of the table to update",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "tableId": {
- // "description": "Table ID of the table to update",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
- // "request": {
- // "$ref": "Table"
- // },
- // "response": {
- // "$ref": "Table"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/bigquery",
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
diff --git a/vendor/google.golang.org/api/container/v1beta1/container-api.json b/vendor/google.golang.org/api/container/v1beta1/container-api.json
deleted file mode 100644
index 18ef43d..0000000
--- a/vendor/google.golang.org/api/container/v1beta1/container-api.json
+++ /dev/null
@@ -1,621 +0,0 @@
-{
- "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/ReRXGEgk9TcyLgT1qFhzuzuEb7E\"",
- "discoveryVersion": "v1",
- "id": "container:v1beta1",
- "name": "container",
- "version": "v1beta1",
- "revision": "20150504",
- "title": "Google Container Engine API",
- "description": "The Google Container Engine API is used for building and managing container based applications, powered by the open source Kubernetes technology.",
- "ownerDomain": "google.com",
- "ownerName": "Google",
- "icons": {
- "x16": "http://www.google.com/images/icons/product/search-16.gif",
- "x32": "http://www.google.com/images/icons/product/search-32.gif"
- },
- "documentationLink": "https://cloud.google.com/container-engine/docs/v1beta1/",
- "protocol": "rest",
- "baseUrl": "https://www.googleapis.com/container/v1beta1/projects/",
- "basePath": "/container/v1beta1/projects/",
- "rootUrl": "https://www.googleapis.com/",
- "servicePath": "container/v1beta1/projects/",
- "batchPath": "batch",
- "parameters": {
- "alt": {
- "type": "string",
- "description": "Data format for the response.",
- "default": "json",
- "enum": [
- "json"
- ],
- "enumDescriptions": [
- "Responses with Content-Type of application/json"
- ],
- "location": "query"
- },
- "fields": {
- "type": "string",
- "description": "Selector specifying which fields to include in a partial response.",
- "location": "query"
- },
- "key": {
- "type": "string",
- "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
- "location": "query"
- },
- "oauth_token": {
- "type": "string",
- "description": "OAuth 2.0 token for the current user.",
- "location": "query"
- },
- "prettyPrint": {
- "type": "boolean",
- "description": "Returns response with indentations and line breaks.",
- "default": "true",
- "location": "query"
- },
- "quotaUser": {
- "type": "string",
- "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.",
- "location": "query"
- },
- "userIp": {
- "type": "string",
- "description": "IP address of the site where the request originates. Use this if you want to enforce per-user limits.",
- "location": "query"
- }
- },
- "auth": {
- "oauth2": {
- "scopes": {
- "https://www.googleapis.com/auth/cloud-platform": {
- "description": "View and manage your data across Google Cloud Platform services"
- }
- }
- }
- },
- "schemas": {
- "Cluster": {
- "id": "Cluster",
- "type": "object",
- "properties": {
- "clusterApiVersion": {
- "type": "string",
- "description": "The API version of the Kubernetes master and kubelets running in this cluster. Leave blank to pick up the latest stable release, or specify a version of the form \"x.y.z\". The Google Container Engine release notes lists the currently supported versions. If an incorrect version is specified, the server returns an error listing the currently supported versions."
- },
- "containerIpv4Cidr": {
- "type": "string",
- "description": "The IP address range of the container pods in this cluster, in CIDR notation (e.g. 10.96.0.0/14). Leave blank to have one automatically chosen or specify a /14 block in 10.0.0.0/8 or 172.16.0.0/12."
- },
- "creationTimestamp": {
- "type": "string",
- "description": "[Output only] The time the cluster was created, in RFC3339 text format."
- },
- "description": {
- "type": "string",
- "description": "An optional description of this cluster."
- },
- "enableCloudLogging": {
- "type": "boolean",
- "description": "Whether logs from the cluster should be made available via the Google Cloud Logging service. This includes both logs from your applications running in the cluster as well as logs from the Kubernetes components themselves."
- },
- "endpoint": {
- "type": "string",
- "description": "[Output only] The IP address of this cluster's Kubernetes master. The endpoint can be accessed from the internet at https://username:password@endpoint/.\n\nSee the masterAuth property of this resource for username and password information."
- },
- "masterAuth": {
- "$ref": "MasterAuth",
- "description": "The authentication information for accessing the master."
- },
- "name": {
- "type": "string",
- "description": "The name of this cluster. The name must be unique within this project and zone, and can be up to 40 characters with the following restrictions: \n- Lowercase letters, numbers, and hyphens only.\n- Must start with a letter.\n- Must end with a number or a letter."
- },
- "network": {
- "type": "string",
- "description": "The name of the Google Compute Engine network to which the cluster is connected."
- },
- "nodeConfig": {
- "$ref": "NodeConfig",
- "description": "The machine type and image to use for all nodes in this cluster. See the descriptions of the child properties of nodeConfig."
- },
- "nodeRoutingPrefixSize": {
- "type": "integer",
- "description": "[Output only] The size of the address space on each node for hosting containers.",
- "format": "int32"
- },
- "numNodes": {
- "type": "integer",
- "description": "The number of nodes to create in this cluster. You must ensure that your Compute Engine resource quota is sufficient for this number of instances plus one (to include the master). You must also have available firewall and routes quota.",
- "format": "int32"
- },
- "selfLink": {
- "type": "string",
- "description": "[Output only] Server-defined URL for the resource."
- },
- "servicesIpv4Cidr": {
- "type": "string",
- "description": "[Output only] The IP address range of the Kubernetes services in this cluster, in CIDR notation (e.g. 1.2.3.4/29). Service addresses are typically put in the last /16 from the container CIDR."
- },
- "status": {
- "type": "string",
- "description": "[Output only] The current status of this cluster.",
- "enum": [
- "error",
- "provisioning",
- "running",
- "stopping"
- ],
- "enumDescriptions": [
- "",
- "",
- "",
- ""
- ]
- },
- "statusMessage": {
- "type": "string",
- "description": "[Output only] Additional information about the current status of this cluster, if available."
- },
- "zone": {
- "type": "string",
- "description": "[Output only] The name of the Google Compute Engine zone in which the cluster resides."
- }
- }
- },
- "CreateClusterRequest": {
- "id": "CreateClusterRequest",
- "type": "object",
- "properties": {
- "cluster": {
- "$ref": "Cluster",
- "description": "A cluster resource."
- }
- }
- },
- "ListAggregatedClustersResponse": {
- "id": "ListAggregatedClustersResponse",
- "type": "object",
- "properties": {
- "clusters": {
- "type": "array",
- "description": "A list of clusters in the project, across all zones.",
- "items": {
- "$ref": "Cluster"
- }
- }
- }
- },
- "ListAggregatedOperationsResponse": {
- "id": "ListAggregatedOperationsResponse",
- "type": "object",
- "properties": {
- "operations": {
- "type": "array",
- "description": "A list of operations in the project, across all zones.",
- "items": {
- "$ref": "Operation"
- }
- }
- }
- },
- "ListClustersResponse": {
- "id": "ListClustersResponse",
- "type": "object",
- "properties": {
- "clusters": {
- "type": "array",
- "description": "A list of clusters in the project in the specified zone.",
- "items": {
- "$ref": "Cluster"
- }
- }
- }
- },
- "ListOperationsResponse": {
- "id": "ListOperationsResponse",
- "type": "object",
- "properties": {
- "operations": {
- "type": "array",
- "description": "A list of operations in the project in the specified zone.",
- "items": {
- "$ref": "Operation"
- }
- }
- }
- },
- "MasterAuth": {
- "id": "MasterAuth",
- "type": "object",
- "description": "The authentication information for accessing the master. Authentication is either done using HTTP basic authentication or using a bearer token.",
- "properties": {
- "bearerToken": {
- "type": "string",
- "description": "The token used to authenticate API requests to the master. The token is to be included in an HTTP Authorization Header in all requests to the master endpoint. The format of the header is: \"Authorization: Bearer \"."
- },
- "password": {
- "type": "string",
- "description": "The password to use for HTTP basic authentication when accessing the Kubernetes master endpoint. Because the master endpoint is open to the internet, you should create a strong password."
- },
- "user": {
- "type": "string",
- "description": "The username to use for HTTP basic authentication when accessing the Kubernetes master endpoint."
- }
- }
- },
- "NodeConfig": {
- "id": "NodeConfig",
- "type": "object",
- "properties": {
- "machineType": {
- "type": "string",
- "description": "The name of a Google Compute Engine machine type (e.g. n1-standard-1).\n\nIf unspecified, the default machine type is n1-standard-1."
- },
- "serviceAccounts": {
- "type": "array",
- "description": "The optional list of ServiceAccounts, each with their specified scopes, to be made available on all of the node VMs. In addition to the service accounts and scopes specified, the \"default\" account will always be created with the following scopes to ensure the correct functioning of the cluster: \n- https://www.googleapis.com/auth/compute,\n- https://www.googleapis.com/auth/devstorage.read_only",
- "items": {
- "$ref": "ServiceAccount"
- }
- },
- "sourceImage": {
- "type": "string",
- "description": "The fully-specified name of a Google Compute Engine image. For example: https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/backports-debian-7-wheezy-vYYYYMMDD (where YYYMMDD is the version date).\n\nIf specifying an image, you are responsible for ensuring its compatibility with the Debian 7 backports image. We recommend leaving this field blank to accept the default backports-debian-7-wheezy value."
- }
- }
- },
- "Operation": {
- "id": "Operation",
- "type": "object",
- "description": "Defines the operation resource. All fields are output only.",
- "properties": {
- "errorMessage": {
- "type": "string",
- "description": "If an error has occurred, a textual description of the error."
- },
- "name": {
- "type": "string",
- "description": "The server-assigned ID for the operation."
- },
- "operationType": {
- "type": "string",
- "description": "The operation type.",
- "enum": [
- "createCluster",
- "deleteCluster"
- ],
- "enumDescriptions": [
- "",
- ""
- ]
- },
- "selfLink": {
- "type": "string",
- "description": "Server-defined URL for the resource."
- },
- "status": {
- "type": "string",
- "description": "The current status of the operation.",
- "enum": [
- "done",
- "pending",
- "running"
- ],
- "enumDescriptions": [
- "",
- "",
- ""
- ]
- },
- "target": {
- "type": "string",
- "description": "[Optional] The URL of the cluster resource that this operation is associated with."
- },
- "targetLink": {
- "type": "string",
- "description": "Server-defined URL for the target of the operation."
- },
- "zone": {
- "type": "string",
- "description": "The name of the Google Compute Engine zone in which the operation is taking place."
- }
- }
- },
- "ServiceAccount": {
- "id": "ServiceAccount",
- "type": "object",
- "description": "A Compute Engine service account.",
- "properties": {
- "email": {
- "type": "string",
- "description": "Email address of the service account."
- },
- "scopes": {
- "type": "array",
- "description": "The list of scopes to be made available for this service account.",
- "items": {
- "type": "string"
- }
- }
- }
- }
- },
- "resources": {
- "projects": {
- "resources": {
- "clusters": {
- "methods": {
- "list": {
- "id": "container.projects.clusters.list",
- "path": "{projectId}/clusters",
- "httpMethod": "GET",
- "description": "Lists all clusters owned by a project across all zones.",
- "parameters": {
- "projectId": {
- "type": "string",
- "description": "The Google Developers Console project ID or project number.",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId"
- ],
- "response": {
- "$ref": "ListAggregatedClustersResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- }
- }
- },
- "operations": {
- "methods": {
- "list": {
- "id": "container.projects.operations.list",
- "path": "{projectId}/operations",
- "httpMethod": "GET",
- "description": "Lists all operations in a project, across all zones.",
- "parameters": {
- "projectId": {
- "type": "string",
- "description": "The Google Developers Console project ID or project number.",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId"
- ],
- "response": {
- "$ref": "ListAggregatedOperationsResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- }
- }
- },
- "zones": {
- "resources": {
- "clusters": {
- "methods": {
- "create": {
- "id": "container.projects.zones.clusters.create",
- "path": "{projectId}/zones/{zoneId}/clusters",
- "httpMethod": "POST",
- "description": "Creates a cluster, consisting of the specified number and type of Google Compute Engine instances, plus a Kubernetes master instance.\n\nThe cluster is created in the project's default network.\n\nA firewall is added that allows traffic into port 443 on the master, which enables HTTPS. A firewall and a route is added for each node to allow the containers on that node to communicate with all other instances in the cluster.\n\nFinally, an entry is added to the project's global metadata indicating which CIDR range is being used by the cluster.",
- "parameters": {
- "projectId": {
- "type": "string",
- "description": "The Google Developers Console project ID or project number.",
- "required": true,
- "location": "path"
- },
- "zoneId": {
- "type": "string",
- "description": "The name of the Google Compute Engine zone in which the cluster resides.",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "zoneId"
- ],
- "request": {
- "$ref": "CreateClusterRequest"
- },
- "response": {
- "$ref": "Operation"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "delete": {
- "id": "container.projects.zones.clusters.delete",
- "path": "{projectId}/zones/{zoneId}/clusters/{clusterId}",
- "httpMethod": "DELETE",
- "description": "Deletes the cluster, including the Kubernetes master and all worker nodes.\n\nFirewalls and routes that were configured at cluster creation are also deleted.",
- "parameters": {
- "clusterId": {
- "type": "string",
- "description": "The name of the cluster to delete.",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "The Google Developers Console project ID or project number.",
- "required": true,
- "location": "path"
- },
- "zoneId": {
- "type": "string",
- "description": "The name of the Google Compute Engine zone in which the cluster resides.",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "zoneId",
- "clusterId"
- ],
- "response": {
- "$ref": "Operation"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "get": {
- "id": "container.projects.zones.clusters.get",
- "path": "{projectId}/zones/{zoneId}/clusters/{clusterId}",
- "httpMethod": "GET",
- "description": "Gets a specific cluster.",
- "parameters": {
- "clusterId": {
- "type": "string",
- "description": "The name of the cluster to retrieve.",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "The Google Developers Console project ID or project number.",
- "required": true,
- "location": "path"
- },
- "zoneId": {
- "type": "string",
- "description": "The name of the Google Compute Engine zone in which the cluster resides.",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "zoneId",
- "clusterId"
- ],
- "response": {
- "$ref": "Cluster"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "list": {
- "id": "container.projects.zones.clusters.list",
- "path": "{projectId}/zones/{zoneId}/clusters",
- "httpMethod": "GET",
- "description": "Lists all clusters owned by a project in the specified zone.",
- "parameters": {
- "projectId": {
- "type": "string",
- "description": "The Google Developers Console project ID or project number.",
- "required": true,
- "location": "path"
- },
- "zoneId": {
- "type": "string",
- "description": "The name of the Google Compute Engine zone in which the cluster resides.",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "zoneId"
- ],
- "response": {
- "$ref": "ListClustersResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- }
- }
- },
- "operations": {
- "methods": {
- "get": {
- "id": "container.projects.zones.operations.get",
- "path": "{projectId}/zones/{zoneId}/operations/{operationId}",
- "httpMethod": "GET",
- "description": "Gets the specified operation.",
- "parameters": {
- "operationId": {
- "type": "string",
- "description": "The server-assigned name of the operation.",
- "required": true,
- "location": "path"
- },
- "projectId": {
- "type": "string",
- "description": "The Google Developers Console project ID or project number.",
- "required": true,
- "location": "path"
- },
- "zoneId": {
- "type": "string",
- "description": "The name of the Google Compute Engine zone in which the operation resides. This is always the same zone as the cluster with which the operation is associated.",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "zoneId",
- "operationId"
- ],
- "response": {
- "$ref": "Operation"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- },
- "list": {
- "id": "container.projects.zones.operations.list",
- "path": "{projectId}/zones/{zoneId}/operations",
- "httpMethod": "GET",
- "description": "Lists all operations in a project in a specific zone.",
- "parameters": {
- "projectId": {
- "type": "string",
- "description": "The Google Developers Console project ID or project number.",
- "required": true,
- "location": "path"
- },
- "zoneId": {
- "type": "string",
- "description": "The name of the Google Compute Engine zone to return operations for.",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "projectId",
- "zoneId"
- ],
- "response": {
- "$ref": "ListOperationsResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform"
- ]
- }
- }
- }
- }
- }
- }
- }
- }
-}
diff --git a/vendor/google.golang.org/api/container/v1beta1/container-gen.go b/vendor/google.golang.org/api/container/v1beta1/container-gen.go
deleted file mode 100644
index 17ea4cc..0000000
--- a/vendor/google.golang.org/api/container/v1beta1/container-gen.go
+++ /dev/null
@@ -1,1077 +0,0 @@
-// Package container provides access to the Google Container Engine API.
-//
-// See https://cloud.google.com/container-engine/docs/v1beta1/
-//
-// Usage example:
-//
-// import "google.golang.org/api/container/v1beta1"
-// ...
-// containerService, err := container.New(oauthHttpClient)
-package container
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
- "golang.org/x/net/context"
- "google.golang.org/api/googleapi"
- "io"
- "net/http"
- "net/url"
- "strconv"
- "strings"
-)
-
-// Always reference these packages, just in case the auto-generated code
-// below doesn't.
-var _ = bytes.NewBuffer
-var _ = strconv.Itoa
-var _ = fmt.Sprintf
-var _ = json.NewDecoder
-var _ = io.Copy
-var _ = url.Parse
-var _ = googleapi.Version
-var _ = errors.New
-var _ = strings.Replace
-var _ = context.Background
-
-const apiId = "container:v1beta1"
-const apiName = "container"
-const apiVersion = "v1beta1"
-const basePath = "https://www.googleapis.com/container/v1beta1/projects/"
-
-// OAuth2 scopes used by this API.
-const (
- // View and manage your data across Google Cloud Platform services
- CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
-)
-
-func New(client *http.Client) (*Service, error) {
- if client == nil {
- return nil, errors.New("client is nil")
- }
- s := &Service{client: client, BasePath: basePath}
- s.Projects = NewProjectsService(s)
- return s, nil
-}
-
-type Service struct {
- client *http.Client
- BasePath string // API endpoint base URL
- UserAgent string // optional additional User-Agent fragment
-
- Projects *ProjectsService
-}
-
-func (s *Service) userAgent() string {
- if s.UserAgent == "" {
- return googleapi.UserAgent
- }
- return googleapi.UserAgent + " " + s.UserAgent
-}
-
-func NewProjectsService(s *Service) *ProjectsService {
- rs := &ProjectsService{s: s}
- rs.Clusters = NewProjectsClustersService(s)
- rs.Operations = NewProjectsOperationsService(s)
- rs.Zones = NewProjectsZonesService(s)
- return rs
-}
-
-type ProjectsService struct {
- s *Service
-
- Clusters *ProjectsClustersService
-
- Operations *ProjectsOperationsService
-
- Zones *ProjectsZonesService
-}
-
-func NewProjectsClustersService(s *Service) *ProjectsClustersService {
- rs := &ProjectsClustersService{s: s}
- return rs
-}
-
-type ProjectsClustersService struct {
- s *Service
-}
-
-func NewProjectsOperationsService(s *Service) *ProjectsOperationsService {
- rs := &ProjectsOperationsService{s: s}
- return rs
-}
-
-type ProjectsOperationsService struct {
- s *Service
-}
-
-func NewProjectsZonesService(s *Service) *ProjectsZonesService {
- rs := &ProjectsZonesService{s: s}
- rs.Clusters = NewProjectsZonesClustersService(s)
- rs.Operations = NewProjectsZonesOperationsService(s)
- return rs
-}
-
-type ProjectsZonesService struct {
- s *Service
-
- Clusters *ProjectsZonesClustersService
-
- Operations *ProjectsZonesOperationsService
-}
-
-func NewProjectsZonesClustersService(s *Service) *ProjectsZonesClustersService {
- rs := &ProjectsZonesClustersService{s: s}
- return rs
-}
-
-type ProjectsZonesClustersService struct {
- s *Service
-}
-
-func NewProjectsZonesOperationsService(s *Service) *ProjectsZonesOperationsService {
- rs := &ProjectsZonesOperationsService{s: s}
- return rs
-}
-
-type ProjectsZonesOperationsService struct {
- s *Service
-}
-
-type Cluster struct {
- // ClusterApiVersion: The API version of the Kubernetes master and
- // kubelets running in this cluster. Leave blank to pick up the latest
- // stable release, or specify a version of the form "x.y.z". The Google
- // Container Engine release notes lists the currently supported
- // versions. If an incorrect version is specified, the server returns an
- // error listing the currently supported versions.
- ClusterApiVersion string `json:"clusterApiVersion,omitempty"`
-
- // ContainerIpv4Cidr: The IP address range of the container pods in this
- // cluster, in CIDR notation (e.g. 10.96.0.0/14). Leave blank to have
- // one automatically chosen or specify a /14 block in 10.0.0.0/8 or
- // 172.16.0.0/12.
- ContainerIpv4Cidr string `json:"containerIpv4Cidr,omitempty"`
-
- // CreationTimestamp: [Output only] The time the cluster was created, in
- // RFC3339 text format.
- CreationTimestamp string `json:"creationTimestamp,omitempty"`
-
- // Description: An optional description of this cluster.
- Description string `json:"description,omitempty"`
-
- // EnableCloudLogging: Whether logs from the cluster should be made
- // available via the Google Cloud Logging service. This includes both
- // logs from your applications running in the cluster as well as logs
- // from the Kubernetes components themselves.
- EnableCloudLogging bool `json:"enableCloudLogging,omitempty"`
-
- // Endpoint: [Output only] The IP address of this cluster's Kubernetes
- // master. The endpoint can be accessed from the internet at
- // https://username:password@endpoint/.
- //
- // See the masterAuth property of this resource for username and
- // password information.
- Endpoint string `json:"endpoint,omitempty"`
-
- // MasterAuth: The authentication information for accessing the master.
- MasterAuth *MasterAuth `json:"masterAuth,omitempty"`
-
- // Name: The name of this cluster. The name must be unique within this
- // project and zone, and can be up to 40 characters with the following
- // restrictions:
- // - Lowercase letters, numbers, and hyphens only.
- // - Must start with a letter.
- // - Must end with a number or a letter.
- Name string `json:"name,omitempty"`
-
- // Network: The name of the Google Compute Engine network to which the
- // cluster is connected.
- Network string `json:"network,omitempty"`
-
- // NodeConfig: The machine type and image to use for all nodes in this
- // cluster. See the descriptions of the child properties of nodeConfig.
- NodeConfig *NodeConfig `json:"nodeConfig,omitempty"`
-
- // NodeRoutingPrefixSize: [Output only] The size of the address space on
- // each node for hosting containers.
- NodeRoutingPrefixSize int64 `json:"nodeRoutingPrefixSize,omitempty"`
-
- // NumNodes: The number of nodes to create in this cluster. You must
- // ensure that your Compute Engine resource quota is sufficient for this
- // number of instances plus one (to include the master). You must also
- // have available firewall and routes quota.
- NumNodes int64 `json:"numNodes,omitempty"`
-
- // SelfLink: [Output only] Server-defined URL for the resource.
- SelfLink string `json:"selfLink,omitempty"`
-
- // ServicesIpv4Cidr: [Output only] The IP address range of the
- // Kubernetes services in this cluster, in CIDR notation (e.g.
- // 1.2.3.4/29). Service addresses are typically put in the last /16 from
- // the container CIDR.
- ServicesIpv4Cidr string `json:"servicesIpv4Cidr,omitempty"`
-
- // Status: [Output only] The current status of this cluster.
- //
- // Possible values:
- // "error"
- // "provisioning"
- // "running"
- // "stopping"
- Status string `json:"status,omitempty"`
-
- // StatusMessage: [Output only] Additional information about the current
- // status of this cluster, if available.
- StatusMessage string `json:"statusMessage,omitempty"`
-
- // Zone: [Output only] The name of the Google Compute Engine zone in
- // which the cluster resides.
- Zone string `json:"zone,omitempty"`
-}
-
-type CreateClusterRequest struct {
- // Cluster: A cluster resource.
- Cluster *Cluster `json:"cluster,omitempty"`
-}
-
-type ListAggregatedClustersResponse struct {
- // Clusters: A list of clusters in the project, across all zones.
- Clusters []*Cluster `json:"clusters,omitempty"`
-}
-
-type ListAggregatedOperationsResponse struct {
- // Operations: A list of operations in the project, across all zones.
- Operations []*Operation `json:"operations,omitempty"`
-}
-
-type ListClustersResponse struct {
- // Clusters: A list of clusters in the project in the specified zone.
- Clusters []*Cluster `json:"clusters,omitempty"`
-}
-
-type ListOperationsResponse struct {
- // Operations: A list of operations in the project in the specified
- // zone.
- Operations []*Operation `json:"operations,omitempty"`
-}
-
-type MasterAuth struct {
- // BearerToken: The token used to authenticate API requests to the
- // master. The token is to be included in an HTTP Authorization Header
- // in all requests to the master endpoint. The format of the header is:
- // "Authorization: Bearer ".
- BearerToken string `json:"bearerToken,omitempty"`
-
- // Password: The password to use for HTTP basic authentication when
- // accessing the Kubernetes master endpoint. Because the master endpoint
- // is open to the internet, you should create a strong password.
- Password string `json:"password,omitempty"`
-
- // User: The username to use for HTTP basic authentication when
- // accessing the Kubernetes master endpoint.
- User string `json:"user,omitempty"`
-}
-
-type NodeConfig struct {
- // MachineType: The name of a Google Compute Engine machine type (e.g.
- // n1-standard-1).
- //
- // If unspecified, the default machine type is n1-standard-1.
- MachineType string `json:"machineType,omitempty"`
-
- // ServiceAccounts: The optional list of ServiceAccounts, each with
- // their specified scopes, to be made available on all of the node VMs.
- // In addition to the service accounts and scopes specified, the
- // "default" account will always be created with the following scopes to
- // ensure the correct functioning of the cluster:
- // - https://www.googleapis.com/auth/compute,
- // - https://www.googleapis.com/auth/devstorage.read_only
- ServiceAccounts []*ServiceAccount `json:"serviceAccounts,omitempty"`
-
- // SourceImage: The fully-specified name of a Google Compute Engine
- // image. For example:
- // https://www.googleapis.com/compute/v1/projects/debian-cloud/global/ima
- // ges/backports-debian-7-wheezy-vYYYYMMDD (where YYYMMDD is the version
- // date).
- //
- // If specifying an image, you are responsible for ensuring its
- // compatibility with the Debian 7 backports image. We recommend leaving
- // this field blank to accept the default backports-debian-7-wheezy
- // value.
- SourceImage string `json:"sourceImage,omitempty"`
-}
-
-type Operation struct {
- // ErrorMessage: If an error has occurred, a textual description of the
- // error.
- ErrorMessage string `json:"errorMessage,omitempty"`
-
- // Name: The server-assigned ID for the operation.
- Name string `json:"name,omitempty"`
-
- // OperationType: The operation type.
- //
- // Possible values:
- // "createCluster"
- // "deleteCluster"
- OperationType string `json:"operationType,omitempty"`
-
- // SelfLink: Server-defined URL for the resource.
- SelfLink string `json:"selfLink,omitempty"`
-
- // Status: The current status of the operation.
- //
- // Possible values:
- // "done"
- // "pending"
- // "running"
- Status string `json:"status,omitempty"`
-
- // Target: [Optional] The URL of the cluster resource that this
- // operation is associated with.
- Target string `json:"target,omitempty"`
-
- // TargetLink: Server-defined URL for the target of the operation.
- TargetLink string `json:"targetLink,omitempty"`
-
- // Zone: The name of the Google Compute Engine zone in which the
- // operation is taking place.
- Zone string `json:"zone,omitempty"`
-}
-
-type ServiceAccount struct {
- // Email: Email address of the service account.
- Email string `json:"email,omitempty"`
-
- // Scopes: The list of scopes to be made available for this service
- // account.
- Scopes []string `json:"scopes,omitempty"`
-}
-
-// method id "container.projects.clusters.list":
-
-type ProjectsClustersListCall struct {
- s *Service
- projectId string
- opt_ map[string]interface{}
-}
-
-// List: Lists all clusters owned by a project across all zones.
-func (r *ProjectsClustersService) List(projectId string) *ProjectsClustersListCall {
- c := &ProjectsClustersListCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsClustersListCall) Fields(s ...googleapi.Field) *ProjectsClustersListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsClustersListCall) Do() (*ListAggregatedClustersResponse, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{projectId}/clusters")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *ListAggregatedClustersResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists all clusters owned by a project across all zones.",
- // "httpMethod": "GET",
- // "id": "container.projects.clusters.list",
- // "parameterOrder": [
- // "projectId"
- // ],
- // "parameters": {
- // "projectId": {
- // "description": "The Google Developers Console project ID or project number.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{projectId}/clusters",
- // "response": {
- // "$ref": "ListAggregatedClustersResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "container.projects.operations.list":
-
-type ProjectsOperationsListCall struct {
- s *Service
- projectId string
- opt_ map[string]interface{}
-}
-
-// List: Lists all operations in a project, across all zones.
-func (r *ProjectsOperationsService) List(projectId string) *ProjectsOperationsListCall {
- c := &ProjectsOperationsListCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsOperationsListCall) Fields(s ...googleapi.Field) *ProjectsOperationsListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsOperationsListCall) Do() (*ListAggregatedOperationsResponse, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{projectId}/operations")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *ListAggregatedOperationsResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists all operations in a project, across all zones.",
- // "httpMethod": "GET",
- // "id": "container.projects.operations.list",
- // "parameterOrder": [
- // "projectId"
- // ],
- // "parameters": {
- // "projectId": {
- // "description": "The Google Developers Console project ID or project number.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{projectId}/operations",
- // "response": {
- // "$ref": "ListAggregatedOperationsResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "container.projects.zones.clusters.create":
-
-type ProjectsZonesClustersCreateCall struct {
- s *Service
- projectId string
- zoneId string
- createclusterrequest *CreateClusterRequest
- opt_ map[string]interface{}
-}
-
-// Create: Creates a cluster, consisting of the specified number and
-// type of Google Compute Engine instances, plus a Kubernetes master
-// instance.
-//
-// The cluster is created in the project's default network.
-//
-// A firewall is added that allows traffic into port 443 on the master,
-// which enables HTTPS. A firewall and a route is added for each node to
-// allow the containers on that node to communicate with all other
-// instances in the cluster.
-//
-// Finally, an entry is added to the project's global metadata
-// indicating which CIDR range is being used by the cluster.
-func (r *ProjectsZonesClustersService) Create(projectId string, zoneId string, createclusterrequest *CreateClusterRequest) *ProjectsZonesClustersCreateCall {
- c := &ProjectsZonesClustersCreateCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.zoneId = zoneId
- c.createclusterrequest = createclusterrequest
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsZonesClustersCreateCall) Fields(s ...googleapi.Field) *ProjectsZonesClustersCreateCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsZonesClustersCreateCall) Do() (*Operation, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.createclusterrequest)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{projectId}/zones/{zoneId}/clusters")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "zoneId": c.zoneId,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Operation
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Creates a cluster, consisting of the specified number and type of Google Compute Engine instances, plus a Kubernetes master instance.\n\nThe cluster is created in the project's default network.\n\nA firewall is added that allows traffic into port 443 on the master, which enables HTTPS. A firewall and a route is added for each node to allow the containers on that node to communicate with all other instances in the cluster.\n\nFinally, an entry is added to the project's global metadata indicating which CIDR range is being used by the cluster.",
- // "httpMethod": "POST",
- // "id": "container.projects.zones.clusters.create",
- // "parameterOrder": [
- // "projectId",
- // "zoneId"
- // ],
- // "parameters": {
- // "projectId": {
- // "description": "The Google Developers Console project ID or project number.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "zoneId": {
- // "description": "The name of the Google Compute Engine zone in which the cluster resides.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{projectId}/zones/{zoneId}/clusters",
- // "request": {
- // "$ref": "CreateClusterRequest"
- // },
- // "response": {
- // "$ref": "Operation"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "container.projects.zones.clusters.delete":
-
-type ProjectsZonesClustersDeleteCall struct {
- s *Service
- projectId string
- zoneId string
- clusterId string
- opt_ map[string]interface{}
-}
-
-// Delete: Deletes the cluster, including the Kubernetes master and all
-// worker nodes.
-//
-// Firewalls and routes that were configured at cluster creation are
-// also deleted.
-func (r *ProjectsZonesClustersService) Delete(projectId string, zoneId string, clusterId string) *ProjectsZonesClustersDeleteCall {
- c := &ProjectsZonesClustersDeleteCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.zoneId = zoneId
- c.clusterId = clusterId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsZonesClustersDeleteCall) Fields(s ...googleapi.Field) *ProjectsZonesClustersDeleteCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsZonesClustersDeleteCall) Do() (*Operation, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{projectId}/zones/{zoneId}/clusters/{clusterId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("DELETE", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "zoneId": c.zoneId,
- "clusterId": c.clusterId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Operation
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Deletes the cluster, including the Kubernetes master and all worker nodes.\n\nFirewalls and routes that were configured at cluster creation are also deleted.",
- // "httpMethod": "DELETE",
- // "id": "container.projects.zones.clusters.delete",
- // "parameterOrder": [
- // "projectId",
- // "zoneId",
- // "clusterId"
- // ],
- // "parameters": {
- // "clusterId": {
- // "description": "The name of the cluster to delete.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "The Google Developers Console project ID or project number.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "zoneId": {
- // "description": "The name of the Google Compute Engine zone in which the cluster resides.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{projectId}/zones/{zoneId}/clusters/{clusterId}",
- // "response": {
- // "$ref": "Operation"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "container.projects.zones.clusters.get":
-
-type ProjectsZonesClustersGetCall struct {
- s *Service
- projectId string
- zoneId string
- clusterId string
- opt_ map[string]interface{}
-}
-
-// Get: Gets a specific cluster.
-func (r *ProjectsZonesClustersService) Get(projectId string, zoneId string, clusterId string) *ProjectsZonesClustersGetCall {
- c := &ProjectsZonesClustersGetCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.zoneId = zoneId
- c.clusterId = clusterId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsZonesClustersGetCall) Fields(s ...googleapi.Field) *ProjectsZonesClustersGetCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsZonesClustersGetCall) Do() (*Cluster, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{projectId}/zones/{zoneId}/clusters/{clusterId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "zoneId": c.zoneId,
- "clusterId": c.clusterId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Cluster
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Gets a specific cluster.",
- // "httpMethod": "GET",
- // "id": "container.projects.zones.clusters.get",
- // "parameterOrder": [
- // "projectId",
- // "zoneId",
- // "clusterId"
- // ],
- // "parameters": {
- // "clusterId": {
- // "description": "The name of the cluster to retrieve.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "The Google Developers Console project ID or project number.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "zoneId": {
- // "description": "The name of the Google Compute Engine zone in which the cluster resides.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{projectId}/zones/{zoneId}/clusters/{clusterId}",
- // "response": {
- // "$ref": "Cluster"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "container.projects.zones.clusters.list":
-
-type ProjectsZonesClustersListCall struct {
- s *Service
- projectId string
- zoneId string
- opt_ map[string]interface{}
-}
-
-// List: Lists all clusters owned by a project in the specified zone.
-func (r *ProjectsZonesClustersService) List(projectId string, zoneId string) *ProjectsZonesClustersListCall {
- c := &ProjectsZonesClustersListCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.zoneId = zoneId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsZonesClustersListCall) Fields(s ...googleapi.Field) *ProjectsZonesClustersListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsZonesClustersListCall) Do() (*ListClustersResponse, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{projectId}/zones/{zoneId}/clusters")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "zoneId": c.zoneId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *ListClustersResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists all clusters owned by a project in the specified zone.",
- // "httpMethod": "GET",
- // "id": "container.projects.zones.clusters.list",
- // "parameterOrder": [
- // "projectId",
- // "zoneId"
- // ],
- // "parameters": {
- // "projectId": {
- // "description": "The Google Developers Console project ID or project number.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "zoneId": {
- // "description": "The name of the Google Compute Engine zone in which the cluster resides.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{projectId}/zones/{zoneId}/clusters",
- // "response": {
- // "$ref": "ListClustersResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "container.projects.zones.operations.get":
-
-type ProjectsZonesOperationsGetCall struct {
- s *Service
- projectId string
- zoneId string
- operationId string
- opt_ map[string]interface{}
-}
-
-// Get: Gets the specified operation.
-func (r *ProjectsZonesOperationsService) Get(projectId string, zoneId string, operationId string) *ProjectsZonesOperationsGetCall {
- c := &ProjectsZonesOperationsGetCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.zoneId = zoneId
- c.operationId = operationId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsZonesOperationsGetCall) Fields(s ...googleapi.Field) *ProjectsZonesOperationsGetCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsZonesOperationsGetCall) Do() (*Operation, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{projectId}/zones/{zoneId}/operations/{operationId}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "zoneId": c.zoneId,
- "operationId": c.operationId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Operation
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Gets the specified operation.",
- // "httpMethod": "GET",
- // "id": "container.projects.zones.operations.get",
- // "parameterOrder": [
- // "projectId",
- // "zoneId",
- // "operationId"
- // ],
- // "parameters": {
- // "operationId": {
- // "description": "The server-assigned name of the operation.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "projectId": {
- // "description": "The Google Developers Console project ID or project number.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "zoneId": {
- // "description": "The name of the Google Compute Engine zone in which the operation resides. This is always the same zone as the cluster with which the operation is associated.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{projectId}/zones/{zoneId}/operations/{operationId}",
- // "response": {
- // "$ref": "Operation"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
-
-// method id "container.projects.zones.operations.list":
-
-type ProjectsZonesOperationsListCall struct {
- s *Service
- projectId string
- zoneId string
- opt_ map[string]interface{}
-}
-
-// List: Lists all operations in a project in a specific zone.
-func (r *ProjectsZonesOperationsService) List(projectId string, zoneId string) *ProjectsZonesOperationsListCall {
- c := &ProjectsZonesOperationsListCall{s: r.s, opt_: make(map[string]interface{})}
- c.projectId = projectId
- c.zoneId = zoneId
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsZonesOperationsListCall) Fields(s ...googleapi.Field) *ProjectsZonesOperationsListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsZonesOperationsListCall) Do() (*ListOperationsResponse, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{projectId}/zones/{zoneId}/operations")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "projectId": c.projectId,
- "zoneId": c.zoneId,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *ListOperationsResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists all operations in a project in a specific zone.",
- // "httpMethod": "GET",
- // "id": "container.projects.zones.operations.list",
- // "parameterOrder": [
- // "projectId",
- // "zoneId"
- // ],
- // "parameters": {
- // "projectId": {
- // "description": "The Google Developers Console project ID or project number.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // },
- // "zoneId": {
- // "description": "The name of the Google Compute Engine zone to return operations for.",
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{projectId}/zones/{zoneId}/operations",
- // "response": {
- // "$ref": "ListOperationsResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform"
- // ]
- // }
-
-}
diff --git a/vendor/google.golang.org/api/pubsub/v1beta2/pubsub-api.json b/vendor/google.golang.org/api/pubsub/v1beta2/pubsub-api.json
deleted file mode 100644
index e0aa7cf..0000000
--- a/vendor/google.golang.org/api/pubsub/v1beta2/pubsub-api.json
+++ /dev/null
@@ -1,679 +0,0 @@
-{
- "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/k747AQVNKzUoa08QT-Z1GxOMZC0\"",
- "discoveryVersion": "v1",
- "id": "pubsub:v1beta2",
- "name": "pubsub",
- "version": "v1beta2",
- "revision": "20150326",
- "title": "Google Cloud Pub/Sub API",
- "description": "Provides reliable, many-to-many, asynchronous messaging between applications.",
- "ownerDomain": "google.com",
- "ownerName": "Google",
- "icons": {
- "x16": "http://www.google.com/images/icons/product/search-16.gif",
- "x32": "http://www.google.com/images/icons/product/search-32.gif"
- },
- "documentationLink": "",
- "protocol": "rest",
- "baseUrl": "https://pubsub.googleapis.com/v1beta2/",
- "basePath": "/v1beta2/",
- "rootUrl": "https://pubsub.googleapis.com/",
- "servicePath": "v1beta2/",
- "batchPath": "batch",
- "parameters": {
- "alt": {
- "type": "string",
- "description": "Data format for the response.",
- "default": "json",
- "enum": [
- "json"
- ],
- "enumDescriptions": [
- "Responses with Content-Type of application/json"
- ],
- "location": "query"
- },
- "fields": {
- "type": "string",
- "description": "Selector specifying which fields to include in a partial response.",
- "location": "query"
- },
- "key": {
- "type": "string",
- "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
- "location": "query"
- },
- "oauth_token": {
- "type": "string",
- "description": "OAuth 2.0 token for the current user.",
- "location": "query"
- },
- "prettyPrint": {
- "type": "boolean",
- "description": "Returns response with indentations and line breaks.",
- "default": "true",
- "location": "query"
- },
- "quotaUser": {
- "type": "string",
- "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.",
- "location": "query"
- },
- "userIp": {
- "type": "string",
- "description": "IP address of the site where the request originates. Use this if you want to enforce per-user limits.",
- "location": "query"
- }
- },
- "auth": {
- "oauth2": {
- "scopes": {
- "https://www.googleapis.com/auth/cloud-platform": {
- "description": "View and manage your data across Google Cloud Platform services"
- },
- "https://www.googleapis.com/auth/pubsub": {
- "description": "View and manage Pub/Sub topics and subscriptions"
- }
- }
- }
- },
- "schemas": {
- "AcknowledgeRequest": {
- "id": "AcknowledgeRequest",
- "type": "object",
- "properties": {
- "ackIds": {
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- },
- "Empty": {
- "id": "Empty",
- "type": "object"
- },
- "ListSubscriptionsResponse": {
- "id": "ListSubscriptionsResponse",
- "type": "object",
- "properties": {
- "nextPageToken": {
- "type": "string"
- },
- "subscriptions": {
- "type": "array",
- "items": {
- "$ref": "Subscription"
- }
- }
- }
- },
- "ListTopicSubscriptionsResponse": {
- "id": "ListTopicSubscriptionsResponse",
- "type": "object",
- "properties": {
- "nextPageToken": {
- "type": "string"
- },
- "subscriptions": {
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- },
- "ListTopicsResponse": {
- "id": "ListTopicsResponse",
- "type": "object",
- "properties": {
- "nextPageToken": {
- "type": "string"
- },
- "topics": {
- "type": "array",
- "items": {
- "$ref": "Topic"
- }
- }
- }
- },
- "ModifyAckDeadlineRequest": {
- "id": "ModifyAckDeadlineRequest",
- "type": "object",
- "properties": {
- "ackDeadlineSeconds": {
- "type": "integer",
- "format": "int32"
- },
- "ackId": {
- "type": "string"
- }
- }
- },
- "ModifyPushConfigRequest": {
- "id": "ModifyPushConfigRequest",
- "type": "object",
- "properties": {
- "pushConfig": {
- "$ref": "PushConfig"
- }
- }
- },
- "PublishRequest": {
- "id": "PublishRequest",
- "type": "object",
- "properties": {
- "messages": {
- "type": "array",
- "items": {
- "$ref": "PubsubMessage"
- }
- }
- }
- },
- "PublishResponse": {
- "id": "PublishResponse",
- "type": "object",
- "properties": {
- "messageIds": {
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- },
- "PubsubMessage": {
- "id": "PubsubMessage",
- "type": "object",
- "properties": {
- "attributes": {
- "type": "object",
- "additionalProperties": {
- "type": "string"
- }
- },
- "data": {
- "type": "string",
- "format": "byte"
- },
- "messageId": {
- "type": "string"
- }
- }
- },
- "PullRequest": {
- "id": "PullRequest",
- "type": "object",
- "properties": {
- "maxMessages": {
- "type": "integer",
- "format": "int32"
- },
- "returnImmediately": {
- "type": "boolean"
- }
- }
- },
- "PullResponse": {
- "id": "PullResponse",
- "type": "object",
- "properties": {
- "receivedMessages": {
- "type": "array",
- "items": {
- "$ref": "ReceivedMessage"
- }
- }
- }
- },
- "PushConfig": {
- "id": "PushConfig",
- "type": "object",
- "properties": {
- "attributes": {
- "type": "object",
- "additionalProperties": {
- "type": "string"
- }
- },
- "pushEndpoint": {
- "type": "string"
- }
- }
- },
- "ReceivedMessage": {
- "id": "ReceivedMessage",
- "type": "object",
- "properties": {
- "ackId": {
- "type": "string"
- },
- "message": {
- "$ref": "PubsubMessage"
- }
- }
- },
- "Subscription": {
- "id": "Subscription",
- "type": "object",
- "properties": {
- "ackDeadlineSeconds": {
- "type": "integer",
- "format": "int32"
- },
- "name": {
- "type": "string"
- },
- "pushConfig": {
- "$ref": "PushConfig"
- },
- "topic": {
- "type": "string"
- }
- }
- },
- "Topic": {
- "id": "Topic",
- "type": "object",
- "properties": {
- "name": {
- "type": "string"
- }
- }
- }
- },
- "resources": {
- "projects": {
- "resources": {
- "subscriptions": {
- "methods": {
- "acknowledge": {
- "id": "pubsub.projects.subscriptions.acknowledge",
- "path": "{+subscription}:acknowledge",
- "httpMethod": "POST",
- "description": "Acknowledges the messages associated with the ack tokens in the AcknowledgeRequest. The Pub/Sub system can remove the relevant messages from the subscription. Acknowledging a message whose ack deadline has expired may succeed, but such a message may be redelivered later. Acknowledging a message more than once will not result in an error.",
- "parameters": {
- "subscription": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "subscription"
- ],
- "request": {
- "$ref": "AcknowledgeRequest"
- },
- "response": {
- "$ref": "Empty"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "create": {
- "id": "pubsub.projects.subscriptions.create",
- "path": "{+name}",
- "httpMethod": "PUT",
- "description": "Creates a subscription to a given topic for a given subscriber. If the subscription already exists, returns ALREADY_EXISTS. If the corresponding topic doesn't exist, returns NOT_FOUND. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic.",
- "parameters": {
- "name": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "name"
- ],
- "request": {
- "$ref": "Subscription"
- },
- "response": {
- "$ref": "Subscription"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "delete": {
- "id": "pubsub.projects.subscriptions.delete",
- "path": "{+subscription}",
- "httpMethod": "DELETE",
- "description": "Deletes an existing subscription. All pending messages in the subscription are immediately dropped. Calls to Pull after deletion will return NOT_FOUND. After a subscription is deleted, a new one may be created with the same name, but the new one has no association with the old subscription, or its topic unless the same topic is specified.",
- "parameters": {
- "subscription": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "subscription"
- ],
- "response": {
- "$ref": "Empty"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "get": {
- "id": "pubsub.projects.subscriptions.get",
- "path": "{+subscription}",
- "httpMethod": "GET",
- "description": "Gets the configuration details of a subscription.",
- "parameters": {
- "subscription": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "subscription"
- ],
- "response": {
- "$ref": "Subscription"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "list": {
- "id": "pubsub.projects.subscriptions.list",
- "path": "{+project}/subscriptions",
- "httpMethod": "GET",
- "description": "Lists matching subscriptions.",
- "parameters": {
- "pageSize": {
- "type": "integer",
- "format": "int32",
- "location": "query"
- },
- "pageToken": {
- "type": "string",
- "location": "query"
- },
- "project": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "project"
- ],
- "response": {
- "$ref": "ListSubscriptionsResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "modifyAckDeadline": {
- "id": "pubsub.projects.subscriptions.modifyAckDeadline",
- "path": "{+subscription}:modifyAckDeadline",
- "httpMethod": "POST",
- "description": "Modifies the ack deadline for a specific message. This method is useful to indicate that more time is needed to process a message by the subscriber, or to make the message available for redelivery if the processing was interrupted.",
- "parameters": {
- "subscription": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "subscription"
- ],
- "request": {
- "$ref": "ModifyAckDeadlineRequest"
- },
- "response": {
- "$ref": "Empty"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "modifyPushConfig": {
- "id": "pubsub.projects.subscriptions.modifyPushConfig",
- "path": "{+subscription}:modifyPushConfig",
- "httpMethod": "POST",
- "description": "Modifies the PushConfig for a specified subscription. This may be used to change a push subscription to a pull one (signified by an empty PushConfig) or vice versa, or change the endpoint URL and other attributes of a push subscription. Messages will accumulate for delivery continuously through the call regardless of changes to the PushConfig.",
- "parameters": {
- "subscription": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "subscription"
- ],
- "request": {
- "$ref": "ModifyPushConfigRequest"
- },
- "response": {
- "$ref": "Empty"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "pull": {
- "id": "pubsub.projects.subscriptions.pull",
- "path": "{+subscription}:pull",
- "httpMethod": "POST",
- "description": "Pulls messages from the server. Returns an empty list if there are no messages available in the backlog. The server may return UNAVAILABLE if there are too many concurrent pull requests pending for the given subscription.",
- "parameters": {
- "subscription": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "subscription"
- ],
- "request": {
- "$ref": "PullRequest"
- },
- "response": {
- "$ref": "PullResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- }
- }
- },
- "topics": {
- "methods": {
- "create": {
- "id": "pubsub.projects.topics.create",
- "path": "{+name}",
- "httpMethod": "PUT",
- "description": "Creates the given topic with the given name.",
- "parameters": {
- "name": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "name"
- ],
- "request": {
- "$ref": "Topic"
- },
- "response": {
- "$ref": "Topic"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "delete": {
- "id": "pubsub.projects.topics.delete",
- "path": "{+topic}",
- "httpMethod": "DELETE",
- "description": "Deletes the topic with the given name. Returns NOT_FOUND if the topic does not exist. After a topic is deleted, a new topic may be created with the same name; this is an entirely new topic with none of the old configuration or subscriptions. Existing subscriptions to this topic are not deleted.",
- "parameters": {
- "topic": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "topic"
- ],
- "response": {
- "$ref": "Empty"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "get": {
- "id": "pubsub.projects.topics.get",
- "path": "{+topic}",
- "httpMethod": "GET",
- "description": "Gets the configuration of a topic.",
- "parameters": {
- "topic": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "topic"
- ],
- "response": {
- "$ref": "Topic"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "list": {
- "id": "pubsub.projects.topics.list",
- "path": "{+project}/topics",
- "httpMethod": "GET",
- "description": "Lists matching topics.",
- "parameters": {
- "pageSize": {
- "type": "integer",
- "format": "int32",
- "location": "query"
- },
- "pageToken": {
- "type": "string",
- "location": "query"
- },
- "project": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "project"
- ],
- "response": {
- "$ref": "ListTopicsResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- },
- "publish": {
- "id": "pubsub.projects.topics.publish",
- "path": "{+topic}:publish",
- "httpMethod": "POST",
- "description": "Adds one or more messages to the topic. Returns NOT_FOUND if the topic does not exist.",
- "parameters": {
- "topic": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "topic"
- ],
- "request": {
- "$ref": "PublishRequest"
- },
- "response": {
- "$ref": "PublishResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- }
- },
- "resources": {
- "subscriptions": {
- "methods": {
- "list": {
- "id": "pubsub.projects.topics.subscriptions.list",
- "path": "{+topic}/subscriptions",
- "httpMethod": "GET",
- "description": "Lists the name of the subscriptions for this topic.",
- "parameters": {
- "pageSize": {
- "type": "integer",
- "format": "int32",
- "location": "query"
- },
- "pageToken": {
- "type": "string",
- "location": "query"
- },
- "topic": {
- "type": "string",
- "required": true,
- "location": "path"
- }
- },
- "parameterOrder": [
- "topic"
- ],
- "response": {
- "$ref": "ListTopicSubscriptionsResponse"
- },
- "scopes": [
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/pubsub"
- ]
- }
- }
- }
- }
- }
- }
- }
- }
-}
diff --git a/vendor/google.golang.org/api/pubsub/v1beta2/pubsub-gen.go b/vendor/google.golang.org/api/pubsub/v1beta2/pubsub-gen.go
deleted file mode 100644
index 632df9f..0000000
--- a/vendor/google.golang.org/api/pubsub/v1beta2/pubsub-gen.go
+++ /dev/null
@@ -1,1458 +0,0 @@
-// Package pubsub provides access to the Google Cloud Pub/Sub API.
-//
-// Usage example:
-//
-// import "google.golang.org/api/pubsub/v1beta2"
-// ...
-// pubsubService, err := pubsub.New(oauthHttpClient)
-package pubsub
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
- "golang.org/x/net/context"
- "google.golang.org/api/googleapi"
- "io"
- "net/http"
- "net/url"
- "strconv"
- "strings"
-)
-
-// Always reference these packages, just in case the auto-generated code
-// below doesn't.
-var _ = bytes.NewBuffer
-var _ = strconv.Itoa
-var _ = fmt.Sprintf
-var _ = json.NewDecoder
-var _ = io.Copy
-var _ = url.Parse
-var _ = googleapi.Version
-var _ = errors.New
-var _ = strings.Replace
-var _ = context.Background
-
-const apiId = "pubsub:v1beta2"
-const apiName = "pubsub"
-const apiVersion = "v1beta2"
-const basePath = "https://pubsub.googleapis.com/v1beta2/"
-
-// OAuth2 scopes used by this API.
-const (
- // View and manage your data across Google Cloud Platform services
- CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
-
- // View and manage Pub/Sub topics and subscriptions
- PubsubScope = "https://www.googleapis.com/auth/pubsub"
-)
-
-func New(client *http.Client) (*Service, error) {
- if client == nil {
- return nil, errors.New("client is nil")
- }
- s := &Service{client: client, BasePath: basePath}
- s.Projects = NewProjectsService(s)
- return s, nil
-}
-
-type Service struct {
- client *http.Client
- BasePath string // API endpoint base URL
- UserAgent string // optional additional User-Agent fragment
-
- Projects *ProjectsService
-}
-
-func (s *Service) userAgent() string {
- if s.UserAgent == "" {
- return googleapi.UserAgent
- }
- return googleapi.UserAgent + " " + s.UserAgent
-}
-
-func NewProjectsService(s *Service) *ProjectsService {
- rs := &ProjectsService{s: s}
- rs.Subscriptions = NewProjectsSubscriptionsService(s)
- rs.Topics = NewProjectsTopicsService(s)
- return rs
-}
-
-type ProjectsService struct {
- s *Service
-
- Subscriptions *ProjectsSubscriptionsService
-
- Topics *ProjectsTopicsService
-}
-
-func NewProjectsSubscriptionsService(s *Service) *ProjectsSubscriptionsService {
- rs := &ProjectsSubscriptionsService{s: s}
- return rs
-}
-
-type ProjectsSubscriptionsService struct {
- s *Service
-}
-
-func NewProjectsTopicsService(s *Service) *ProjectsTopicsService {
- rs := &ProjectsTopicsService{s: s}
- rs.Subscriptions = NewProjectsTopicsSubscriptionsService(s)
- return rs
-}
-
-type ProjectsTopicsService struct {
- s *Service
-
- Subscriptions *ProjectsTopicsSubscriptionsService
-}
-
-func NewProjectsTopicsSubscriptionsService(s *Service) *ProjectsTopicsSubscriptionsService {
- rs := &ProjectsTopicsSubscriptionsService{s: s}
- return rs
-}
-
-type ProjectsTopicsSubscriptionsService struct {
- s *Service
-}
-
-type AcknowledgeRequest struct {
- AckIds []string `json:"ackIds,omitempty"`
-}
-
-type Empty struct {
-}
-
-type ListSubscriptionsResponse struct {
- NextPageToken string `json:"nextPageToken,omitempty"`
-
- Subscriptions []*Subscription `json:"subscriptions,omitempty"`
-}
-
-type ListTopicSubscriptionsResponse struct {
- NextPageToken string `json:"nextPageToken,omitempty"`
-
- Subscriptions []string `json:"subscriptions,omitempty"`
-}
-
-type ListTopicsResponse struct {
- NextPageToken string `json:"nextPageToken,omitempty"`
-
- Topics []*Topic `json:"topics,omitempty"`
-}
-
-type ModifyAckDeadlineRequest struct {
- AckDeadlineSeconds int64 `json:"ackDeadlineSeconds,omitempty"`
-
- AckId string `json:"ackId,omitempty"`
-}
-
-type ModifyPushConfigRequest struct {
- PushConfig *PushConfig `json:"pushConfig,omitempty"`
-}
-
-type PublishRequest struct {
- Messages []*PubsubMessage `json:"messages,omitempty"`
-}
-
-type PublishResponse struct {
- MessageIds []string `json:"messageIds,omitempty"`
-}
-
-type PubsubMessage struct {
- Attributes map[string]string `json:"attributes,omitempty"`
-
- Data string `json:"data,omitempty"`
-
- MessageId string `json:"messageId,omitempty"`
-}
-
-type PullRequest struct {
- MaxMessages int64 `json:"maxMessages,omitempty"`
-
- ReturnImmediately bool `json:"returnImmediately,omitempty"`
-}
-
-type PullResponse struct {
- ReceivedMessages []*ReceivedMessage `json:"receivedMessages,omitempty"`
-}
-
-type PushConfig struct {
- Attributes map[string]string `json:"attributes,omitempty"`
-
- PushEndpoint string `json:"pushEndpoint,omitempty"`
-}
-
-type ReceivedMessage struct {
- AckId string `json:"ackId,omitempty"`
-
- Message *PubsubMessage `json:"message,omitempty"`
-}
-
-type Subscription struct {
- AckDeadlineSeconds int64 `json:"ackDeadlineSeconds,omitempty"`
-
- Name string `json:"name,omitempty"`
-
- PushConfig *PushConfig `json:"pushConfig,omitempty"`
-
- Topic string `json:"topic,omitempty"`
-}
-
-type Topic struct {
- Name string `json:"name,omitempty"`
-}
-
-// method id "pubsub.projects.subscriptions.acknowledge":
-
-type ProjectsSubscriptionsAcknowledgeCall struct {
- s *Service
- subscription string
- acknowledgerequest *AcknowledgeRequest
- opt_ map[string]interface{}
-}
-
-// Acknowledge: Acknowledges the messages associated with the ack tokens
-// in the AcknowledgeRequest. The Pub/Sub system can remove the relevant
-// messages from the subscription. Acknowledging a message whose ack
-// deadline has expired may succeed, but such a message may be
-// redelivered later. Acknowledging a message more than once will not
-// result in an error.
-func (r *ProjectsSubscriptionsService) Acknowledge(subscription string, acknowledgerequest *AcknowledgeRequest) *ProjectsSubscriptionsAcknowledgeCall {
- c := &ProjectsSubscriptionsAcknowledgeCall{s: r.s, opt_: make(map[string]interface{})}
- c.subscription = subscription
- c.acknowledgerequest = acknowledgerequest
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsSubscriptionsAcknowledgeCall) Fields(s ...googleapi.Field) *ProjectsSubscriptionsAcknowledgeCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsSubscriptionsAcknowledgeCall) Do() (*Empty, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.acknowledgerequest)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+subscription}:acknowledge")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "subscription": c.subscription,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Empty
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Acknowledges the messages associated with the ack tokens in the AcknowledgeRequest. The Pub/Sub system can remove the relevant messages from the subscription. Acknowledging a message whose ack deadline has expired may succeed, but such a message may be redelivered later. Acknowledging a message more than once will not result in an error.",
- // "httpMethod": "POST",
- // "id": "pubsub.projects.subscriptions.acknowledge",
- // "parameterOrder": [
- // "subscription"
- // ],
- // "parameters": {
- // "subscription": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+subscription}:acknowledge",
- // "request": {
- // "$ref": "AcknowledgeRequest"
- // },
- // "response": {
- // "$ref": "Empty"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.subscriptions.create":
-
-type ProjectsSubscriptionsCreateCall struct {
- s *Service
- name string
- subscription *Subscription
- opt_ map[string]interface{}
-}
-
-// Create: Creates a subscription to a given topic for a given
-// subscriber. If the subscription already exists, returns
-// ALREADY_EXISTS. If the corresponding topic doesn't exist, returns
-// NOT_FOUND. If the name is not provided in the request, the server
-// will assign a random name for this subscription on the same project
-// as the topic.
-func (r *ProjectsSubscriptionsService) Create(name string, subscription *Subscription) *ProjectsSubscriptionsCreateCall {
- c := &ProjectsSubscriptionsCreateCall{s: r.s, opt_: make(map[string]interface{})}
- c.name = name
- c.subscription = subscription
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsSubscriptionsCreateCall) Fields(s ...googleapi.Field) *ProjectsSubscriptionsCreateCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsSubscriptionsCreateCall) Do() (*Subscription, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.subscription)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+name}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("PUT", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "name": c.name,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Subscription
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Creates a subscription to a given topic for a given subscriber. If the subscription already exists, returns ALREADY_EXISTS. If the corresponding topic doesn't exist, returns NOT_FOUND. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic.",
- // "httpMethod": "PUT",
- // "id": "pubsub.projects.subscriptions.create",
- // "parameterOrder": [
- // "name"
- // ],
- // "parameters": {
- // "name": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+name}",
- // "request": {
- // "$ref": "Subscription"
- // },
- // "response": {
- // "$ref": "Subscription"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.subscriptions.delete":
-
-type ProjectsSubscriptionsDeleteCall struct {
- s *Service
- subscription string
- opt_ map[string]interface{}
-}
-
-// Delete: Deletes an existing subscription. All pending messages in the
-// subscription are immediately dropped. Calls to Pull after deletion
-// will return NOT_FOUND. After a subscription is deleted, a new one may
-// be created with the same name, but the new one has no association
-// with the old subscription, or its topic unless the same topic is
-// specified.
-func (r *ProjectsSubscriptionsService) Delete(subscription string) *ProjectsSubscriptionsDeleteCall {
- c := &ProjectsSubscriptionsDeleteCall{s: r.s, opt_: make(map[string]interface{})}
- c.subscription = subscription
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsSubscriptionsDeleteCall) Fields(s ...googleapi.Field) *ProjectsSubscriptionsDeleteCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsSubscriptionsDeleteCall) Do() (*Empty, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+subscription}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("DELETE", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "subscription": c.subscription,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Empty
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Deletes an existing subscription. All pending messages in the subscription are immediately dropped. Calls to Pull after deletion will return NOT_FOUND. After a subscription is deleted, a new one may be created with the same name, but the new one has no association with the old subscription, or its topic unless the same topic is specified.",
- // "httpMethod": "DELETE",
- // "id": "pubsub.projects.subscriptions.delete",
- // "parameterOrder": [
- // "subscription"
- // ],
- // "parameters": {
- // "subscription": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+subscription}",
- // "response": {
- // "$ref": "Empty"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.subscriptions.get":
-
-type ProjectsSubscriptionsGetCall struct {
- s *Service
- subscription string
- opt_ map[string]interface{}
-}
-
-// Get: Gets the configuration details of a subscription.
-func (r *ProjectsSubscriptionsService) Get(subscription string) *ProjectsSubscriptionsGetCall {
- c := &ProjectsSubscriptionsGetCall{s: r.s, opt_: make(map[string]interface{})}
- c.subscription = subscription
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsSubscriptionsGetCall) Fields(s ...googleapi.Field) *ProjectsSubscriptionsGetCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsSubscriptionsGetCall) Do() (*Subscription, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+subscription}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "subscription": c.subscription,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Subscription
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Gets the configuration details of a subscription.",
- // "httpMethod": "GET",
- // "id": "pubsub.projects.subscriptions.get",
- // "parameterOrder": [
- // "subscription"
- // ],
- // "parameters": {
- // "subscription": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+subscription}",
- // "response": {
- // "$ref": "Subscription"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.subscriptions.list":
-
-type ProjectsSubscriptionsListCall struct {
- s *Service
- project string
- opt_ map[string]interface{}
-}
-
-// List: Lists matching subscriptions.
-func (r *ProjectsSubscriptionsService) List(project string) *ProjectsSubscriptionsListCall {
- c := &ProjectsSubscriptionsListCall{s: r.s, opt_: make(map[string]interface{})}
- c.project = project
- return c
-}
-
-// PageSize sets the optional parameter "pageSize":
-func (c *ProjectsSubscriptionsListCall) PageSize(pageSize int64) *ProjectsSubscriptionsListCall {
- c.opt_["pageSize"] = pageSize
- return c
-}
-
-// PageToken sets the optional parameter "pageToken":
-func (c *ProjectsSubscriptionsListCall) PageToken(pageToken string) *ProjectsSubscriptionsListCall {
- c.opt_["pageToken"] = pageToken
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsSubscriptionsListCall) Fields(s ...googleapi.Field) *ProjectsSubscriptionsListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsSubscriptionsListCall) Do() (*ListSubscriptionsResponse, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["pageSize"]; ok {
- params.Set("pageSize", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["pageToken"]; ok {
- params.Set("pageToken", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+project}/subscriptions")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "project": c.project,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *ListSubscriptionsResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists matching subscriptions.",
- // "httpMethod": "GET",
- // "id": "pubsub.projects.subscriptions.list",
- // "parameterOrder": [
- // "project"
- // ],
- // "parameters": {
- // "pageSize": {
- // "format": "int32",
- // "location": "query",
- // "type": "integer"
- // },
- // "pageToken": {
- // "location": "query",
- // "type": "string"
- // },
- // "project": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+project}/subscriptions",
- // "response": {
- // "$ref": "ListSubscriptionsResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.subscriptions.modifyAckDeadline":
-
-type ProjectsSubscriptionsModifyAckDeadlineCall struct {
- s *Service
- subscription string
- modifyackdeadlinerequest *ModifyAckDeadlineRequest
- opt_ map[string]interface{}
-}
-
-// ModifyAckDeadline: Modifies the ack deadline for a specific message.
-// This method is useful to indicate that more time is needed to process
-// a message by the subscriber, or to make the message available for
-// redelivery if the processing was interrupted.
-func (r *ProjectsSubscriptionsService) ModifyAckDeadline(subscription string, modifyackdeadlinerequest *ModifyAckDeadlineRequest) *ProjectsSubscriptionsModifyAckDeadlineCall {
- c := &ProjectsSubscriptionsModifyAckDeadlineCall{s: r.s, opt_: make(map[string]interface{})}
- c.subscription = subscription
- c.modifyackdeadlinerequest = modifyackdeadlinerequest
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsSubscriptionsModifyAckDeadlineCall) Fields(s ...googleapi.Field) *ProjectsSubscriptionsModifyAckDeadlineCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsSubscriptionsModifyAckDeadlineCall) Do() (*Empty, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.modifyackdeadlinerequest)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+subscription}:modifyAckDeadline")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "subscription": c.subscription,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Empty
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Modifies the ack deadline for a specific message. This method is useful to indicate that more time is needed to process a message by the subscriber, or to make the message available for redelivery if the processing was interrupted.",
- // "httpMethod": "POST",
- // "id": "pubsub.projects.subscriptions.modifyAckDeadline",
- // "parameterOrder": [
- // "subscription"
- // ],
- // "parameters": {
- // "subscription": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+subscription}:modifyAckDeadline",
- // "request": {
- // "$ref": "ModifyAckDeadlineRequest"
- // },
- // "response": {
- // "$ref": "Empty"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.subscriptions.modifyPushConfig":
-
-type ProjectsSubscriptionsModifyPushConfigCall struct {
- s *Service
- subscription string
- modifypushconfigrequest *ModifyPushConfigRequest
- opt_ map[string]interface{}
-}
-
-// ModifyPushConfig: Modifies the PushConfig for a specified
-// subscription. This may be used to change a push subscription to a
-// pull one (signified by an empty PushConfig) or vice versa, or change
-// the endpoint URL and other attributes of a push subscription.
-// Messages will accumulate for delivery continuously through the call
-// regardless of changes to the PushConfig.
-func (r *ProjectsSubscriptionsService) ModifyPushConfig(subscription string, modifypushconfigrequest *ModifyPushConfigRequest) *ProjectsSubscriptionsModifyPushConfigCall {
- c := &ProjectsSubscriptionsModifyPushConfigCall{s: r.s, opt_: make(map[string]interface{})}
- c.subscription = subscription
- c.modifypushconfigrequest = modifypushconfigrequest
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsSubscriptionsModifyPushConfigCall) Fields(s ...googleapi.Field) *ProjectsSubscriptionsModifyPushConfigCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsSubscriptionsModifyPushConfigCall) Do() (*Empty, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.modifypushconfigrequest)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+subscription}:modifyPushConfig")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "subscription": c.subscription,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Empty
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Modifies the PushConfig for a specified subscription. This may be used to change a push subscription to a pull one (signified by an empty PushConfig) or vice versa, or change the endpoint URL and other attributes of a push subscription. Messages will accumulate for delivery continuously through the call regardless of changes to the PushConfig.",
- // "httpMethod": "POST",
- // "id": "pubsub.projects.subscriptions.modifyPushConfig",
- // "parameterOrder": [
- // "subscription"
- // ],
- // "parameters": {
- // "subscription": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+subscription}:modifyPushConfig",
- // "request": {
- // "$ref": "ModifyPushConfigRequest"
- // },
- // "response": {
- // "$ref": "Empty"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.subscriptions.pull":
-
-type ProjectsSubscriptionsPullCall struct {
- s *Service
- subscription string
- pullrequest *PullRequest
- opt_ map[string]interface{}
-}
-
-// Pull: Pulls messages from the server. Returns an empty list if there
-// are no messages available in the backlog. The server may return
-// UNAVAILABLE if there are too many concurrent pull requests pending
-// for the given subscription.
-func (r *ProjectsSubscriptionsService) Pull(subscription string, pullrequest *PullRequest) *ProjectsSubscriptionsPullCall {
- c := &ProjectsSubscriptionsPullCall{s: r.s, opt_: make(map[string]interface{})}
- c.subscription = subscription
- c.pullrequest = pullrequest
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsSubscriptionsPullCall) Fields(s ...googleapi.Field) *ProjectsSubscriptionsPullCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsSubscriptionsPullCall) Do() (*PullResponse, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.pullrequest)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+subscription}:pull")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "subscription": c.subscription,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *PullResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Pulls messages from the server. Returns an empty list if there are no messages available in the backlog. The server may return UNAVAILABLE if there are too many concurrent pull requests pending for the given subscription.",
- // "httpMethod": "POST",
- // "id": "pubsub.projects.subscriptions.pull",
- // "parameterOrder": [
- // "subscription"
- // ],
- // "parameters": {
- // "subscription": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+subscription}:pull",
- // "request": {
- // "$ref": "PullRequest"
- // },
- // "response": {
- // "$ref": "PullResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.topics.create":
-
-type ProjectsTopicsCreateCall struct {
- s *Service
- name string
- topic *Topic
- opt_ map[string]interface{}
-}
-
-// Create: Creates the given topic with the given name.
-func (r *ProjectsTopicsService) Create(name string, topic *Topic) *ProjectsTopicsCreateCall {
- c := &ProjectsTopicsCreateCall{s: r.s, opt_: make(map[string]interface{})}
- c.name = name
- c.topic = topic
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsTopicsCreateCall) Fields(s ...googleapi.Field) *ProjectsTopicsCreateCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsTopicsCreateCall) Do() (*Topic, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.topic)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+name}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("PUT", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "name": c.name,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Topic
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Creates the given topic with the given name.",
- // "httpMethod": "PUT",
- // "id": "pubsub.projects.topics.create",
- // "parameterOrder": [
- // "name"
- // ],
- // "parameters": {
- // "name": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+name}",
- // "request": {
- // "$ref": "Topic"
- // },
- // "response": {
- // "$ref": "Topic"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.topics.delete":
-
-type ProjectsTopicsDeleteCall struct {
- s *Service
- topic string
- opt_ map[string]interface{}
-}
-
-// Delete: Deletes the topic with the given name. Returns NOT_FOUND if
-// the topic does not exist. After a topic is deleted, a new topic may
-// be created with the same name; this is an entirely new topic with
-// none of the old configuration or subscriptions. Existing
-// subscriptions to this topic are not deleted.
-func (r *ProjectsTopicsService) Delete(topic string) *ProjectsTopicsDeleteCall {
- c := &ProjectsTopicsDeleteCall{s: r.s, opt_: make(map[string]interface{})}
- c.topic = topic
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsTopicsDeleteCall) Fields(s ...googleapi.Field) *ProjectsTopicsDeleteCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsTopicsDeleteCall) Do() (*Empty, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+topic}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("DELETE", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "topic": c.topic,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Empty
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Deletes the topic with the given name. Returns NOT_FOUND if the topic does not exist. After a topic is deleted, a new topic may be created with the same name; this is an entirely new topic with none of the old configuration or subscriptions. Existing subscriptions to this topic are not deleted.",
- // "httpMethod": "DELETE",
- // "id": "pubsub.projects.topics.delete",
- // "parameterOrder": [
- // "topic"
- // ],
- // "parameters": {
- // "topic": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+topic}",
- // "response": {
- // "$ref": "Empty"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.topics.get":
-
-type ProjectsTopicsGetCall struct {
- s *Service
- topic string
- opt_ map[string]interface{}
-}
-
-// Get: Gets the configuration of a topic.
-func (r *ProjectsTopicsService) Get(topic string) *ProjectsTopicsGetCall {
- c := &ProjectsTopicsGetCall{s: r.s, opt_: make(map[string]interface{})}
- c.topic = topic
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsTopicsGetCall) Fields(s ...googleapi.Field) *ProjectsTopicsGetCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsTopicsGetCall) Do() (*Topic, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+topic}")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "topic": c.topic,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *Topic
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Gets the configuration of a topic.",
- // "httpMethod": "GET",
- // "id": "pubsub.projects.topics.get",
- // "parameterOrder": [
- // "topic"
- // ],
- // "parameters": {
- // "topic": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+topic}",
- // "response": {
- // "$ref": "Topic"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.topics.list":
-
-type ProjectsTopicsListCall struct {
- s *Service
- project string
- opt_ map[string]interface{}
-}
-
-// List: Lists matching topics.
-func (r *ProjectsTopicsService) List(project string) *ProjectsTopicsListCall {
- c := &ProjectsTopicsListCall{s: r.s, opt_: make(map[string]interface{})}
- c.project = project
- return c
-}
-
-// PageSize sets the optional parameter "pageSize":
-func (c *ProjectsTopicsListCall) PageSize(pageSize int64) *ProjectsTopicsListCall {
- c.opt_["pageSize"] = pageSize
- return c
-}
-
-// PageToken sets the optional parameter "pageToken":
-func (c *ProjectsTopicsListCall) PageToken(pageToken string) *ProjectsTopicsListCall {
- c.opt_["pageToken"] = pageToken
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsTopicsListCall) Fields(s ...googleapi.Field) *ProjectsTopicsListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsTopicsListCall) Do() (*ListTopicsResponse, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["pageSize"]; ok {
- params.Set("pageSize", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["pageToken"]; ok {
- params.Set("pageToken", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+project}/topics")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "project": c.project,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *ListTopicsResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists matching topics.",
- // "httpMethod": "GET",
- // "id": "pubsub.projects.topics.list",
- // "parameterOrder": [
- // "project"
- // ],
- // "parameters": {
- // "pageSize": {
- // "format": "int32",
- // "location": "query",
- // "type": "integer"
- // },
- // "pageToken": {
- // "location": "query",
- // "type": "string"
- // },
- // "project": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+project}/topics",
- // "response": {
- // "$ref": "ListTopicsResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.topics.publish":
-
-type ProjectsTopicsPublishCall struct {
- s *Service
- topic string
- publishrequest *PublishRequest
- opt_ map[string]interface{}
-}
-
-// Publish: Adds one or more messages to the topic. Returns NOT_FOUND if
-// the topic does not exist.
-func (r *ProjectsTopicsService) Publish(topic string, publishrequest *PublishRequest) *ProjectsTopicsPublishCall {
- c := &ProjectsTopicsPublishCall{s: r.s, opt_: make(map[string]interface{})}
- c.topic = topic
- c.publishrequest = publishrequest
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsTopicsPublishCall) Fields(s ...googleapi.Field) *ProjectsTopicsPublishCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsTopicsPublishCall) Do() (*PublishResponse, error) {
- var body io.Reader = nil
- body, err := googleapi.WithoutDataWrapper.JSONReader(c.publishrequest)
- if err != nil {
- return nil, err
- }
- ctype := "application/json"
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+topic}:publish")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("POST", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "topic": c.topic,
- })
- req.Header.Set("Content-Type", ctype)
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *PublishResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Adds one or more messages to the topic. Returns NOT_FOUND if the topic does not exist.",
- // "httpMethod": "POST",
- // "id": "pubsub.projects.topics.publish",
- // "parameterOrder": [
- // "topic"
- // ],
- // "parameters": {
- // "topic": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+topic}:publish",
- // "request": {
- // "$ref": "PublishRequest"
- // },
- // "response": {
- // "$ref": "PublishResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
-
-// method id "pubsub.projects.topics.subscriptions.list":
-
-type ProjectsTopicsSubscriptionsListCall struct {
- s *Service
- topic string
- opt_ map[string]interface{}
-}
-
-// List: Lists the name of the subscriptions for this topic.
-func (r *ProjectsTopicsSubscriptionsService) List(topic string) *ProjectsTopicsSubscriptionsListCall {
- c := &ProjectsTopicsSubscriptionsListCall{s: r.s, opt_: make(map[string]interface{})}
- c.topic = topic
- return c
-}
-
-// PageSize sets the optional parameter "pageSize":
-func (c *ProjectsTopicsSubscriptionsListCall) PageSize(pageSize int64) *ProjectsTopicsSubscriptionsListCall {
- c.opt_["pageSize"] = pageSize
- return c
-}
-
-// PageToken sets the optional parameter "pageToken":
-func (c *ProjectsTopicsSubscriptionsListCall) PageToken(pageToken string) *ProjectsTopicsSubscriptionsListCall {
- c.opt_["pageToken"] = pageToken
- return c
-}
-
-// Fields allows partial responses to be retrieved.
-// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
-// for more information.
-func (c *ProjectsTopicsSubscriptionsListCall) Fields(s ...googleapi.Field) *ProjectsTopicsSubscriptionsListCall {
- c.opt_["fields"] = googleapi.CombineFields(s)
- return c
-}
-
-func (c *ProjectsTopicsSubscriptionsListCall) Do() (*ListTopicSubscriptionsResponse, error) {
- var body io.Reader = nil
- params := make(url.Values)
- params.Set("alt", "json")
- if v, ok := c.opt_["pageSize"]; ok {
- params.Set("pageSize", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["pageToken"]; ok {
- params.Set("pageToken", fmt.Sprintf("%v", v))
- }
- if v, ok := c.opt_["fields"]; ok {
- params.Set("fields", fmt.Sprintf("%v", v))
- }
- urls := googleapi.ResolveRelative(c.s.BasePath, "{+topic}/subscriptions")
- urls += "?" + params.Encode()
- req, _ := http.NewRequest("GET", urls, body)
- googleapi.Expand(req.URL, map[string]string{
- "topic": c.topic,
- })
- req.Header.Set("User-Agent", c.s.userAgent())
- res, err := c.s.client.Do(req)
- if err != nil {
- return nil, err
- }
- defer googleapi.CloseBody(res)
- if err := googleapi.CheckResponse(res); err != nil {
- return nil, err
- }
- var ret *ListTopicSubscriptionsResponse
- if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
- return nil, err
- }
- return ret, nil
- // {
- // "description": "Lists the name of the subscriptions for this topic.",
- // "httpMethod": "GET",
- // "id": "pubsub.projects.topics.subscriptions.list",
- // "parameterOrder": [
- // "topic"
- // ],
- // "parameters": {
- // "pageSize": {
- // "format": "int32",
- // "location": "query",
- // "type": "integer"
- // },
- // "pageToken": {
- // "location": "query",
- // "type": "string"
- // },
- // "topic": {
- // "location": "path",
- // "required": true,
- // "type": "string"
- // }
- // },
- // "path": "{+topic}/subscriptions",
- // "response": {
- // "$ref": "ListTopicSubscriptionsResponse"
- // },
- // "scopes": [
- // "https://www.googleapis.com/auth/cloud-platform",
- // "https://www.googleapis.com/auth/pubsub"
- // ]
- // }
-
-}
diff --git a/vendor/google.golang.org/appengine/.travis.yml b/vendor/google.golang.org/appengine/.travis.yml
deleted file mode 100644
index 7715209..0000000
--- a/vendor/google.golang.org/appengine/.travis.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-language: go
-sudo: false
-
-go:
- - 1.4
-
-install:
- - go get -v -t -d google.golang.org/appengine/...
- - mkdir sdk
- - curl -o sdk.zip "https://storage.googleapis.com/appengine-sdks/featured/go_appengine_sdk_linux_amd64-1.9.24.zip"
- - unzip sdk.zip -d sdk
- - export APPENGINE_DEV_APPSERVER=$(pwd)/sdk/go_appengine/dev_appserver.py
-
-script:
- - go version
- - go test -v google.golang.org/appengine/...
- - go test -v -race google.golang.org/appengine/...
- - sdk/go_appengine/goapp test -v google.golang.org/appengine/...
diff --git a/vendor/google.golang.org/appengine/LICENSE b/vendor/google.golang.org/appengine/LICENSE
deleted file mode 100644
index d645695..0000000
--- a/vendor/google.golang.org/appengine/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/google.golang.org/appengine/README.md b/vendor/google.golang.org/appengine/README.md
deleted file mode 100644
index 5ae34df..0000000
--- a/vendor/google.golang.org/appengine/README.md
+++ /dev/null
@@ -1,75 +0,0 @@
-# Go App Engine packages
-
-[![Build Status](https://travis-ci.org/golang/appengine.svg)](https://travis-ci.org/golang/appengine)
-
-This repository supports the Go runtime on App Engine,
-including both classic App Engine and Managed VMs.
-It provides APIs for interacting with App Engine services.
-Its canonical import path is `google.golang.org/appengine`.
-
-See https://cloud.google.com/appengine/docs/go/
-for more information.
-
-File issue reports and feature requests on the [Google App Engine issue
-tracker](https://code.google.com/p/googleappengine/issues/entry?template=Go%20defect).
-
-## Directory structure
-The top level directory of this repository is the `appengine` package. It
-contains the
-basic APIs (e.g. `appengine.NewContext`) that apply across APIs. Specific API
-packages are in subdirectories (e.g. `datastore`).
-
-There is an `internal` subdirectory that contains service protocol buffers,
-plus packages required for connectivity to make API calls. App Engine apps
-should not directly import any package under `internal`.
-
-## Updating a Go App Engine app
-
-This section describes how to update a traditional Go App Engine app to use
-these packages.
-
-### 1. Update YAML files (Managed VMs only)
-
-The `app.yaml` file (and YAML files for modules) should have these new lines added:
-```
-vm: true
-manual_scaling:
- instances: 1
-```
-See https://cloud.google.com/appengine/docs/go/modules/#Go_Instance_scaling_and_class for details.
-
-### 2. Update import paths
-
-The import paths for App Engine packages are now fully qualified, based at `google.golang.org/appengine`.
-You will need to update your code to use import paths starting with that; for instance,
-code importing `appengine/datastore` will now need to import `google.golang.org/appengine/datastore`.
-You can do that manually, or by running this command to recursively update all Go source files in the current directory:
-(may require GNU sed)
-```
-sed -i '/"appengine/{s,"appengine,"google.golang.org/appengine,;s,appengine_,appengine/,}' \
- $(find . -name '*.go')
-```
-
-### 3. Update code using deprecated, removed or modified APIs
-
-Most App Engine services are available with exactly the same API.
-A few APIs were cleaned up, and some are not available yet.
-This list summarises the differences:
-
-* `appengine.Context` has been replaced with the `Context` type from `golang.org/x/net/context`.
-* Logging methods that were on `appengine.Context` are now functions in `google.golang.org/appengine/log`.
-* `appengine.Timeout` has been removed. Use `context.WithTimeout` instead.
-* `appengine.Datacenter` now takes a `context.Context` argument.
-* `datastore.PropertyLoadSaver` has been simplified to use slices in place of channels.
-* `delay.Call` now returns an error.
-* `search.FieldLoadSaver` now handles document metadata.
-* `urlfetch.Transport` no longer has a Deadline field; set a deadline on the
- `context.Context` instead.
-* `aetest` no longer declares its own Context type, and uses the standard one instead.
-* `taskqueue.QueueStats` no longer takes a maxTasks argument. That argument has been
- deprecated and unused for a long time.
-* `appengine.BackendHostname` and `appengine.BackendInstance` were for the deprecated backends feature.
- Use `appengine.ModuleHostname` and `appengine.ModuleName` instead.
-* Most of `appengine/file` and parts of `appengine/blobstore` are deprecated.
- Use [Google Cloud Storage](https://godoc.org/google.golang.org/cloud/storage) instead.
-* `appengine/socket` is not required on Managed VMs. Use the standard `net` package instead.
diff --git a/vendor/google.golang.org/appengine/aetest/doc.go b/vendor/google.golang.org/appengine/aetest/doc.go
deleted file mode 100644
index 86ce8c2..0000000
--- a/vendor/google.golang.org/appengine/aetest/doc.go
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
-Package aetest provides an API for running dev_appserver for use in tests.
-
-An example test file:
-
- package foo_test
-
- import (
- "testing"
-
- "google.golang.org/appengine/memcache"
- "google.golang.org/appengine/aetest"
- )
-
- func TestFoo(t *testing.T) {
- ctx, done, err := aetest.NewContext()
- if err != nil {
- t.Fatal(err)
- }
- defer done()
-
- it := &memcache.Item{
- Key: "some-key",
- Value: []byte("some-value"),
- }
- err = memcache.Set(ctx, it)
- if err != nil {
- t.Fatalf("Set err: %v", err)
- }
- it, err = memcache.Get(ctx, "some-key")
- if err != nil {
- t.Fatalf("Get err: %v; want no error", err)
- }
- if g, w := string(it.Value), "some-value"; g != w {
- t.Errorf("retrieved Item.Value = %q, want %q", g, w)
- }
- }
-
-The environment variable APPENGINE_DEV_APPSERVER specifies the location of the
-dev_appserver.py executable to use. If unset, the system PATH is consulted.
-*/
-package aetest
diff --git a/vendor/google.golang.org/appengine/aetest/instance.go b/vendor/google.golang.org/appengine/aetest/instance.go
deleted file mode 100644
index a8f99d8..0000000
--- a/vendor/google.golang.org/appengine/aetest/instance.go
+++ /dev/null
@@ -1,51 +0,0 @@
-package aetest
-
-import (
- "io"
- "net/http"
-
- "golang.org/x/net/context"
- "google.golang.org/appengine"
-)
-
-// Instance represents a running instance of the development API Server.
-type Instance interface {
- // Close kills the child api_server.py process, releasing its resources.
- io.Closer
- // NewRequest returns an *http.Request associated with this instance.
- NewRequest(method, urlStr string, body io.Reader) (*http.Request, error)
-}
-
-// Options is used to specify options when creating an Instance.
-type Options struct {
- // AppID specifies the App ID to use during tests.
- // By default, "testapp".
- AppID string
- // StronglyConsistentDatastore is whether the local datastore should be
- // strongly consistent. This will diverge from production behaviour.
- StronglyConsistentDatastore bool
-}
-
-// NewContext starts an instance of the development API server, and returns
-// a context that will route all API calls to that server, as well as a
-// closure that must be called when the Context is no longer required.
-func NewContext() (context.Context, func(), error) {
- inst, err := NewInstance(nil)
- if err != nil {
- return nil, nil, err
- }
- req, err := inst.NewRequest("GET", "/", nil)
- if err != nil {
- inst.Close()
- return nil, nil, err
- }
- ctx := appengine.NewContext(req)
- return ctx, func() {
- inst.Close()
- }, nil
-}
-
-// PrepareDevAppserver is a hook which, if set, will be called before the
-// dev_appserver.py is started, each time it is started. If aetest.NewContext
-// is invoked from the goapp test tool, this hook is unnecessary.
-var PrepareDevAppserver func() error
diff --git a/vendor/google.golang.org/appengine/aetest/instance_classic.go b/vendor/google.golang.org/appengine/aetest/instance_classic.go
deleted file mode 100644
index fbceaa5..0000000
--- a/vendor/google.golang.org/appengine/aetest/instance_classic.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// +build appengine
-
-package aetest
-
-import "appengine/aetest"
-
-// NewInstance launches a running instance of api_server.py which can be used
-// for multiple test Contexts that delegate all App Engine API calls to that
-// instance.
-// If opts is nil the default values are used.
-func NewInstance(opts *Options) (Instance, error) {
- aetest.PrepareDevAppserver = PrepareDevAppserver
- var aeOpts *aetest.Options
- if opts != nil {
- aeOpts = &aetest.Options{
- AppID: opts.AppID,
- StronglyConsistentDatastore: opts.StronglyConsistentDatastore,
- }
- }
- return aetest.NewInstance(aeOpts)
-}
diff --git a/vendor/google.golang.org/appengine/aetest/instance_vm.go b/vendor/google.golang.org/appengine/aetest/instance_vm.go
deleted file mode 100644
index ee81480..0000000
--- a/vendor/google.golang.org/appengine/aetest/instance_vm.go
+++ /dev/null
@@ -1,276 +0,0 @@
-// +build !appengine
-
-package aetest
-
-import (
- "bufio"
- "crypto/rand"
- "errors"
- "fmt"
- "io"
- "io/ioutil"
- "net/http"
- "net/url"
- "os"
- "os/exec"
- "path/filepath"
- "regexp"
- "time"
-
- "golang.org/x/net/context"
- "google.golang.org/appengine/internal"
-)
-
-// NewInstance launches a running instance of api_server.py which can be used
-// for multiple test Contexts that delegate all App Engine API calls to that
-// instance.
-// If opts is nil the default values are used.
-func NewInstance(opts *Options) (Instance, error) {
- i := &instance{
- opts: opts,
- appID: "testapp",
- }
- if opts != nil && opts.AppID != "" {
- i.appID = opts.AppID
- }
- if err := i.startChild(); err != nil {
- return nil, err
- }
- return i, nil
-}
-
-func newSessionID() string {
- var buf [16]byte
- io.ReadFull(rand.Reader, buf[:])
- return fmt.Sprintf("%x", buf[:])
-}
-
-// instance implements the Instance interface.
-type instance struct {
- opts *Options
- child *exec.Cmd
- apiURL *url.URL // base URL of API HTTP server
- adminURL string // base URL of admin HTTP server
- appDir string
- appID string
- relFuncs []func() // funcs to release any associated contexts
-}
-
-// NewRequest returns an *http.Request associated with this instance.
-func (i *instance) NewRequest(method, urlStr string, body io.Reader) (*http.Request, error) {
- req, err := http.NewRequest(method, urlStr, body)
- if err != nil {
- return nil, err
- }
-
- // Associate this request.
- release := internal.RegisterTestRequest(req, i.apiURL, func(ctx context.Context) context.Context {
- ctx = internal.WithAppIDOverride(ctx, "dev~"+i.appID)
- return ctx
- })
- i.relFuncs = append(i.relFuncs, release)
-
- return req, nil
-}
-
-// Close kills the child api_server.py process, releasing its resources.
-func (i *instance) Close() (err error) {
- for _, rel := range i.relFuncs {
- rel()
- }
- i.relFuncs = nil
- if i.child == nil {
- return nil
- }
- defer func() {
- i.child = nil
- err1 := os.RemoveAll(i.appDir)
- if err == nil {
- err = err1
- }
- }()
-
- if p := i.child.Process; p != nil {
- errc := make(chan error, 1)
- go func() {
- errc <- i.child.Wait()
- }()
-
- // Call the quit handler on the admin server.
- res, err := http.Get(i.adminURL + "/quit")
- if err != nil {
- p.Kill()
- return fmt.Errorf("unable to call /quit handler: %v", err)
- }
- res.Body.Close()
-
- select {
- case <-time.After(15 * time.Second):
- p.Kill()
- return errors.New("timeout killing child process")
- case err = <-errc:
- // Do nothing.
- }
- }
- return
-}
-
-func fileExists(path string) bool {
- _, err := os.Stat(path)
- return err == nil
-}
-
-func findPython() (path string, err error) {
- for _, name := range []string{"python2.7", "python"} {
- path, err = exec.LookPath(name)
- if err == nil {
- return
- }
- }
- return
-}
-
-func findDevAppserver() (string, error) {
- if p := os.Getenv("APPENGINE_DEV_APPSERVER"); p != "" {
- if fileExists(p) {
- return p, nil
- }
- return "", fmt.Errorf("invalid APPENGINE_DEV_APPSERVER environment variable; path %q doesn't exist", p)
- }
- return exec.LookPath("dev_appserver.py")
-}
-
-var apiServerAddrRE = regexp.MustCompile(`Starting API server at: (\S+)`)
-var adminServerAddrRE = regexp.MustCompile(`Starting admin server at: (\S+)`)
-
-func (i *instance) startChild() (err error) {
- if PrepareDevAppserver != nil {
- if err := PrepareDevAppserver(); err != nil {
- return err
- }
- }
- python, err := findPython()
- if err != nil {
- return fmt.Errorf("Could not find python interpreter: %v", err)
- }
- devAppserver, err := findDevAppserver()
- if err != nil {
- return fmt.Errorf("Could not find dev_appserver.py: %v", err)
- }
-
- i.appDir, err = ioutil.TempDir("", "appengine-aetest")
- if err != nil {
- return err
- }
- defer func() {
- if err != nil {
- os.RemoveAll(i.appDir)
- }
- }()
- err = os.Mkdir(filepath.Join(i.appDir, "app"), 0755)
- if err != nil {
- return err
- }
- err = ioutil.WriteFile(filepath.Join(i.appDir, "app", "app.yaml"), []byte(i.appYAML()), 0644)
- if err != nil {
- return err
- }
- err = ioutil.WriteFile(filepath.Join(i.appDir, "app", "stubapp.go"), []byte(appSource), 0644)
- if err != nil {
- return err
- }
-
- appserverArgs := []string{
- devAppserver,
- "--port=0",
- "--api_port=0",
- "--admin_port=0",
- "--automatic_restart=false",
- "--skip_sdk_update_check=true",
- "--clear_datastore=true",
- "--clear_search_indexes=true",
- "--datastore_path", filepath.Join(i.appDir, "datastore"),
- }
- if i.opts != nil && i.opts.StronglyConsistentDatastore {
- appserverArgs = append(appserverArgs, "--datastore_consistency_policy=consistent")
- }
- appserverArgs = append(appserverArgs, filepath.Join(i.appDir, "app"))
-
- i.child = exec.Command(python,
- appserverArgs...,
- )
- i.child.Stdout = os.Stdout
- var stderr io.Reader
- stderr, err = i.child.StderrPipe()
- if err != nil {
- return err
- }
- stderr = io.TeeReader(stderr, os.Stderr)
- if err = i.child.Start(); err != nil {
- return err
- }
-
- // Read stderr until we have read the URLs of the API server and admin interface.
- errc := make(chan error, 1)
- go func() {
- s := bufio.NewScanner(stderr)
- for s.Scan() {
- if match := apiServerAddrRE.FindStringSubmatch(s.Text()); match != nil {
- u, err := url.Parse(match[1])
- if err != nil {
- errc <- fmt.Errorf("failed to parse API URL %q: %v", match[1], err)
- return
- }
- i.apiURL = u
- }
- if match := adminServerAddrRE.FindStringSubmatch(s.Text()); match != nil {
- i.adminURL = match[1]
- }
- if i.adminURL != "" && i.apiURL != nil {
- break
- }
- }
- errc <- s.Err()
- }()
-
- select {
- case <-time.After(15 * time.Second):
- if p := i.child.Process; p != nil {
- p.Kill()
- }
- return errors.New("timeout starting child process")
- case err := <-errc:
- if err != nil {
- return fmt.Errorf("error reading child process stderr: %v", err)
- }
- }
- if i.adminURL == "" {
- return errors.New("unable to find admin server URL")
- }
- if i.apiURL == nil {
- return errors.New("unable to find API server URL")
- }
- return nil
-}
-
-func (i *instance) appYAML() string {
- return fmt.Sprintf(appYAMLTemplate, i.appID)
-}
-
-const appYAMLTemplate = `
-application: %s
-version: 1
-runtime: go
-api_version: go1
-vm: true
-
-handlers:
-- url: /.*
- script: _go_app
-`
-
-const appSource = `
-package main
-import "google.golang.org/appengine"
-func main() { appengine.Main() }
-`
diff --git a/vendor/google.golang.org/appengine/aetest/user.go b/vendor/google.golang.org/appengine/aetest/user.go
deleted file mode 100644
index bf9266f..0000000
--- a/vendor/google.golang.org/appengine/aetest/user.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package aetest
-
-import (
- "hash/crc32"
- "net/http"
- "strconv"
-
- "google.golang.org/appengine/user"
-)
-
-// Login causes the provided Request to act as though issued by the given user.
-func Login(u *user.User, req *http.Request) {
- req.Header.Set("X-AppEngine-User-Email", u.Email)
- id := u.ID
- if id == "" {
- id = strconv.Itoa(int(crc32.Checksum([]byte(u.Email), crc32.IEEETable)))
- }
- req.Header.Set("X-AppEngine-User-Id", id)
- req.Header.Set("X-AppEngine-User-Federated-Identity", u.Email)
- req.Header.Set("X-AppEngine-User-Federated-Provider", u.FederatedProvider)
- if u.Admin {
- req.Header.Set("X-AppEngine-User-Is-Admin", "1")
- } else {
- req.Header.Set("X-AppEngine-User-Is-Admin", "0")
- }
-}
-
-// Logout causes the provided Request to act as though issued by a logged-out
-// user.
-func Logout(req *http.Request) {
- req.Header.Del("X-AppEngine-User-Email")
- req.Header.Del("X-AppEngine-User-Id")
- req.Header.Del("X-AppEngine-User-Is-Admin")
- req.Header.Del("X-AppEngine-User-Federated-Identity")
- req.Header.Del("X-AppEngine-User-Federated-Provider")
-}
diff --git a/vendor/google.golang.org/appengine/appengine.go b/vendor/google.golang.org/appengine/appengine.go
deleted file mode 100644
index 52e6ee3..0000000
--- a/vendor/google.golang.org/appengine/appengine.go
+++ /dev/null
@@ -1,76 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package appengine provides basic functionality for Google App Engine.
-//
-// For more information on how to write Go apps for Google App Engine, see:
-// https://cloud.google.com/appengine/docs/go/
-package appengine
-
-import (
- "net/http"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
-)
-
-// IsDevAppServer reports whether the App Engine app is running in the
-// development App Server.
-func IsDevAppServer() bool {
- return internal.IsDevAppServer()
-}
-
-// NewContext returns a context for an in-flight HTTP request.
-// This function is cheap.
-func NewContext(req *http.Request) context.Context {
- return WithContext(context.Background(), req)
-}
-
-// WithContext returns a copy of the parent context
-// and associates it with an in-flight HTTP request.
-// This function is cheap.
-func WithContext(parent context.Context, req *http.Request) context.Context {
- return internal.WithContext(parent, req)
-}
-
-// TODO(dsymonds): Add a Call function here? Otherwise other packages can't access internal.Call.
-
-// BlobKey is a key for a blobstore blob.
-//
-// Conceptually, this type belongs in the blobstore package, but it lives in
-// the appengine package to avoid a circular dependency: blobstore depends on
-// datastore, and datastore needs to refer to the BlobKey type.
-type BlobKey string
-
-// GeoPoint represents a location as latitude/longitude in degrees.
-type GeoPoint struct {
- Lat, Lng float64
-}
-
-// Valid returns whether a GeoPoint is within [-90, 90] latitude and [-180, 180] longitude.
-func (g GeoPoint) Valid() bool {
- return -90 <= g.Lat && g.Lat <= 90 && -180 <= g.Lng && g.Lng <= 180
-}
-
-// APICallFunc defines a function type for handling an API call.
-// See WithAPICallFunc.
-type APICallFunc func(ctx context.Context, service, method string, in, out proto.Message) error
-
-// WithAPICallFunc returns a copy of the parent context
-// that will cause API calls to invoke f instead of their normal operation.
-//
-// This is intended for advanced users only.
-func WithAPICallFunc(ctx context.Context, f APICallFunc) context.Context {
- return internal.WithCallOverride(ctx, internal.CallOverrideFunc(f))
-}
-
-// APICall performs an API call.
-//
-// This is not intended for general use; it is exported for use in conjunction
-// with WithAPICallFunc.
-func APICall(ctx context.Context, service, method string, in, out proto.Message) error {
- return internal.Call(ctx, service, method, in, out)
-}
diff --git a/vendor/google.golang.org/appengine/appengine_vm.go b/vendor/google.golang.org/appengine/appengine_vm.go
deleted file mode 100644
index 2f77590..0000000
--- a/vendor/google.golang.org/appengine/appengine_vm.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build !appengine
-
-package appengine
-
-import (
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
-)
-
-// The comment below must not be changed.
-// It is used by go-app-builder to recognise that this package has
-// the Main function to use in the synthetic main.
-// The gophers party all night; the rabbits provide the beats.
-
-// Main is the principal entry point for a Managed VMs app.
-// It installs a trivial health checker if one isn't already registered,
-// and starts listening on port 8080 (overridden by the $PORT environment
-// variable).
-//
-// See https://cloud.google.com/appengine/docs/managed-vms/custom-runtimes#health_check_requests
-// for details on how to do your own health checking.
-//
-// Main never returns.
-//
-// Main is designed so that the app's main package looks like this:
-//
-// package main
-//
-// import (
-// "google.golang.org/appengine"
-//
-// _ "myapp/package0"
-// _ "myapp/package1"
-// )
-//
-// func main() {
-// appengine.Main()
-// }
-//
-// The "myapp/packageX" packages are expected to register HTTP handlers
-// in their init functions.
-func Main() {
- internal.Main()
-}
-
-// BackgroundContext returns a context not associated with a request.
-// This should only be used when not servicing a request.
-// This only works on Managed VMs.
-func BackgroundContext() context.Context {
- return internal.BackgroundContext()
-}
diff --git a/vendor/google.golang.org/appengine/blobstore/blobstore.go b/vendor/google.golang.org/appengine/blobstore/blobstore.go
deleted file mode 100644
index e9f3df9..0000000
--- a/vendor/google.golang.org/appengine/blobstore/blobstore.go
+++ /dev/null
@@ -1,276 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package blobstore provides a client for App Engine's persistent blob
-// storage service.
-package blobstore
-
-import (
- "bufio"
- "encoding/base64"
- "fmt"
- "io"
- "io/ioutil"
- "mime"
- "mime/multipart"
- "net/http"
- "net/textproto"
- "net/url"
- "strconv"
- "strings"
- "time"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/datastore"
- "google.golang.org/appengine/internal"
-
- basepb "google.golang.org/appengine/internal/base"
- blobpb "google.golang.org/appengine/internal/blobstore"
-)
-
-const (
- blobInfoKind = "__BlobInfo__"
- blobFileIndexKind = "__BlobFileIndex__"
- zeroKey = appengine.BlobKey("")
-)
-
-// BlobInfo is the blob metadata that is stored in the datastore.
-// Filename may be empty.
-type BlobInfo struct {
- BlobKey appengine.BlobKey
- ContentType string `datastore:"content_type"`
- CreationTime time.Time `datastore:"creation"`
- Filename string `datastore:"filename"`
- Size int64 `datastore:"size"`
- MD5 string `datastore:"md5_hash"`
-
- // ObjectName is the Google Cloud Storage name for this blob.
- ObjectName string `datastore:"gs_object_name"`
-}
-
-// isErrFieldMismatch returns whether err is a datastore.ErrFieldMismatch.
-//
-// The blobstore stores blob metadata in the datastore. When loading that
-// metadata, it may contain fields that we don't care about. datastore.Get will
-// return datastore.ErrFieldMismatch in that case, so we ignore that specific
-// error.
-func isErrFieldMismatch(err error) bool {
- _, ok := err.(*datastore.ErrFieldMismatch)
- return ok
-}
-
-// Stat returns the BlobInfo for a provided blobKey. If no blob was found for
-// that key, Stat returns datastore.ErrNoSuchEntity.
-func Stat(c context.Context, blobKey appengine.BlobKey) (*BlobInfo, error) {
- c, _ = appengine.Namespace(c, "") // Blobstore is always in the empty string namespace
- dskey := datastore.NewKey(c, blobInfoKind, string(blobKey), 0, nil)
- bi := &BlobInfo{
- BlobKey: blobKey,
- }
- if err := datastore.Get(c, dskey, bi); err != nil && !isErrFieldMismatch(err) {
- return nil, err
- }
- return bi, nil
-}
-
-// Send sets the headers on response to instruct App Engine to send a blob as
-// the response body. This is more efficient than reading and writing it out
-// manually and isn't subject to normal response size limits.
-func Send(response http.ResponseWriter, blobKey appengine.BlobKey) {
- hdr := response.Header()
- hdr.Set("X-AppEngine-BlobKey", string(blobKey))
-
- if hdr.Get("Content-Type") == "" {
- // This value is known to dev_appserver to mean automatic.
- // In production this is remapped to the empty value which
- // means automatic.
- hdr.Set("Content-Type", "application/vnd.google.appengine.auto")
- }
-}
-
-// UploadURL creates an upload URL for the form that the user will
-// fill out, passing the application path to load when the POST of the
-// form is completed. These URLs expire and should not be reused. The
-// opts parameter may be nil.
-func UploadURL(c context.Context, successPath string, opts *UploadURLOptions) (*url.URL, error) {
- req := &blobpb.CreateUploadURLRequest{
- SuccessPath: proto.String(successPath),
- }
- if opts != nil {
- if n := opts.MaxUploadBytes; n != 0 {
- req.MaxUploadSizeBytes = &n
- }
- if n := opts.MaxUploadBytesPerBlob; n != 0 {
- req.MaxUploadSizePerBlobBytes = &n
- }
- if s := opts.StorageBucket; s != "" {
- req.GsBucketName = &s
- }
- }
- res := &blobpb.CreateUploadURLResponse{}
- if err := internal.Call(c, "blobstore", "CreateUploadURL", req, res); err != nil {
- return nil, err
- }
- return url.Parse(*res.Url)
-}
-
-// UploadURLOptions are the options to create an upload URL.
-type UploadURLOptions struct {
- MaxUploadBytes int64 // optional
- MaxUploadBytesPerBlob int64 // optional
-
- // StorageBucket specifies the Google Cloud Storage bucket in which
- // to store the blob.
- // This is required if you use Cloud Storage instead of Blobstore.
- // Your application must have permission to write to the bucket.
- // You may optionally specify a bucket name and path in the format
- // "bucket_name/path", in which case the included path will be the
- // prefix of the uploaded object's name.
- StorageBucket string
-}
-
-// Delete deletes a blob.
-func Delete(c context.Context, blobKey appengine.BlobKey) error {
- return DeleteMulti(c, []appengine.BlobKey{blobKey})
-}
-
-// DeleteMulti deletes multiple blobs.
-func DeleteMulti(c context.Context, blobKey []appengine.BlobKey) error {
- s := make([]string, len(blobKey))
- for i, b := range blobKey {
- s[i] = string(b)
- }
- req := &blobpb.DeleteBlobRequest{
- BlobKey: s,
- }
- res := &basepb.VoidProto{}
- if err := internal.Call(c, "blobstore", "DeleteBlob", req, res); err != nil {
- return err
- }
- return nil
-}
-
-func errorf(format string, args ...interface{}) error {
- return fmt.Errorf("blobstore: "+format, args...)
-}
-
-// ParseUpload parses the synthetic POST request that your app gets from
-// App Engine after a user's successful upload of blobs. Given the request,
-// ParseUpload returns a map of the blobs received (keyed by HTML form
-// element name) and other non-blob POST parameters.
-func ParseUpload(req *http.Request) (blobs map[string][]*BlobInfo, other url.Values, err error) {
- _, params, err := mime.ParseMediaType(req.Header.Get("Content-Type"))
- if err != nil {
- return nil, nil, err
- }
- boundary := params["boundary"]
- if boundary == "" {
- return nil, nil, errorf("did not find MIME multipart boundary")
- }
-
- blobs = make(map[string][]*BlobInfo)
- other = make(url.Values)
-
- mreader := multipart.NewReader(io.MultiReader(req.Body, strings.NewReader("\r\n\r\n")), boundary)
- for {
- part, perr := mreader.NextPart()
- if perr == io.EOF {
- break
- }
- if perr != nil {
- return nil, nil, errorf("error reading next mime part with boundary %q (len=%d): %v",
- boundary, len(boundary), perr)
- }
-
- bi := &BlobInfo{}
- ctype, params, err := mime.ParseMediaType(part.Header.Get("Content-Disposition"))
- if err != nil {
- return nil, nil, err
- }
- bi.Filename = params["filename"]
- formKey := params["name"]
-
- ctype, params, err = mime.ParseMediaType(part.Header.Get("Content-Type"))
- if err != nil {
- return nil, nil, err
- }
- bi.BlobKey = appengine.BlobKey(params["blob-key"])
- if ctype != "message/external-body" || bi.BlobKey == "" {
- if formKey != "" {
- slurp, serr := ioutil.ReadAll(part)
- if serr != nil {
- return nil, nil, errorf("error reading %q MIME part", formKey)
- }
- other[formKey] = append(other[formKey], string(slurp))
- }
- continue
- }
-
- // App Engine sends a MIME header as the body of each MIME part.
- tp := textproto.NewReader(bufio.NewReader(part))
- header, mimeerr := tp.ReadMIMEHeader()
- if mimeerr != nil {
- return nil, nil, mimeerr
- }
- bi.Size, err = strconv.ParseInt(header.Get("Content-Length"), 10, 64)
- if err != nil {
- return nil, nil, err
- }
- bi.ContentType = header.Get("Content-Type")
-
- // Parse the time from the MIME header like:
- // X-AppEngine-Upload-Creation: 2011-03-15 21:38:34.712136
- createDate := header.Get("X-AppEngine-Upload-Creation")
- if createDate == "" {
- return nil, nil, errorf("expected to find an X-AppEngine-Upload-Creation header")
- }
- bi.CreationTime, err = time.Parse("2006-01-02 15:04:05.000000", createDate)
- if err != nil {
- return nil, nil, errorf("error parsing X-AppEngine-Upload-Creation: %s", err)
- }
-
- if hdr := header.Get("Content-MD5"); hdr != "" {
- md5, err := base64.URLEncoding.DecodeString(hdr)
- if err != nil {
- return nil, nil, errorf("bad Content-MD5 %q: %v", hdr, err)
- }
- bi.MD5 = string(md5)
- }
-
- // If the GCS object name was provided, record it.
- bi.ObjectName = header.Get("X-AppEngine-Cloud-Storage-Object")
-
- blobs[formKey] = append(blobs[formKey], bi)
- }
- return
-}
-
-// Reader is a blob reader.
-type Reader interface {
- io.Reader
- io.ReaderAt
- io.Seeker
-}
-
-// NewReader returns a reader for a blob. It always succeeds; if the blob does
-// not exist then an error will be reported upon first read.
-func NewReader(c context.Context, blobKey appengine.BlobKey) Reader {
- return openBlob(c, blobKey)
-}
-
-// BlobKeyForFile returns a BlobKey for a Google Storage file.
-// The filename should be of the form "/gs/bucket_name/object_name".
-func BlobKeyForFile(c context.Context, filename string) (appengine.BlobKey, error) {
- req := &blobpb.CreateEncodedGoogleStorageKeyRequest{
- Filename: &filename,
- }
- res := &blobpb.CreateEncodedGoogleStorageKeyResponse{}
- if err := internal.Call(c, "blobstore", "CreateEncodedGoogleStorageKey", req, res); err != nil {
- return "", err
- }
- return appengine.BlobKey(*res.BlobKey), nil
-}
diff --git a/vendor/google.golang.org/appengine/blobstore/read.go b/vendor/google.golang.org/appengine/blobstore/read.go
deleted file mode 100644
index 578b1f5..0000000
--- a/vendor/google.golang.org/appengine/blobstore/read.go
+++ /dev/null
@@ -1,160 +0,0 @@
-// Copyright 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package blobstore
-
-import (
- "errors"
- "fmt"
- "io"
- "os"
- "sync"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
-
- blobpb "google.golang.org/appengine/internal/blobstore"
-)
-
-// openBlob returns a reader for a blob. It always succeeds; if the blob does
-// not exist then an error will be reported upon first read.
-func openBlob(c context.Context, blobKey appengine.BlobKey) Reader {
- return &reader{
- c: c,
- blobKey: blobKey,
- }
-}
-
-const readBufferSize = 256 * 1024
-
-// reader is a blob reader. It implements the Reader interface.
-type reader struct {
- c context.Context
-
- // Either blobKey or filename is set:
- blobKey appengine.BlobKey
- filename string
-
- closeFunc func() // is nil if unavailable or already closed.
-
- // buf is the read buffer. r is how much of buf has been read.
- // off is the offset of buf[0] relative to the start of the blob.
- // An invariant is 0 <= r && r <= len(buf).
- // Reads that don't require an RPC call will increment r but not off.
- // Seeks may modify r without discarding the buffer, but only if the
- // invariant can be maintained.
- mu sync.Mutex
- buf []byte
- r int
- off int64
-}
-
-func (r *reader) Close() error {
- if f := r.closeFunc; f != nil {
- f()
- }
- r.closeFunc = nil
- return nil
-}
-
-func (r *reader) Read(p []byte) (int, error) {
- if len(p) == 0 {
- return 0, nil
- }
- r.mu.Lock()
- defer r.mu.Unlock()
- if r.r == len(r.buf) {
- if err := r.fetch(r.off + int64(r.r)); err != nil {
- return 0, err
- }
- }
- n := copy(p, r.buf[r.r:])
- r.r += n
- return n, nil
-}
-
-func (r *reader) ReadAt(p []byte, off int64) (int, error) {
- if len(p) == 0 {
- return 0, nil
- }
- r.mu.Lock()
- defer r.mu.Unlock()
- // Convert relative offsets to absolute offsets.
- ab0 := r.off + int64(r.r)
- ab1 := r.off + int64(len(r.buf))
- ap0 := off
- ap1 := off + int64(len(p))
- // Check if we can satisfy the read entirely out of the existing buffer.
- if r.off <= ap0 && ap1 <= ab1 {
- // Convert off from an absolute offset to a relative offset.
- rp0 := int(ap0 - r.off)
- return copy(p, r.buf[rp0:]), nil
- }
- // Restore the original Read/Seek offset after ReadAt completes.
- defer r.seek(ab0)
- // Repeatedly fetch and copy until we have filled p.
- n := 0
- for len(p) > 0 {
- if err := r.fetch(off + int64(n)); err != nil {
- return n, err
- }
- r.r = copy(p, r.buf)
- n += r.r
- p = p[r.r:]
- }
- return n, nil
-}
-
-func (r *reader) Seek(offset int64, whence int) (ret int64, err error) {
- r.mu.Lock()
- defer r.mu.Unlock()
- switch whence {
- case os.SEEK_SET:
- ret = offset
- case os.SEEK_CUR:
- ret = r.off + int64(r.r) + offset
- case os.SEEK_END:
- return 0, errors.New("seeking relative to the end of a blob isn't supported")
- default:
- return 0, fmt.Errorf("invalid Seek whence value: %d", whence)
- }
- if ret < 0 {
- return 0, errors.New("negative Seek offset")
- }
- return r.seek(ret)
-}
-
-// fetch fetches readBufferSize bytes starting at the given offset. On success,
-// the data is saved as r.buf.
-func (r *reader) fetch(off int64) error {
- req := &blobpb.FetchDataRequest{
- BlobKey: proto.String(string(r.blobKey)),
- StartIndex: proto.Int64(off),
- EndIndex: proto.Int64(off + readBufferSize - 1), // EndIndex is inclusive.
- }
- res := &blobpb.FetchDataResponse{}
- if err := internal.Call(r.c, "blobstore", "FetchData", req, res); err != nil {
- return err
- }
- if len(res.Data) == 0 {
- return io.EOF
- }
- r.buf, r.r, r.off = res.Data, 0, off
- return nil
-}
-
-// seek seeks to the given offset with an effective whence equal to SEEK_SET.
-// It discards the read buffer if the invariant cannot be maintained.
-func (r *reader) seek(off int64) (int64, error) {
- delta := off - r.off
- if delta >= 0 && delta < int64(len(r.buf)) {
- r.r = int(delta)
- return off, nil
- }
- r.buf, r.r, r.off = nil, 0, off
- return off, nil
-}
diff --git a/vendor/google.golang.org/appengine/capability/capability.go b/vendor/google.golang.org/appengine/capability/capability.go
deleted file mode 100644
index 26edad7..0000000
--- a/vendor/google.golang.org/appengine/capability/capability.go
+++ /dev/null
@@ -1,52 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package capability exposes information about outages and scheduled downtime
-for specific API capabilities.
-
-This package does not work on Managed VMs.
-
-Example:
- if !capability.Enabled(c, "datastore_v3", "write") {
- // show user a different page
- }
-*/
-package capability
-
-import (
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- "google.golang.org/appengine/log"
-
- pb "google.golang.org/appengine/internal/capability"
-)
-
-// Enabled returns whether an API's capabilities are enabled.
-// The wildcard "*" capability matches every capability of an API.
-// If the underlying RPC fails (if the package is unknown, for example),
-// false is returned and information is written to the application log.
-func Enabled(ctx context.Context, api, capability string) bool {
- req := &pb.IsEnabledRequest{
- Package: &api,
- Capability: []string{capability},
- }
- res := &pb.IsEnabledResponse{}
- if err := internal.Call(ctx, "capability_service", "IsEnabled", req, res); err != nil {
- log.Warningf(ctx, "capability.Enabled: RPC failed: %v", err)
- return false
- }
- switch *res.SummaryStatus {
- case pb.IsEnabledResponse_ENABLED,
- pb.IsEnabledResponse_SCHEDULED_FUTURE,
- pb.IsEnabledResponse_SCHEDULED_NOW:
- return true
- case pb.IsEnabledResponse_UNKNOWN:
- log.Errorf(ctx, "capability.Enabled: unknown API capability %s/%s", api, capability)
- return false
- default:
- return false
- }
-}
diff --git a/vendor/google.golang.org/appengine/channel/channel.go b/vendor/google.golang.org/appengine/channel/channel.go
deleted file mode 100644
index 004f5dd..0000000
--- a/vendor/google.golang.org/appengine/channel/channel.go
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package channel implements the server side of App Engine's Channel API.
-
-Create creates a new channel associated with the given clientID,
-which must be unique to the client that will use the returned token.
-
- token, err := channel.Create(c, "player1")
- if err != nil {
- // handle error
- }
- // return token to the client in an HTTP response
-
-Send sends a message to the client over the channel identified by clientID.
-
- channel.Send(c, "player1", "Game over!")
-*/
-package channel
-
-import (
- "encoding/json"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- basepb "google.golang.org/appengine/internal/base"
- pb "google.golang.org/appengine/internal/channel"
-)
-
-// Create creates a channel and returns a token for use by the client.
-// The clientID is an application-provided string used to identify the client.
-func Create(c context.Context, clientID string) (token string, err error) {
- req := &pb.CreateChannelRequest{
- ApplicationKey: &clientID,
- }
- resp := &pb.CreateChannelResponse{}
- err = internal.Call(c, service, "CreateChannel", req, resp)
- token = resp.GetToken()
- return token, remapError(err)
-}
-
-// Send sends a message on the channel associated with clientID.
-func Send(c context.Context, clientID, message string) error {
- req := &pb.SendMessageRequest{
- ApplicationKey: &clientID,
- Message: &message,
- }
- resp := &basepb.VoidProto{}
- return remapError(internal.Call(c, service, "SendChannelMessage", req, resp))
-}
-
-// SendJSON is a helper function that sends a JSON-encoded value
-// on the channel associated with clientID.
-func SendJSON(c context.Context, clientID string, value interface{}) error {
- m, err := json.Marshal(value)
- if err != nil {
- return err
- }
- return Send(c, clientID, string(m))
-}
-
-// remapError fixes any APIError referencing "xmpp" into one referencing "channel".
-func remapError(err error) error {
- if e, ok := err.(*internal.APIError); ok {
- if e.Service == "xmpp" {
- e.Service = "channel"
- }
- }
- return err
-}
-
-var service = "xmpp" // prod
-
-func init() {
- if appengine.IsDevAppServer() {
- service = "channel" // dev
- }
- internal.RegisterErrorCodeMap("channel", pb.ChannelServiceError_ErrorCode_name)
-}
diff --git a/vendor/google.golang.org/appengine/cloudsql/cloudsql.go b/vendor/google.golang.org/appengine/cloudsql/cloudsql.go
deleted file mode 100644
index 795fba1..0000000
--- a/vendor/google.golang.org/appengine/cloudsql/cloudsql.go
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package cloudsql exposes access to Google Cloud SQL databases.
-
-This package does not work on Managed VMs.
-
-This package is intended for MySQL drivers to make App Engine-specific
-connections. Applications should use this package through database/sql:
-Select a pure Go MySQL driver that supports this package, and use sql.Open
-with protocol "cloudsql" and an address of the Cloud SQL instance.
-
-A Go MySQL driver that has been tested to work well with Cloud SQL
-is the go-sql-driver:
- import "database/sql"
- import _ "github.com/go-sql-driver/mysql"
-
- db, err := sql.Open("mysql", "user@cloudsql(project-id:instance-name)/dbname")
-
-
-Another driver that works well with Cloud SQL is the mymysql driver:
- import "database/sql"
- import _ "github.com/ziutek/mymysql/godrv"
-
- db, err := sql.Open("mymysql", "cloudsql:instance-name*dbname/user/password")
-
-
-Using either of these drivers, you can perform a standard SQL query.
-This example assumes there is a table named 'users' with
-columns 'first_name' and 'last_name':
-
- rows, err := db.Query("SELECT first_name, last_name FROM users")
- if err != nil {
- log.Errorf(ctx, "db.Query: %v", err)
- }
- defer rows.Close()
-
- for rows.Next() {
- var firstName string
- var lastName string
- if err := rows.Scan(&firstName, &lastName); err != nil {
- log.Errorf(ctx, "rows.Scan: %v", err)
- continue
- }
- log.Infof(ctx, "First: %v - Last: %v", firstName, lastName)
- }
- if err := rows.Err(); err != nil {
- log.Errorf(ctx, "Row error: %v", err)
- }
-*/
-package cloudsql
-
-import (
- "net"
-)
-
-// Dial connects to the named Cloud SQL instance.
-func Dial(instance string) (net.Conn, error) {
- return connect(instance)
-}
diff --git a/vendor/google.golang.org/appengine/cloudsql/cloudsql_classic.go b/vendor/google.golang.org/appengine/cloudsql/cloudsql_classic.go
deleted file mode 100644
index af62dba..0000000
--- a/vendor/google.golang.org/appengine/cloudsql/cloudsql_classic.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build appengine
-
-package cloudsql
-
-import (
- "net"
-
- "appengine/cloudsql"
-)
-
-func connect(instance string) (net.Conn, error) {
- return cloudsql.Dial(instance)
-}
diff --git a/vendor/google.golang.org/appengine/cloudsql/cloudsql_vm.go b/vendor/google.golang.org/appengine/cloudsql/cloudsql_vm.go
deleted file mode 100644
index c3f76f2..0000000
--- a/vendor/google.golang.org/appengine/cloudsql/cloudsql_vm.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build !appengine
-
-package cloudsql
-
-import (
- "errors"
- "net"
-)
-
-func connect(instance string) (net.Conn, error) {
- return nil, errors.New("cloudsql: not supported in Managed VMs")
-}
diff --git a/vendor/google.golang.org/appengine/cmd/aebundler/aebundler.go b/vendor/google.golang.org/appengine/cmd/aebundler/aebundler.go
deleted file mode 100644
index 9080ce2..0000000
--- a/vendor/google.golang.org/appengine/cmd/aebundler/aebundler.go
+++ /dev/null
@@ -1,342 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Program aebundler turns a Go app into a fully self-contained tar file.
-// The app and its subdirectories (if any) are placed under "."
-// and the dependencies from $GOPATH are placed under ./_gopath/src.
-// A main func is synthesized if one does not exist.
-//
-// A sample Dockerfile to be used with this bundler could look like this:
-// FROM gcr.io/google_appengine/go-compat
-// ADD . /app
-// RUN GOPATH=/app/_gopath go build -tags appenginevm -o /app/_ah/exe
-package main
-
-import (
- "archive/tar"
- "flag"
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "io"
- "io/ioutil"
- "os"
- "path/filepath"
- "strings"
-)
-
-var (
- output = flag.String("o", "", "name of output tar file or '-' for stdout")
- rootDir = flag.String("root", ".", "directory name of application root")
- vm = flag.Bool("vm", true, "bundle a Managed VM app")
-
- skipFiles = map[string]bool{
- ".git": true,
- ".gitconfig": true,
- ".hg": true,
- ".travis.yml": true,
- }
-)
-
-const (
- newMain = `package main
-import "google.golang.org/appengine"
-func main() {
- appengine.Main()
-}
-`
-)
-
-func usage() {
- fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
-	fmt.Fprintf(os.Stderr, "\t%s -o <file.tar|->\tBundle app to named tar file or stdout\n", os.Args[0])
- fmt.Fprintf(os.Stderr, "\noptional arguments:\n")
- flag.PrintDefaults()
-}
-
-func main() {
- flag.Usage = usage
- flag.Parse()
-
- var tags []string
- if *vm {
- tags = append(tags, "appenginevm")
- } else {
- tags = append(tags, "appengine")
- }
-
- tarFile := *output
- if tarFile == "" {
- usage()
- errorf("Required -o flag not specified.")
- }
-
- app, err := analyze(tags)
- if err != nil {
- errorf("Error analyzing app: %v", err)
- }
- if err := app.bundle(tarFile); err != nil {
- errorf("Unable to bundle app: %v", err)
- }
-}
-
-// errorf prints the error message and exits.
-func errorf(format string, a ...interface{}) {
- fmt.Fprintf(os.Stderr, "aebundler: "+format+"\n", a...)
- os.Exit(1)
-}
-
-type app struct {
- hasMain bool
- appFiles []string
- imports map[string]string
-}
-
-// analyze checks the app for building with the given build tags and returns hasMain,
-// app files, and a map of full directory import names to original import names.
-func analyze(tags []string) (*app, error) {
- ctxt := buildContext(tags)
- hasMain, appFiles, err := checkMain(ctxt)
- if err != nil {
- return nil, err
- }
- gopath := filepath.SplitList(ctxt.GOPATH)
- im, err := imports(ctxt, *rootDir, gopath)
- return &app{
- hasMain: hasMain,
- appFiles: appFiles,
- imports: im,
- }, err
-}
-
-// buildContext returns the context for building the source.
-func buildContext(tags []string) *build.Context {
- return &build.Context{
- GOARCH: build.Default.GOARCH,
- GOOS: build.Default.GOOS,
- GOROOT: build.Default.GOROOT,
- GOPATH: build.Default.GOPATH,
- Compiler: build.Default.Compiler,
- BuildTags: append(build.Default.BuildTags, tags...),
- }
-}
-
-// bundle bundles the app into the named tarFile ("-"==stdout).
-func (s *app) bundle(tarFile string) (err error) {
- var out io.Writer
- if tarFile == "-" {
- out = os.Stdout
- } else {
- f, err := os.Create(tarFile)
- if err != nil {
- return err
- }
- defer func() {
- if cerr := f.Close(); err == nil {
- err = cerr
- }
- }()
- out = f
- }
- tw := tar.NewWriter(out)
-
- for srcDir, importName := range s.imports {
- dstDir := "_gopath/src/" + importName
- if err = copyTree(tw, dstDir, srcDir); err != nil {
- return fmt.Errorf("unable to copy directory %v to %v: %v", srcDir, dstDir, err)
- }
- }
- if err := copyTree(tw, ".", *rootDir); err != nil {
- return fmt.Errorf("unable to copy root directory to /app: %v", err)
- }
- if !s.hasMain {
- if err := synthesizeMain(tw, s.appFiles); err != nil {
- return fmt.Errorf("unable to synthesize new main func: %v", err)
- }
- }
-
- if err := tw.Close(); err != nil {
- return fmt.Errorf("unable to close tar file %v: %v", tarFile, err)
- }
- return nil
-}
-
-// synthesizeMain generates a new main func and writes it to the tarball.
-func synthesizeMain(tw *tar.Writer, appFiles []string) error {
- appMap := make(map[string]bool)
- for _, f := range appFiles {
- appMap[f] = true
- }
- var f string
- for i := 0; i < 100; i++ {
- f = fmt.Sprintf("app_main%d.go", i)
- if !appMap[filepath.Join(*rootDir, f)] {
- break
- }
- }
- if appMap[filepath.Join(*rootDir, f)] {
- return fmt.Errorf("unable to find unique name for %v", f)
- }
- hdr := &tar.Header{
- Name: f,
- Mode: 0644,
- Size: int64(len(newMain)),
- }
- if err := tw.WriteHeader(hdr); err != nil {
- return fmt.Errorf("unable to write header for %v: %v", f, err)
- }
- if _, err := tw.Write([]byte(newMain)); err != nil {
- return fmt.Errorf("unable to write %v to tar file: %v", f, err)
- }
- return nil
-}
-
-// imports returns a map of all import directories (recursively) used by the app.
-// The return value maps full directory names to original import names.
-func imports(ctxt *build.Context, srcDir string, gopath []string) (map[string]string, error) {
- pkg, err := ctxt.ImportDir(srcDir, 0)
- if err != nil {
- return nil, fmt.Errorf("unable to analyze source: %v", err)
- }
-
- // Resolve all non-standard-library imports
- result := make(map[string]string)
- for _, v := range pkg.Imports {
- if !strings.Contains(v, ".") {
- continue
- }
- src, err := findInGopath(v, gopath)
- if err != nil {
- return nil, fmt.Errorf("unable to find import %v in gopath %v: %v", v, gopath, err)
- }
- result[src] = v
- im, err := imports(ctxt, src, gopath)
- if err != nil {
- return nil, fmt.Errorf("unable to parse package %v: %v", src, err)
- }
- for k, v := range im {
- result[k] = v
- }
- }
- return result, nil
-}
-
-// findInGopath searches the gopath for the named import directory.
-func findInGopath(dir string, gopath []string) (string, error) {
- for _, v := range gopath {
- dst := filepath.Join(v, "src", dir)
- if _, err := os.Stat(dst); err == nil {
- return dst, nil
- }
- }
- return "", fmt.Errorf("unable to find package %v in gopath %v", dir, gopath)
-}
-
-// copyTree copies srcDir to tar file dstDir, ignoring skipFiles.
-func copyTree(tw *tar.Writer, dstDir, srcDir string) error {
- entries, err := ioutil.ReadDir(srcDir)
- if err != nil {
- return fmt.Errorf("unable to read dir %v: %v", srcDir, err)
- }
- for _, entry := range entries {
- n := entry.Name()
- if skipFiles[n] {
- continue
- }
- s := filepath.Join(srcDir, n)
- d := filepath.Join(dstDir, n)
- if entry.IsDir() {
- if err := copyTree(tw, d, s); err != nil {
- return fmt.Errorf("unable to copy dir %v to %v: %v", s, d, err)
- }
- continue
- }
- if err := copyFile(tw, d, s); err != nil {
- return fmt.Errorf("unable to copy dir %v to %v: %v", s, d, err)
- }
- }
- return nil
-}
-
-// copyFile copies src to tar file dst.
-func copyFile(tw *tar.Writer, dst, src string) error {
- s, err := os.Open(src)
- if err != nil {
- return fmt.Errorf("unable to open %v: %v", src, err)
- }
- defer s.Close()
- fi, err := s.Stat()
- if err != nil {
- return fmt.Errorf("unable to stat %v: %v", src, err)
- }
-
- hdr, err := tar.FileInfoHeader(fi, dst)
- if err != nil {
- return fmt.Errorf("unable to create tar header for %v: %v", dst, err)
- }
- hdr.Name = dst
- if err := tw.WriteHeader(hdr); err != nil {
- return fmt.Errorf("unable to write header for %v: %v", dst, err)
- }
- _, err = io.Copy(tw, s)
- if err != nil {
- return fmt.Errorf("unable to copy %v to %v: %v", src, dst, err)
- }
- return nil
-}
-
-// checkMain verifies that there is a single "main" function.
-// It also returns a list of all Go source files in the app.
-func checkMain(ctxt *build.Context) (bool, []string, error) {
- pkg, err := ctxt.ImportDir(*rootDir, 0)
- if err != nil {
- return false, nil, fmt.Errorf("unable to analyze source: %v", err)
- }
- if !pkg.IsCommand() {
- errorf("Your app's package needs to be changed from %q to \"main\".\n", pkg.Name)
- }
- // Search for a "func main"
- var hasMain bool
- var appFiles []string
- for _, f := range pkg.GoFiles {
- n := filepath.Join(*rootDir, f)
- appFiles = append(appFiles, n)
- if hasMain, err = readFile(n); err != nil {
- return false, nil, fmt.Errorf("error parsing %q: %v", n, err)
- }
- }
- return hasMain, appFiles, nil
-}
-
-// isMain returns whether the given function declaration is a main function.
-// Such a function must be called "main", not have a receiver, and have no arguments or return types.
-func isMain(f *ast.FuncDecl) bool {
- ft := f.Type
- return f.Name.Name == "main" && f.Recv == nil && ft.Params.NumFields() == 0 && ft.Results.NumFields() == 0
-}
-
-// readFile reads and parses the Go source code file and returns whether it has a main function.
-func readFile(filename string) (hasMain bool, err error) {
- var src []byte
- src, err = ioutil.ReadFile(filename)
- if err != nil {
- return
- }
- fset := token.NewFileSet()
- file, err := parser.ParseFile(fset, filename, src, 0)
- for _, decl := range file.Decls {
- funcDecl, ok := decl.(*ast.FuncDecl)
- if !ok {
- continue
- }
- if !isMain(funcDecl) {
- continue
- }
- hasMain = true
- break
- }
- return
-}
diff --git a/vendor/google.golang.org/appengine/cmd/aedeploy/aedeploy.go b/vendor/google.golang.org/appengine/cmd/aedeploy/aedeploy.go
deleted file mode 100644
index 9c608ee..0000000
--- a/vendor/google.golang.org/appengine/cmd/aedeploy/aedeploy.go
+++ /dev/null
@@ -1,264 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Program aedeploy assists with deploying Go Managed VM apps to production.
-// A temporary directory is created; the app, its subdirectories, and all its
-// dependencies from $GOPATH are copied into the directory; then the app
-// is deployed to production with the provided command.
-//
-// The app must be in "package main".
-//
-// This command must be issued from within the root directory of the app
-// (where the app.yaml file is located).
-package main
-
-import (
- "flag"
- "fmt"
- "go/build"
- "io"
- "io/ioutil"
- "os"
- "os/exec"
- "path/filepath"
- "strings"
-)
-
-var (
- skipFiles = map[string]bool{
- ".git": true,
- ".gitconfig": true,
- ".hg": true,
- ".travis.yml": true,
- }
-
- gopathCache = map[string]string{}
-)
-
-func usage() {
- fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
- fmt.Fprintf(os.Stderr, "\t%s gcloud --verbosity debug preview app deploy --version myversion ./app.yaml\tDeploy app to production\n", os.Args[0])
-}
-
-func main() {
- flag.Usage = usage
- flag.Parse()
- if flag.NArg() < 1 {
- usage()
- os.Exit(1)
- }
-
- if err := aedeploy(); err != nil {
- fmt.Fprintf(os.Stderr, os.Args[0]+": Error: %v\n", err)
- os.Exit(1)
- }
-}
-
-func aedeploy() error {
- tags := []string{"appenginevm"}
- app, err := analyze(tags)
- if err != nil {
- return err
- }
-
- tmpDir, err := app.bundle()
- if tmpDir != "" {
- defer os.RemoveAll(tmpDir)
- }
- if err != nil {
- return err
- }
-
- if err := os.Chdir(tmpDir); err != nil {
- return fmt.Errorf("unable to chdir to %v: %v", tmpDir, err)
- }
- return deploy()
-}
-
-// deploy calls the provided command to deploy the app from the temporary directory.
-func deploy() error {
- cmd := exec.Command(flag.Arg(0), flag.Args()[1:]...)
- cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr
- if err := cmd.Run(); err != nil {
- return fmt.Errorf("unable to run %q: %v", strings.Join(flag.Args(), " "), err)
- }
- return nil
-}
-
-type app struct {
- appFiles []string
- imports map[string]string
-}
-
-// analyze checks the app for building with the given build tags and returns
-// app files, and a map of full directory import names to original import names.
-func analyze(tags []string) (*app, error) {
- ctxt := buildContext(tags)
- appFiles, err := appFiles(ctxt)
- if err != nil {
- return nil, err
- }
- gopath := filepath.SplitList(ctxt.GOPATH)
- im, err := imports(ctxt, ".", gopath)
- return &app{
- appFiles: appFiles,
- imports: im,
- }, err
-}
-
-// buildContext returns the context for building the source.
-func buildContext(tags []string) *build.Context {
- return &build.Context{
- GOARCH: "amd64",
- GOOS: "linux",
- GOROOT: build.Default.GOROOT,
- GOPATH: build.Default.GOPATH,
- Compiler: build.Default.Compiler,
- BuildTags: append(build.Default.BuildTags, tags...),
- }
-}
-
-// bundle bundles the app into a temporary directory.
-func (s *app) bundle() (tmpdir string, err error) {
- workDir, err := ioutil.TempDir("", "aedeploy")
- if err != nil {
- return "", fmt.Errorf("unable to create tmpdir: %v", err)
- }
-
- for srcDir, importName := range s.imports {
- dstDir := "_gopath/src/" + importName
- if err := copyTree(workDir, dstDir, srcDir); err != nil {
- return workDir, fmt.Errorf("unable to copy directory %v to %v: %v", srcDir, dstDir, err)
- }
- }
- if err := copyTree(workDir, ".", "."); err != nil {
- return workDir, fmt.Errorf("unable to copy root directory to /app: %v", err)
- }
- return workDir, nil
-}
-
-// imports returns a map of all import directories (recursively) used by the app.
-// The return value maps full directory names to original import names.
-func imports(ctxt *build.Context, srcDir string, gopath []string) (map[string]string, error) {
- pkg, err := ctxt.ImportDir(srcDir, 0)
- if err != nil {
- return nil, err
- }
-
- // Resolve all non-standard-library imports
- result := make(map[string]string)
- for _, v := range pkg.Imports {
- if !strings.Contains(v, ".") {
- continue
- }
- src, err := findInGopath(v, gopath)
- if err != nil {
- return nil, fmt.Errorf("unable to find import %v in gopath %v: %v", v, gopath, err)
- }
- if _, ok := result[src]; ok { // Already processed
- continue
- }
- result[src] = v
- im, err := imports(ctxt, src, gopath)
- if err != nil {
- return nil, fmt.Errorf("unable to parse package %v: %v", src, err)
- }
- for k, v := range im {
- result[k] = v
- }
- }
- return result, nil
-}
-
-// findInGopath searches the gopath for the named import directory.
-func findInGopath(dir string, gopath []string) (string, error) {
- if v, ok := gopathCache[dir]; ok {
- return v, nil
- }
- for _, v := range gopath {
- dst := filepath.Join(v, "src", dir)
- if _, err := os.Stat(dst); err == nil {
- gopathCache[dir] = dst
- return dst, nil
- }
- }
- return "", fmt.Errorf("unable to find package %v in gopath %v", dir, gopath)
-}
-
-// copyTree copies srcDir to dstDir relative to dstRoot, ignoring skipFiles.
-func copyTree(dstRoot, dstDir, srcDir string) error {
- d := filepath.Join(dstRoot, dstDir)
- if err := os.MkdirAll(d, 0755); err != nil {
- return fmt.Errorf("unable to create directory %q: %v", d, err)
- }
-
- entries, err := ioutil.ReadDir(srcDir)
- if err != nil {
- return fmt.Errorf("unable to read dir %q: %v", srcDir, err)
- }
- for _, entry := range entries {
- n := entry.Name()
- if skipFiles[n] {
- continue
- }
- s := filepath.Join(srcDir, n)
- if entry.Mode()&os.ModeSymlink == os.ModeSymlink {
- if entry, err = os.Stat(s); err != nil {
- return fmt.Errorf("unable to stat %v: %v", s, err)
- }
- }
- d := filepath.Join(dstDir, n)
- if entry.IsDir() {
- if err := copyTree(dstRoot, d, s); err != nil {
- return fmt.Errorf("unable to copy dir %q to %q: %v", s, d, err)
- }
- continue
- }
- if err := copyFile(dstRoot, d, s); err != nil {
- return fmt.Errorf("unable to copy dir %q to %q: %v", s, d, err)
- }
- }
- return nil
-}
-
-// copyFile copies src to dst relative to dstRoot.
-func copyFile(dstRoot, dst, src string) error {
- s, err := os.Open(src)
- if err != nil {
- return fmt.Errorf("unable to open %q: %v", src, err)
- }
- defer s.Close()
-
- dst = filepath.Join(dstRoot, dst)
- d, err := os.Create(dst)
- if err != nil {
- return fmt.Errorf("unable to create %q: %v", dst, err)
- }
- _, err = io.Copy(d, s)
- if err != nil {
- d.Close() // ignore error, copy already failed.
- return fmt.Errorf("unable to copy %q to %q: %v", src, dst, err)
- }
- if err := d.Close(); err != nil {
- return fmt.Errorf("unable to close %q: %v", dst, err)
- }
- return nil
-}
-
-// appFiles returns a list of all Go source files in the app.
-func appFiles(ctxt *build.Context) ([]string, error) {
- pkg, err := ctxt.ImportDir(".", 0)
- if err != nil {
- return nil, err
- }
- if !pkg.IsCommand() {
- return nil, fmt.Errorf(`the root of your app needs to be package "main" (currently %q). Please see https://cloud.google.com/appengine/docs/go/managed-vms for more details on structuring your app.`, pkg.Name)
- }
- var appFiles []string
- for _, f := range pkg.GoFiles {
- n := filepath.Join(".", f)
- appFiles = append(appFiles, n)
- }
- return appFiles, nil
-}
diff --git a/vendor/google.golang.org/appengine/datastore/datastore.go b/vendor/google.golang.org/appengine/datastore/datastore.go
deleted file mode 100644
index 9422e41..0000000
--- a/vendor/google.golang.org/appengine/datastore/datastore.go
+++ /dev/null
@@ -1,406 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package datastore
-
-import (
- "errors"
- "fmt"
- "reflect"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/datastore"
-)
-
-var (
- // ErrInvalidEntityType is returned when functions like Get or Next are
- // passed a dst or src argument of invalid type.
- ErrInvalidEntityType = errors.New("datastore: invalid entity type")
- // ErrInvalidKey is returned when an invalid key is presented.
- ErrInvalidKey = errors.New("datastore: invalid key")
- // ErrNoSuchEntity is returned when no entity was found for a given key.
- ErrNoSuchEntity = errors.New("datastore: no such entity")
-)
-
-// ErrFieldMismatch is returned when a field is to be loaded into a different
-// type than the one it was stored from, or when a field is missing or
-// unexported in the destination struct.
-// StructType is the type of the struct pointed to by the destination argument
-// passed to Get or to Iterator.Next.
-type ErrFieldMismatch struct {
- StructType reflect.Type
- FieldName string
- Reason string
-}
-
-func (e *ErrFieldMismatch) Error() string {
- return fmt.Sprintf("datastore: cannot load field %q into a %q: %s",
- e.FieldName, e.StructType, e.Reason)
-}
-
-// protoToKey converts a Reference proto to a *Key.
-func protoToKey(r *pb.Reference) (k *Key, err error) {
- appID := r.GetApp()
- namespace := r.GetNameSpace()
- for _, e := range r.Path.Element {
- k = &Key{
- kind: e.GetType(),
- stringID: e.GetName(),
- intID: e.GetId(),
- parent: k,
- appID: appID,
- namespace: namespace,
- }
- if !k.valid() {
- return nil, ErrInvalidKey
- }
- }
- return
-}
-
-// keyToProto converts a *Key to a Reference proto.
-func keyToProto(defaultAppID string, k *Key) *pb.Reference {
- appID := k.appID
- if appID == "" {
- appID = defaultAppID
- }
- n := 0
- for i := k; i != nil; i = i.parent {
- n++
- }
- e := make([]*pb.Path_Element, n)
- for i := k; i != nil; i = i.parent {
- n--
- e[n] = &pb.Path_Element{
- Type: &i.kind,
- }
- // At most one of {Name,Id} should be set.
- // Neither will be set for incomplete keys.
- if i.stringID != "" {
- e[n].Name = &i.stringID
- } else if i.intID != 0 {
- e[n].Id = &i.intID
- }
- }
- var namespace *string
- if k.namespace != "" {
- namespace = proto.String(k.namespace)
- }
- return &pb.Reference{
- App: proto.String(appID),
- NameSpace: namespace,
- Path: &pb.Path{
- Element: e,
- },
- }
-}
-
-// multiKeyToProto is a batch version of keyToProto.
-func multiKeyToProto(appID string, key []*Key) []*pb.Reference {
- ret := make([]*pb.Reference, len(key))
- for i, k := range key {
- ret[i] = keyToProto(appID, k)
- }
- return ret
-}
-
-// multiValid is a batch version of Key.valid. It returns an error, not a
-// []bool.
-func multiValid(key []*Key) error {
- invalid := false
- for _, k := range key {
- if !k.valid() {
- invalid = true
- break
- }
- }
- if !invalid {
- return nil
- }
- err := make(appengine.MultiError, len(key))
- for i, k := range key {
- if !k.valid() {
- err[i] = ErrInvalidKey
- }
- }
- return err
-}
-
-// It's unfortunate that the two semantically equivalent concepts pb.Reference
-// and pb.PropertyValue_ReferenceValue aren't the same type. For example, the
-// two have different protobuf field numbers.
-
-// referenceValueToKey is the same as protoToKey except the input is a
-// PropertyValue_ReferenceValue instead of a Reference.
-func referenceValueToKey(r *pb.PropertyValue_ReferenceValue) (k *Key, err error) {
- appID := r.GetApp()
- namespace := r.GetNameSpace()
- for _, e := range r.Pathelement {
- k = &Key{
- kind: e.GetType(),
- stringID: e.GetName(),
- intID: e.GetId(),
- parent: k,
- appID: appID,
- namespace: namespace,
- }
- if !k.valid() {
- return nil, ErrInvalidKey
- }
- }
- return
-}
-
-// keyToReferenceValue is the same as keyToProto except the output is a
-// PropertyValue_ReferenceValue instead of a Reference.
-func keyToReferenceValue(defaultAppID string, k *Key) *pb.PropertyValue_ReferenceValue {
- ref := keyToProto(defaultAppID, k)
- pe := make([]*pb.PropertyValue_ReferenceValue_PathElement, len(ref.Path.Element))
- for i, e := range ref.Path.Element {
- pe[i] = &pb.PropertyValue_ReferenceValue_PathElement{
- Type: e.Type,
- Id: e.Id,
- Name: e.Name,
- }
- }
- return &pb.PropertyValue_ReferenceValue{
- App: ref.App,
- NameSpace: ref.NameSpace,
- Pathelement: pe,
- }
-}
-
-type multiArgType int
-
-const (
- multiArgTypeInvalid multiArgType = iota
- multiArgTypePropertyLoadSaver
- multiArgTypeStruct
- multiArgTypeStructPtr
- multiArgTypeInterface
-)
-
-// checkMultiArg checks that v has type []S, []*S, []I, or []P, for some struct
-// type S, for some interface type I, or some non-interface non-pointer type P
-// such that P or *P implements PropertyLoadSaver.
-//
-// It returns what category the slice's elements are, and the reflect.Type
-// that represents S, I or P.
-//
-// As a special case, PropertyList is an invalid type for v.
-func checkMultiArg(v reflect.Value) (m multiArgType, elemType reflect.Type) {
- if v.Kind() != reflect.Slice {
- return multiArgTypeInvalid, nil
- }
- if v.Type() == typeOfPropertyList {
- return multiArgTypeInvalid, nil
- }
- elemType = v.Type().Elem()
- if reflect.PtrTo(elemType).Implements(typeOfPropertyLoadSaver) {
- return multiArgTypePropertyLoadSaver, elemType
- }
- switch elemType.Kind() {
- case reflect.Struct:
- return multiArgTypeStruct, elemType
- case reflect.Interface:
- return multiArgTypeInterface, elemType
- case reflect.Ptr:
- elemType = elemType.Elem()
- if elemType.Kind() == reflect.Struct {
- return multiArgTypeStructPtr, elemType
- }
- }
- return multiArgTypeInvalid, nil
-}
-
-// Get loads the entity stored for k into dst, which must be a struct pointer
-// or implement PropertyLoadSaver. If there is no such entity for the key, Get
-// returns ErrNoSuchEntity.
-//
-// The values of dst's unmatched struct fields are not modified, and matching
-// slice-typed fields are not reset before appending to them. In particular, it
-// is recommended to pass a pointer to a zero valued struct on each Get call.
-//
-// ErrFieldMismatch is returned when a field is to be loaded into a different
-// type than the one it was stored from, or when a field is missing or
-// unexported in the destination struct. ErrFieldMismatch is only returned if
-// dst is a struct pointer.
-func Get(c context.Context, key *Key, dst interface{}) error {
- if dst == nil { // GetMulti catches nil interface; we need to catch nil ptr here
- return ErrInvalidEntityType
- }
- err := GetMulti(c, []*Key{key}, []interface{}{dst})
- if me, ok := err.(appengine.MultiError); ok {
- return me[0]
- }
- return err
-}
-
-// GetMulti is a batch version of Get.
-//
-// dst must be a []S, []*S, []I or []P, for some struct type S, some interface
-// type I, or some non-interface non-pointer type P such that P or *P
-// implements PropertyLoadSaver. If an []I, each element must be a valid dst
-// for Get: it must be a struct pointer or implement PropertyLoadSaver.
-//
-// As a special case, PropertyList is an invalid type for dst, even though a
-// PropertyList is a slice of structs. It is treated as invalid to avoid being
-// mistakenly passed when []PropertyList was intended.
-func GetMulti(c context.Context, key []*Key, dst interface{}) error {
- v := reflect.ValueOf(dst)
- multiArgType, _ := checkMultiArg(v)
- if multiArgType == multiArgTypeInvalid {
- return errors.New("datastore: dst has invalid type")
- }
- if len(key) != v.Len() {
- return errors.New("datastore: key and dst slices have different length")
- }
- if len(key) == 0 {
- return nil
- }
- if err := multiValid(key); err != nil {
- return err
- }
- req := &pb.GetRequest{
- Key: multiKeyToProto(internal.FullyQualifiedAppID(c), key),
- }
- res := &pb.GetResponse{}
- if err := internal.Call(c, "datastore_v3", "Get", req, res); err != nil {
- return err
- }
- if len(key) != len(res.Entity) {
- return errors.New("datastore: internal error: server returned the wrong number of entities")
- }
- multiErr, any := make(appengine.MultiError, len(key)), false
- for i, e := range res.Entity {
- if e.Entity == nil {
- multiErr[i] = ErrNoSuchEntity
- } else {
- elem := v.Index(i)
- if multiArgType == multiArgTypePropertyLoadSaver || multiArgType == multiArgTypeStruct {
- elem = elem.Addr()
- }
- if multiArgType == multiArgTypeStructPtr && elem.IsNil() {
- elem.Set(reflect.New(elem.Type().Elem()))
- }
- multiErr[i] = loadEntity(elem.Interface(), e.Entity)
- }
- if multiErr[i] != nil {
- any = true
- }
- }
- if any {
- return multiErr
- }
- return nil
-}
-
-// Put saves the entity src into the datastore with key k. src must be a struct
-// pointer or implement PropertyLoadSaver; if a struct pointer then any
-// unexported fields of that struct will be skipped. If k is an incomplete key,
-// the returned key will be a unique key generated by the datastore.
-func Put(c context.Context, key *Key, src interface{}) (*Key, error) {
- k, err := PutMulti(c, []*Key{key}, []interface{}{src})
- if err != nil {
- if me, ok := err.(appengine.MultiError); ok {
- return nil, me[0]
- }
- return nil, err
- }
- return k[0], nil
-}
-
-// PutMulti is a batch version of Put.
-//
-// src must satisfy the same conditions as the dst argument to GetMulti.
-func PutMulti(c context.Context, key []*Key, src interface{}) ([]*Key, error) {
- v := reflect.ValueOf(src)
- multiArgType, _ := checkMultiArg(v)
- if multiArgType == multiArgTypeInvalid {
- return nil, errors.New("datastore: src has invalid type")
- }
- if len(key) != v.Len() {
- return nil, errors.New("datastore: key and src slices have different length")
- }
- if len(key) == 0 {
- return nil, nil
- }
- appID := internal.FullyQualifiedAppID(c)
- if err := multiValid(key); err != nil {
- return nil, err
- }
- req := &pb.PutRequest{}
- for i := range key {
- elem := v.Index(i)
- if multiArgType == multiArgTypePropertyLoadSaver || multiArgType == multiArgTypeStruct {
- elem = elem.Addr()
- }
- sProto, err := saveEntity(appID, key[i], elem.Interface())
- if err != nil {
- return nil, err
- }
- req.Entity = append(req.Entity, sProto)
- }
- res := &pb.PutResponse{}
- if err := internal.Call(c, "datastore_v3", "Put", req, res); err != nil {
- return nil, err
- }
- if len(key) != len(res.Key) {
- return nil, errors.New("datastore: internal error: server returned the wrong number of keys")
- }
- ret := make([]*Key, len(key))
- for i := range ret {
- var err error
- ret[i], err = protoToKey(res.Key[i])
- if err != nil || ret[i].Incomplete() {
- return nil, errors.New("datastore: internal error: server returned an invalid key")
- }
- }
- return ret, nil
-}
-
-// Delete deletes the entity for the given key.
-func Delete(c context.Context, key *Key) error {
- err := DeleteMulti(c, []*Key{key})
- if me, ok := err.(appengine.MultiError); ok {
- return me[0]
- }
- return err
-}
-
-// DeleteMulti is a batch version of Delete.
-func DeleteMulti(c context.Context, key []*Key) error {
- if len(key) == 0 {
- return nil
- }
- if err := multiValid(key); err != nil {
- return err
- }
- req := &pb.DeleteRequest{
- Key: multiKeyToProto(internal.FullyQualifiedAppID(c), key),
- }
- res := &pb.DeleteResponse{}
- return internal.Call(c, "datastore_v3", "Delete", req, res)
-}
-
-func namespaceMod(m proto.Message, namespace string) {
- // pb.Query is the only type that has a name_space field.
- // All other namespace support in datastore is in the keys.
- switch m := m.(type) {
- case *pb.Query:
- if m.NameSpace == nil {
- m.NameSpace = &namespace
- }
- }
-}
-
-func init() {
- internal.NamespaceMods["datastore_v3"] = namespaceMod
- internal.RegisterErrorCodeMap("datastore_v3", pb.Error_ErrorCode_name)
- internal.RegisterTimeoutErrorCode("datastore_v3", int32(pb.Error_TIMEOUT))
-}
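
The GetMulti implementation removed above reports partial failures as an appengine.MultiError, with one error slot per key, but the surrounding doc comments give no worked example of handling it. A minimal sketch of that pattern, assuming an App Engine request context ctx, the google.golang.org/appengine/log package, and a hypothetical Entity struct with exported fields:

	keys := []*datastore.Key{
		datastore.NewKey(ctx, "Entity", "a", 0, nil),
		datastore.NewKey(ctx, "Entity", "b", 0, nil),
	}
	dst := make([]Entity, len(keys)) // []S is a valid dst per checkMultiArg
	if err := datastore.GetMulti(ctx, keys, dst); err != nil {
		if me, ok := err.(appengine.MultiError); ok {
			// One error slot per key; nil means that entity loaded fine.
			for i, e := range me {
				if e == datastore.ErrNoSuchEntity {
					log.Infof(ctx, "no entity stored for key %v", keys[i])
				}
			}
		} else {
			log.Errorf(ctx, "GetMulti: %v", err)
		}
	}
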
diff --git a/vendor/google.golang.org/appengine/datastore/doc.go b/vendor/google.golang.org/appengine/datastore/doc.go
deleted file mode 100644
index 0d1cb5c..0000000
--- a/vendor/google.golang.org/appengine/datastore/doc.go
+++ /dev/null
@@ -1,351 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package datastore provides a client for App Engine's datastore service.
-
-
-Basic Operations
-
-Entities are the unit of storage and are associated with a key. A key
-consists of an optional parent key, a string application ID, a string kind
-(also known as an entity type), and either a StringID or an IntID. A
-StringID is also known as an entity name or key name.
-
-It is valid to create a key with a zero StringID and a zero IntID; this is
-called an incomplete key, and does not refer to any saved entity. Putting an
-entity into the datastore under an incomplete key will cause a unique key
-to be generated for that entity, with a non-zero IntID.
-
-An entity's contents are a mapping from case-sensitive field names to values.
-Valid value types are:
- - signed integers (int, int8, int16, int32 and int64),
- - bool,
- - string,
- - float32 and float64,
- - []byte (up to 1 megabyte in length),
- - any type whose underlying type is one of the above predeclared types,
- - ByteString,
- - *Key,
- - time.Time (stored with microsecond precision),
- - appengine.BlobKey,
- - appengine.GeoPoint,
- - structs whose fields are all valid value types,
- - slices of any of the above.
-
-Slices of structs are valid, as are structs that contain slices. However, if
-one struct contains another, then at most one of those can be repeated. This
-disqualifies recursively defined struct types: any struct T that (directly or
-indirectly) contains a []T.
-
-The Get and Put functions load and save an entity's contents. An entity's
-contents are typically represented by a struct pointer.
-
-Example code:
-
- type Entity struct {
- Value string
- }
-
- func handle(w http.ResponseWriter, r *http.Request) {
- ctx := appengine.NewContext(r)
-
- k := datastore.NewKey(ctx, "Entity", "stringID", 0, nil)
- e := new(Entity)
- if err := datastore.Get(ctx, k, e); err != nil {
- http.Error(w, err.Error(), 500)
- return
- }
-
- old := e.Value
- e.Value = r.URL.Path
-
- if _, err := datastore.Put(ctx, k, e); err != nil {
- http.Error(w, err.Error(), 500)
- return
- }
-
- w.Header().Set("Content-Type", "text/plain; charset=utf-8")
- fmt.Fprintf(w, "old=%q\nnew=%q\n", old, e.Value)
- }
-
-GetMulti, PutMulti and DeleteMulti are batch versions of the Get, Put and
-Delete functions. They take a []*Key instead of a *Key, and may return an
-appengine.MultiError when encountering partial failure.
-
-
-Properties
-
-An entity's contents can be represented by a variety of types. These are
-typically struct pointers, but can also be any type that implements the
-PropertyLoadSaver interface. If using a struct pointer, you do not have to
-explicitly implement the PropertyLoadSaver interface; the datastore will
-automatically convert via reflection. If a struct pointer does implement that
-interface then those methods will be used in preference to the default
-behavior for struct pointers. Struct pointers are more strongly typed and are
-easier to use; PropertyLoadSavers are more flexible.
-
-The actual types passed do not have to match between Get and Put calls or even
-across different App Engine requests. It is valid to put a *PropertyList and
-get that same entity as a *myStruct, or put a *myStruct0 and get a *myStruct1.
-Conceptually, any entity is saved as a sequence of properties, and is loaded
-into the destination value on a property-by-property basis. When loading into
-a struct pointer, an entity that cannot be completely represented (such as a
-missing field) will result in an ErrFieldMismatch error but it is up to the
-caller whether this error is fatal, recoverable or ignorable.
-
-By default, for struct pointers, all properties are potentially indexed, and
-the property name is the same as the field name (and hence must start with an
-upper case letter). Fields may have a `datastore:"name,options"` tag. The tag
-name is the property name, which must be one or more valid Go identifiers
-joined by ".", but may start with a lower case letter. An empty tag name means
-to just use the field name. A "-" tag name means that the datastore will
-ignore that field. If options is "noindex" then the field will not be indexed.
-If the options is "" then the comma may be omitted. There are no other
-recognized options.
-
-Fields (except for []byte) are indexed by default. Strings longer than 1500
-bytes cannot be indexed; fields used to store long strings should be
-tagged with "noindex". Similarly, ByteStrings longer than 1500 bytes cannot be
-indexed.
-
-Example code:
-
- // A and B are renamed to a and b.
- // A, C and J are not indexed.
- // D's tag is equivalent to having no tag at all (E).
- // I is ignored entirely by the datastore.
- // J has tag information for both the datastore and json packages.
- type TaggedStruct struct {
- A int `datastore:"a,noindex"`
- B int `datastore:"b"`
- C int `datastore:",noindex"`
- D int `datastore:""`
- E int
- I int `datastore:"-"`
- J int `datastore:",noindex" json:"j"`
- }
-
-
-Structured Properties
-
-If the struct pointed to contains other structs, then the nested or embedded
-structs are flattened. For example, given these definitions:
-
- type Inner1 struct {
- W int32
- X string
- }
-
- type Inner2 struct {
- Y float64
- }
-
- type Inner3 struct {
- Z bool
- }
-
- type Outer struct {
- A int16
- I []Inner1
- J Inner2
- Inner3
- }
-
-then an Outer's properties would be equivalent to those of:
-
- type OuterEquivalent struct {
- A int16
- IDotW []int32 `datastore:"I.W"`
- IDotX []string `datastore:"I.X"`
- JDotY float64 `datastore:"J.Y"`
- Z bool
- }
-
-If Outer's embedded Inner3 field was tagged as `datastore:"Foo"` then the
-equivalent field would instead be: FooDotZ bool `datastore:"Foo.Z"`.
-
-If an outer struct is tagged "noindex" then all of its implicit flattened
-fields are effectively "noindex".
-
-
-The PropertyLoadSaver Interface
-
-An entity's contents can also be represented by any type that implements the
-PropertyLoadSaver interface. This type may be a struct pointer, but it does
-not have to be. The datastore package will call Load when getting the entity's
-contents, and Save when putting the entity's contents.
-Possible uses include deriving non-stored fields, verifying fields, or indexing
-a field only if its value is positive.
-
-Example code:
-
- type CustomPropsExample struct {
- I, J int
- // Sum is not stored, but should always be equal to I + J.
- Sum int `datastore:"-"`
- }
-
- func (x *CustomPropsExample) Load(ps []datastore.Property) error {
- // Load I and J as usual.
- if err := datastore.LoadStruct(x, ps); err != nil {
- return err
- }
- // Derive the Sum field.
- x.Sum = x.I + x.J
- return nil
- }
-
- func (x *CustomPropsExample) Save() ([]datastore.Property, error) {
- // Validate the Sum field.
- if x.Sum != x.I + x.J {
- return errors.New("CustomPropsExample has inconsistent sum")
- }
- // Save I and J as usual. The code below is equivalent to calling
- // "return datastore.SaveStruct(x)", but is done manually for
- // demonstration purposes.
- return []datastore.Property{
- {
- Name: "I",
- Value: int64(x.I),
- },
- {
- Name: "J",
- Value: int64(x.J),
- },
- }
- }
-
-The *PropertyList type implements PropertyLoadSaver, and can therefore hold an
-arbitrary entity's contents.
-
-
-Queries
-
-Queries retrieve entities based on their properties or key's ancestry. Running
-a query yields an iterator of results: either keys or (key, entity) pairs.
-Queries are re-usable and it is safe to call Query.Run from concurrent
-goroutines. Iterators are not safe for concurrent use.
-
-Queries are immutable, and are either created by calling NewQuery, or derived
-from an existing query by calling a method like Filter or Order that returns a
-new query value. A query is typically constructed by calling NewQuery followed
-by a chain of zero or more such methods. These methods are:
- - Ancestor and Filter constrain the entities returned by running a query.
- - Order affects the order in which they are returned.
- - Project constrains the fields returned.
- - Distinct de-duplicates projected entities.
- - KeysOnly makes the iterator return only keys, not (key, entity) pairs.
- - Start, End, Offset and Limit define which sub-sequence of matching entities
- to return. Start and End take cursors, Offset and Limit take integers. Start
- and Offset affect the first result, End and Limit affect the last result.
- If both Start and Offset are set, then the offset is relative to Start.
- If both End and Limit are set, then the earliest constraint wins. Limit is
- relative to Start+Offset, not relative to End. As a special case, a
- negative limit means unlimited.
-
-Example code:
-
- type Widget struct {
- Description string
- Price int
- }
-
- func handle(w http.ResponseWriter, r *http.Request) {
- ctx := appengine.NewContext(r)
- q := datastore.NewQuery("Widget").
- Filter("Price <", 1000).
- Order("-Price")
- b := new(bytes.Buffer)
- for t := q.Run(ctx); ; {
- var x Widget
- key, err := t.Next(&x)
- if err == datastore.Done {
- break
- }
- if err != nil {
- serveError(ctx, w, err)
- return
- }
- fmt.Fprintf(b, "Key=%v\nWidget=%#v\n\n", key, x)
- }
- w.Header().Set("Content-Type", "text/plain; charset=utf-8")
- io.Copy(w, b)
- }
-
-
-Transactions
-
-RunInTransaction runs a function in a transaction.
-
-Example code:
-
- type Counter struct {
- Count int
- }
-
- func inc(ctx context.Context, key *datastore.Key) (int, error) {
- var x Counter
- if err := datastore.Get(ctx, key, &x); err != nil && err != datastore.ErrNoSuchEntity {
- return 0, err
- }
- x.Count++
- if _, err := datastore.Put(ctx, key, &x); err != nil {
- return 0, err
- }
- return x.Count, nil
- }
-
- func handle(w http.ResponseWriter, r *http.Request) {
- ctx := appengine.NewContext(r)
- var count int
- err := datastore.RunInTransaction(ctx, func(ctx context.Context) error {
- var err1 error
- count, err1 = inc(ctx, datastore.NewKey(ctx, "Counter", "singleton", 0, nil))
- return err1
- }, nil)
- if err != nil {
- serveError(ctx, w, err)
- return
- }
- w.Header().Set("Content-Type", "text/plain; charset=utf-8")
- fmt.Fprintf(w, "Count=%d", count)
- }
-
-
-Metadata
-
-The datastore package provides access to some of App Engine's datastore
-metadata. This metadata includes information about the entity groups,
-namespaces, entity kinds, and properties in the datastore, as well as the
-property representations for each property.
-
-Example code:
-
- func handle(w http.ResponseWriter, r *http.Request) {
- // Print all the kinds in the datastore, with all the indexed
- // properties (and their representations) for each.
- ctx := appengine.NewContext(r)
-
- kinds, err := datastore.Kinds(ctx)
- if err != nil {
- serveError(ctx, w, err)
- return
- }
-
- w.Header().Set("Content-Type", "text/plain; charset=utf-8")
- for _, kind := range kinds {
- fmt.Fprintf(w, "%s:\n", kind)
- props, err := datastore.KindProperties(ctx, kind)
- if err != nil {
- fmt.Fprintln(w, "\t(unable to retrieve properties)")
- continue
- }
- for p, rep := range props {
-				fmt.Fprintf(w, "\t-%s (%s)\n", p, strings.Join(rep, ", "))
- }
- }
- }
-*/
-package datastore
diff --git a/vendor/google.golang.org/appengine/datastore/key.go b/vendor/google.golang.org/appengine/datastore/key.go
deleted file mode 100644
index ac1f002..0000000
--- a/vendor/google.golang.org/appengine/datastore/key.go
+++ /dev/null
@@ -1,309 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package datastore
-
-import (
- "bytes"
- "encoding/base64"
- "encoding/gob"
- "errors"
- "fmt"
- "strconv"
- "strings"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/datastore"
-)
-
-// Key represents the datastore key for a stored entity, and is immutable.
-type Key struct {
- kind string
- stringID string
- intID int64
- parent *Key
- appID string
- namespace string
-}
-
-// Kind returns the key's kind (also known as entity type).
-func (k *Key) Kind() string {
- return k.kind
-}
-
-// StringID returns the key's string ID (also known as an entity name or key
-// name), which may be "".
-func (k *Key) StringID() string {
- return k.stringID
-}
-
-// IntID returns the key's integer ID, which may be 0.
-func (k *Key) IntID() int64 {
- return k.intID
-}
-
-// Parent returns the key's parent key, which may be nil.
-func (k *Key) Parent() *Key {
- return k.parent
-}
-
-// AppID returns the key's application ID.
-func (k *Key) AppID() string {
- return k.appID
-}
-
-// Namespace returns the key's namespace.
-func (k *Key) Namespace() string {
- return k.namespace
-}
-
-// Incomplete returns whether the key does not refer to a stored entity.
-// In particular, whether the key has a zero StringID and a zero IntID.
-func (k *Key) Incomplete() bool {
- return k.stringID == "" && k.intID == 0
-}
-
-// valid returns whether the key is valid.
-func (k *Key) valid() bool {
- if k == nil {
- return false
- }
- for ; k != nil; k = k.parent {
- if k.kind == "" || k.appID == "" {
- return false
- }
- if k.stringID != "" && k.intID != 0 {
- return false
- }
- if k.parent != nil {
- if k.parent.Incomplete() {
- return false
- }
- if k.parent.appID != k.appID || k.parent.namespace != k.namespace {
- return false
- }
- }
- }
- return true
-}
-
-// Equal returns whether two keys are equal.
-func (k *Key) Equal(o *Key) bool {
- for k != nil && o != nil {
- if k.kind != o.kind || k.stringID != o.stringID || k.intID != o.intID || k.appID != o.appID || k.namespace != o.namespace {
- return false
- }
- k, o = k.parent, o.parent
- }
- return k == o
-}
-
-// root returns the furthest ancestor of a key, which may be itself.
-func (k *Key) root() *Key {
- for k.parent != nil {
- k = k.parent
- }
- return k
-}
-
-// marshal marshals the key's string representation to the buffer.
-func (k *Key) marshal(b *bytes.Buffer) {
- if k.parent != nil {
- k.parent.marshal(b)
- }
- b.WriteByte('/')
- b.WriteString(k.kind)
- b.WriteByte(',')
- if k.stringID != "" {
- b.WriteString(k.stringID)
- } else {
- b.WriteString(strconv.FormatInt(k.intID, 10))
- }
-}
-
-// String returns a string representation of the key.
-func (k *Key) String() string {
- if k == nil {
- return ""
- }
- b := bytes.NewBuffer(make([]byte, 0, 512))
- k.marshal(b)
- return b.String()
-}
-
-type gobKey struct {
- Kind string
- StringID string
- IntID int64
- Parent *gobKey
- AppID string
- Namespace string
-}
-
-func keyToGobKey(k *Key) *gobKey {
- if k == nil {
- return nil
- }
- return &gobKey{
- Kind: k.kind,
- StringID: k.stringID,
- IntID: k.intID,
- Parent: keyToGobKey(k.parent),
- AppID: k.appID,
- Namespace: k.namespace,
- }
-}
-
-func gobKeyToKey(gk *gobKey) *Key {
- if gk == nil {
- return nil
- }
- return &Key{
- kind: gk.Kind,
- stringID: gk.StringID,
- intID: gk.IntID,
- parent: gobKeyToKey(gk.Parent),
- appID: gk.AppID,
- namespace: gk.Namespace,
- }
-}
-
-func (k *Key) GobEncode() ([]byte, error) {
- buf := new(bytes.Buffer)
- if err := gob.NewEncoder(buf).Encode(keyToGobKey(k)); err != nil {
- return nil, err
- }
- return buf.Bytes(), nil
-}
-
-func (k *Key) GobDecode(buf []byte) error {
- gk := new(gobKey)
- if err := gob.NewDecoder(bytes.NewBuffer(buf)).Decode(gk); err != nil {
- return err
- }
- *k = *gobKeyToKey(gk)
- return nil
-}
-
-func (k *Key) MarshalJSON() ([]byte, error) {
- return []byte(`"` + k.Encode() + `"`), nil
-}
-
-func (k *Key) UnmarshalJSON(buf []byte) error {
- if len(buf) < 2 || buf[0] != '"' || buf[len(buf)-1] != '"' {
- return errors.New("datastore: bad JSON key")
- }
- k2, err := DecodeKey(string(buf[1 : len(buf)-1]))
- if err != nil {
- return err
- }
- *k = *k2
- return nil
-}
-
-// Encode returns an opaque representation of the key
-// suitable for use in HTML and URLs.
-// This is compatible with the Python and Java runtimes.
-func (k *Key) Encode() string {
- ref := keyToProto("", k)
-
- b, err := proto.Marshal(ref)
- if err != nil {
- panic(err)
- }
-
- // Trailing padding is stripped.
- return strings.TrimRight(base64.URLEncoding.EncodeToString(b), "=")
-}
-
-// DecodeKey decodes a key from the opaque representation returned by Encode.
-func DecodeKey(encoded string) (*Key, error) {
- // Re-add padding.
- if m := len(encoded) % 4; m != 0 {
- encoded += strings.Repeat("=", 4-m)
- }
-
- b, err := base64.URLEncoding.DecodeString(encoded)
- if err != nil {
- return nil, err
- }
-
- ref := new(pb.Reference)
- if err := proto.Unmarshal(b, ref); err != nil {
- return nil, err
- }
-
- return protoToKey(ref)
-}
-
-// NewIncompleteKey creates a new incomplete key.
-// kind cannot be empty.
-func NewIncompleteKey(c context.Context, kind string, parent *Key) *Key {
- return NewKey(c, kind, "", 0, parent)
-}
-
-// NewKey creates a new key.
-// kind cannot be empty.
-// Either one or both of stringID and intID must be zero. If both are zero,
-// the key returned is incomplete.
-// parent must either be a complete key or nil.
-func NewKey(c context.Context, kind, stringID string, intID int64, parent *Key) *Key {
- // If there's a parent key, use its namespace.
- // Otherwise, use any namespace attached to the context.
- var namespace string
- if parent != nil {
- namespace = parent.namespace
- } else {
- namespace = internal.NamespaceFromContext(c)
- }
-
- return &Key{
- kind: kind,
- stringID: stringID,
- intID: intID,
- parent: parent,
- appID: internal.FullyQualifiedAppID(c),
- namespace: namespace,
- }
-}
-
-// AllocateIDs returns a range of n integer IDs with the given kind and parent
-// combination. kind cannot be empty; parent may be nil. The IDs in the range
-// returned will not be used by the datastore's automatic ID sequence generator
-// and may be used with NewKey without conflict.
-//
-// The range is inclusive at the low end and exclusive at the high end. In
-// other words, valid intIDs x satisfy low <= x && x < high.
-//
-// If no error is returned, low + n == high.
-func AllocateIDs(c context.Context, kind string, parent *Key, n int) (low, high int64, err error) {
- if kind == "" {
- return 0, 0, errors.New("datastore: AllocateIDs given an empty kind")
- }
- if n < 0 {
- return 0, 0, fmt.Errorf("datastore: AllocateIDs given a negative count: %d", n)
- }
- if n == 0 {
- return 0, 0, nil
- }
- req := &pb.AllocateIdsRequest{
- ModelKey: keyToProto("", NewIncompleteKey(c, kind, parent)),
- Size: proto.Int64(int64(n)),
- }
- res := &pb.AllocateIdsResponse{}
- if err := internal.Call(c, "datastore_v3", "AllocateIds", req, res); err != nil {
- return 0, 0, err
- }
- // The protobuf is inclusive at both ends. Idiomatic Go (e.g. slices, for loops)
- // is inclusive at the low end and exclusive at the high end, so we add 1.
- low = res.GetStart()
- high = res.GetEnd() + 1
- if low+int64(n) != high {
- return 0, 0, fmt.Errorf("datastore: internal error: could not allocate %d IDs", n)
- }
- return low, high, nil
-}
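
The AllocateIDs comment removed above defines a half-open range: low is inclusive, high is exclusive, and low + n == high on success. A minimal sketch of consuming that range, written in the same handler style as the package's doc examples and assuming a hypothetical "Entity" kind:

	func handle(w http.ResponseWriter, r *http.Request) {
		ctx := appengine.NewContext(r)

		// Reserve three IDs; the datastore's automatic ID sequence will not reuse them.
		low, high, err := datastore.AllocateIDs(ctx, "Entity", nil, 3)
		if err != nil {
			http.Error(w, err.Error(), 500)
			return
		}

		// Build complete keys from the half-open [low, high) range.
		keys := make([]*datastore.Key, 0, high-low)
		for id := low; id < high; id++ {
			keys = append(keys, datastore.NewKey(ctx, "Entity", "", id, nil))
		}
		fmt.Fprintf(w, "allocated %d keys starting at %d\n", len(keys), low)
	}
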
diff --git a/vendor/google.golang.org/appengine/datastore/load.go b/vendor/google.golang.org/appengine/datastore/load.go
deleted file mode 100644
index 3f3c80c..0000000
--- a/vendor/google.golang.org/appengine/datastore/load.go
+++ /dev/null
@@ -1,334 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package datastore
-
-import (
- "fmt"
- "reflect"
- "time"
-
- "google.golang.org/appengine"
- pb "google.golang.org/appengine/internal/datastore"
-)
-
-var (
- typeOfBlobKey = reflect.TypeOf(appengine.BlobKey(""))
- typeOfByteSlice = reflect.TypeOf([]byte(nil))
- typeOfByteString = reflect.TypeOf(ByteString(nil))
- typeOfGeoPoint = reflect.TypeOf(appengine.GeoPoint{})
- typeOfTime = reflect.TypeOf(time.Time{})
-)
-
-// typeMismatchReason returns a string explaining why the property p could not
-// be stored in an entity field of type v.Type().
-func typeMismatchReason(p Property, v reflect.Value) string {
- entityType := "empty"
- switch p.Value.(type) {
- case int64:
- entityType = "int"
- case bool:
- entityType = "bool"
- case string:
- entityType = "string"
- case float64:
- entityType = "float"
- case *Key:
- entityType = "*datastore.Key"
- case time.Time:
- entityType = "time.Time"
- case appengine.BlobKey:
- entityType = "appengine.BlobKey"
- case appengine.GeoPoint:
- entityType = "appengine.GeoPoint"
- case ByteString:
- entityType = "datastore.ByteString"
- case []byte:
- entityType = "[]byte"
- }
- return fmt.Sprintf("type mismatch: %s versus %v", entityType, v.Type())
-}
-
-type propertyLoader struct {
- // m holds the number of times a substruct field like "Foo.Bar.Baz" has
- // been seen so far. The map is constructed lazily.
- m map[string]int
-}
-
-func (l *propertyLoader) load(codec *structCodec, structValue reflect.Value, p Property, requireSlice bool) string {
- var v reflect.Value
- // Traverse a struct's struct-typed fields.
- for name := p.Name; ; {
- decoder, ok := codec.byName[name]
- if !ok {
- return "no such struct field"
- }
- v = structValue.Field(decoder.index)
- if !v.IsValid() {
- return "no such struct field"
- }
- if !v.CanSet() {
- return "cannot set struct field"
- }
-
- if decoder.substructCodec == nil {
- break
- }
-
- if v.Kind() == reflect.Slice {
- if l.m == nil {
- l.m = make(map[string]int)
- }
- index := l.m[p.Name]
- l.m[p.Name] = index + 1
- for v.Len() <= index {
- v.Set(reflect.Append(v, reflect.New(v.Type().Elem()).Elem()))
- }
- structValue = v.Index(index)
- requireSlice = false
- } else {
- structValue = v
- }
- // Strip the "I." from "I.X".
- name = name[len(codec.byIndex[decoder.index].name):]
- codec = decoder.substructCodec
- }
-
- var slice reflect.Value
- if v.Kind() == reflect.Slice && v.Type().Elem().Kind() != reflect.Uint8 {
- slice = v
- v = reflect.New(v.Type().Elem()).Elem()
- } else if requireSlice {
- return "multiple-valued property requires a slice field type"
- }
-
- // Convert indexValues to a Go value with a meaning derived from the
- // destination type.
- pValue := p.Value
- if iv, ok := pValue.(indexValue); ok {
- meaning := pb.Property_NO_MEANING
- switch v.Type() {
- case typeOfBlobKey:
- meaning = pb.Property_BLOBKEY
- case typeOfByteSlice:
- meaning = pb.Property_BLOB
- case typeOfByteString:
- meaning = pb.Property_BYTESTRING
- case typeOfGeoPoint:
- meaning = pb.Property_GEORSS_POINT
- case typeOfTime:
- meaning = pb.Property_GD_WHEN
- }
- var err error
- pValue, err = propValue(iv.value, meaning)
- if err != nil {
- return err.Error()
- }
- }
-
- switch v.Kind() {
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- x, ok := pValue.(int64)
- if !ok && pValue != nil {
- return typeMismatchReason(p, v)
- }
- if v.OverflowInt(x) {
- return fmt.Sprintf("value %v overflows struct field of type %v", x, v.Type())
- }
- v.SetInt(x)
- case reflect.Bool:
- x, ok := pValue.(bool)
- if !ok && pValue != nil {
- return typeMismatchReason(p, v)
- }
- v.SetBool(x)
- case reflect.String:
- switch x := pValue.(type) {
- case appengine.BlobKey:
- v.SetString(string(x))
- case ByteString:
- v.SetString(string(x))
- case string:
- v.SetString(x)
- default:
- if pValue != nil {
- return typeMismatchReason(p, v)
- }
- }
- case reflect.Float32, reflect.Float64:
- x, ok := pValue.(float64)
- if !ok && pValue != nil {
- return typeMismatchReason(p, v)
- }
- if v.OverflowFloat(x) {
- return fmt.Sprintf("value %v overflows struct field of type %v", x, v.Type())
- }
- v.SetFloat(x)
- case reflect.Ptr:
- x, ok := pValue.(*Key)
- if !ok && pValue != nil {
- return typeMismatchReason(p, v)
- }
- if _, ok := v.Interface().(*Key); !ok {
- return typeMismatchReason(p, v)
- }
- v.Set(reflect.ValueOf(x))
- case reflect.Struct:
- switch v.Type() {
- case typeOfTime:
- x, ok := pValue.(time.Time)
- if !ok && pValue != nil {
- return typeMismatchReason(p, v)
- }
- v.Set(reflect.ValueOf(x))
- case typeOfGeoPoint:
- x, ok := pValue.(appengine.GeoPoint)
- if !ok && pValue != nil {
- return typeMismatchReason(p, v)
- }
- v.Set(reflect.ValueOf(x))
- default:
- return typeMismatchReason(p, v)
- }
- case reflect.Slice:
- x, ok := pValue.([]byte)
- if !ok {
- if y, yok := pValue.(ByteString); yok {
- x, ok = []byte(y), true
- }
- }
- if !ok && pValue != nil {
- return typeMismatchReason(p, v)
- }
- if v.Type().Elem().Kind() != reflect.Uint8 {
- return typeMismatchReason(p, v)
- }
- v.SetBytes(x)
- default:
- return typeMismatchReason(p, v)
- }
- if slice.IsValid() {
- slice.Set(reflect.Append(slice, v))
- }
- return ""
-}
-
-// loadEntity loads an EntityProto into PropertyLoadSaver or struct pointer.
-func loadEntity(dst interface{}, src *pb.EntityProto) (err error) {
- props, err := protoToProperties(src)
- if err != nil {
- return err
- }
- if e, ok := dst.(PropertyLoadSaver); ok {
- return e.Load(props)
- }
- return LoadStruct(dst, props)
-}
-
-func (s structPLS) Load(props []Property) error {
- var fieldName, reason string
- var l propertyLoader
- for _, p := range props {
- if errStr := l.load(s.codec, s.v, p, p.Multiple); errStr != "" {
- // We don't return early, as we try to load as many properties as possible.
- // It is valid to load an entity into a struct that cannot fully represent it.
- // That case returns an error, but the caller is free to ignore it.
- fieldName, reason = p.Name, errStr
- }
- }
- if reason != "" {
- return &ErrFieldMismatch{
- StructType: s.v.Type(),
- FieldName: fieldName,
- Reason: reason,
- }
- }
- return nil
-}
-
-func protoToProperties(src *pb.EntityProto) ([]Property, error) {
- props, rawProps := src.Property, src.RawProperty
- out := make([]Property, 0, len(props)+len(rawProps))
- for {
- var (
- x *pb.Property
- noIndex bool
- )
- if len(props) > 0 {
- x, props = props[0], props[1:]
- } else if len(rawProps) > 0 {
- x, rawProps = rawProps[0], rawProps[1:]
- noIndex = true
- } else {
- break
- }
-
- var value interface{}
- if x.Meaning != nil && *x.Meaning == pb.Property_INDEX_VALUE {
- value = indexValue{x.Value}
- } else {
- var err error
- value, err = propValue(x.Value, x.GetMeaning())
- if err != nil {
- return nil, err
- }
- }
- out = append(out, Property{
- Name: x.GetName(),
- Value: value,
- NoIndex: noIndex,
- Multiple: x.GetMultiple(),
- })
- }
- return out, nil
-}
-
-// propValue returns a Go value that combines the raw PropertyValue with a
-// meaning. For example, an Int64Value with GD_WHEN becomes a time.Time.
-func propValue(v *pb.PropertyValue, m pb.Property_Meaning) (interface{}, error) {
- switch {
- case v.Int64Value != nil:
- if m == pb.Property_GD_WHEN {
- return fromUnixMicro(*v.Int64Value), nil
- } else {
- return *v.Int64Value, nil
- }
- case v.BooleanValue != nil:
- return *v.BooleanValue, nil
- case v.StringValue != nil:
- if m == pb.Property_BLOB {
- return []byte(*v.StringValue), nil
- } else if m == pb.Property_BLOBKEY {
- return appengine.BlobKey(*v.StringValue), nil
- } else if m == pb.Property_BYTESTRING {
- return ByteString(*v.StringValue), nil
- } else {
- return *v.StringValue, nil
- }
- case v.DoubleValue != nil:
- return *v.DoubleValue, nil
- case v.Referencevalue != nil:
- key, err := referenceValueToKey(v.Referencevalue)
- if err != nil {
- return nil, err
- }
- return key, nil
- case v.Pointvalue != nil:
- // NOTE: Strangely, latitude maps to X, longitude to Y.
- return appengine.GeoPoint{Lat: v.Pointvalue.GetX(), Lng: v.Pointvalue.GetY()}, nil
- }
- return nil, nil
-}
-
-// indexValue is a Property value that is created when entities are loaded from
-// an index, such as from a projection query.
-//
-// Such Property values do not contain all of the metadata required to be
-// faithfully represented as a Go value, and are instead represented as an
-// opaque indexValue. Load the properties into a concrete struct type (e.g. by
-// passing a struct pointer to Iterator.Next) to reconstruct actual Go values
-// of type int, string, time.Time, etc.
-type indexValue struct {
- value *pb.PropertyValue
-}
diff --git a/vendor/google.golang.org/appengine/datastore/metadata.go b/vendor/google.golang.org/appengine/datastore/metadata.go
deleted file mode 100644
index 3192a31..0000000
--- a/vendor/google.golang.org/appengine/datastore/metadata.go
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright 2016 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package datastore
-
-import "golang.org/x/net/context"
-
-// Datastore kinds for the metadata entities.
-const (
- namespaceKind = "__namespace__"
- kindKind = "__kind__"
- propertyKind = "__property__"
- entityGroupKind = "__entitygroup__"
-)
-
-// Namespaces returns all the datastore namespaces.
-func Namespaces(ctx context.Context) ([]string, error) {
- // TODO(djd): Support range queries.
- q := NewQuery(namespaceKind).KeysOnly()
- keys, err := q.GetAll(ctx, nil)
- if err != nil {
- return nil, err
- }
- // The empty namespace key uses a numeric ID (==1), but luckily
- // the string ID defaults to "" for numeric IDs anyway.
- return keyNames(keys), nil
-}
-
-// Kinds returns the names of all the kinds in the current namespace.
-func Kinds(ctx context.Context) ([]string, error) {
- // TODO(djd): Support range queries.
- q := NewQuery(kindKind).KeysOnly()
- keys, err := q.GetAll(ctx, nil)
- if err != nil {
- return nil, err
- }
- return keyNames(keys), nil
-}
-
-// keyNames returns a slice of the provided keys' names (string IDs).
-func keyNames(keys []*Key) []string {
- n := make([]string, 0, len(keys))
- for _, k := range keys {
- n = append(n, k.StringID())
- }
- return n
-}
-
-// KindProperties returns all the indexed properties for the given kind.
-// The properties are returned as a map of property names to a slice of the
-// representation types. The representation types for the supported Go property
-// types are:
-// "INT64": signed integers and time.Time
-// "DOUBLE": float32 and float64
-// "BOOLEAN": bool
-// "STRING": string, []byte and ByteString
-// "POINT": appengine.GeoPoint
-// "REFERENCE": *Key
-// "USER": (not used in the Go runtime)
-func KindProperties(ctx context.Context, kind string) (map[string][]string, error) {
- // TODO(djd): Support range queries.
- kindKey := NewKey(ctx, kindKind, kind, 0, nil)
- q := NewQuery(propertyKind).Ancestor(kindKey)
-
- propMap := map[string][]string{}
- props := []struct {
- Repr []string `datastore:"property_representation"`
- }{}
-
- keys, err := q.GetAll(ctx, &props)
- if err != nil {
- return nil, err
- }
- for i, p := range props {
- propMap[keys[i].StringID()] = p.Repr
- }
- return propMap, nil
-}
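
For orientation, the metadata helpers above (Namespaces, Kinds, KindProperties) are plain datastore queries over reserved kinds and are normally called from a request handler. A minimal sketch, assuming a hypothetical diagnostic route and handler name:

package main

import (
	"fmt"
	"net/http"

	"google.golang.org/appengine"
	"google.golang.org/appengine/datastore"
	"google.golang.org/appengine/log"
)

// metadataHandler dumps every kind in the current namespace together with
// the representation types of its indexed properties.
func metadataHandler(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)

	kinds, err := datastore.Kinds(ctx)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	for _, kind := range kinds {
		props, err := datastore.KindProperties(ctx, kind)
		if err != nil {
			log.Errorf(ctx, "KindProperties(%q): %v", kind, err)
			continue
		}
		fmt.Fprintf(w, "%s: %v\n", kind, props)
	}
}

func main() {
	http.HandleFunc("/_debug/kinds", metadataHandler)
	appengine.Main()
}
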
diff --git a/vendor/google.golang.org/appengine/datastore/prop.go b/vendor/google.golang.org/appengine/datastore/prop.go
deleted file mode 100644
index 3caef9a..0000000
--- a/vendor/google.golang.org/appengine/datastore/prop.go
+++ /dev/null
@@ -1,294 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package datastore
-
-import (
- "fmt"
- "reflect"
- "strings"
- "sync"
- "unicode"
-)
-
-// Entities with more than this many indexed properties will not be saved.
-const maxIndexedProperties = 20000
-
-// []byte fields more than 1 megabyte long will not be loaded or saved.
-const maxBlobLen = 1 << 20
-
-// Property is a name/value pair plus some metadata. A datastore entity's
-// contents are loaded and saved as a sequence of Properties. An entity can
-// have multiple Properties with the same name, provided that p.Multiple is
-// true on all of that entity's Properties with that name.
-type Property struct {
- // Name is the property name.
- Name string
- // Value is the property value. The valid types are:
- // - int64
- // - bool
- // - string
- // - float64
- // - ByteString
- // - *Key
- // - time.Time
- // - appengine.BlobKey
- // - appengine.GeoPoint
- // - []byte (up to 1 megabyte in length)
- // This set is smaller than the set of valid struct field types that the
- // datastore can load and save. A Property Value cannot be a slice (apart
- // from []byte); use multiple Properties instead. Also, a Value's type
- // must be explicitly on the list above; it is not sufficient for the
- // underlying type to be on that list. For example, a Value of "type
- // myInt64 int64" is invalid. Smaller-width integers and floats are also
- // invalid. Again, this is more restrictive than the set of valid struct
- // field types.
- //
- // A Value will have an opaque type when loading entities from an index,
- // such as via a projection query. Load entities into a struct instead
- // of a PropertyLoadSaver when using a projection query.
- //
- // A Value may also be the nil interface value; this is equivalent to
- // Python's None but not directly representable by a Go struct. Loading
- // a nil-valued property into a struct will set that field to the zero
- // value.
- Value interface{}
- // NoIndex is whether the datastore cannot index this property.
- NoIndex bool
- // Multiple is whether the entity can have multiple properties with
- // the same name. Even if a particular instance only has one property with
- // a certain name, Multiple should be true if a struct would best represent
- // it as a field of type []T instead of type T.
- Multiple bool
-}
-
-// ByteString is a short byte slice (up to 1500 bytes) that can be indexed.
-type ByteString []byte
-
-// PropertyLoadSaver can be converted from and to a slice of Properties.
-type PropertyLoadSaver interface {
- Load([]Property) error
- Save() ([]Property, error)
-}
-
-// PropertyList converts a []Property to implement PropertyLoadSaver.
-type PropertyList []Property
-
-var (
- typeOfPropertyLoadSaver = reflect.TypeOf((*PropertyLoadSaver)(nil)).Elem()
- typeOfPropertyList = reflect.TypeOf(PropertyList(nil))
-)
-
-// Load loads all of the provided properties into l.
-// It does not first reset *l to an empty slice.
-func (l *PropertyList) Load(p []Property) error {
- *l = append(*l, p...)
- return nil
-}
-
-// Save saves all of l's properties as a slice of Properties.
-func (l *PropertyList) Save() ([]Property, error) {
- return *l, nil
-}
-
-// validPropertyName returns whether name consists of one or more valid Go
-// identifiers joined by ".".
-func validPropertyName(name string) bool {
- if name == "" {
- return false
- }
- for _, s := range strings.Split(name, ".") {
- if s == "" {
- return false
- }
- first := true
- for _, c := range s {
- if first {
- first = false
- if c != '_' && !unicode.IsLetter(c) {
- return false
- }
- } else {
- if c != '_' && !unicode.IsLetter(c) && !unicode.IsDigit(c) {
- return false
- }
- }
- }
- }
- return true
-}
-
-// structTag is the parsed `datastore:"name,options"` tag of a struct field.
-// If a field has no tag, or the tag has an empty name, then the structTag's
-// name is just the field name. A "-" name means that the datastore ignores
-// that field.
-type structTag struct {
- name string
- noIndex bool
-}
-
-// structCodec describes how to convert a struct to and from a sequence of
-// properties.
-type structCodec struct {
- // byIndex gives the structTag for the i'th field.
- byIndex []structTag
- // byName gives the field codec for the structTag with the given name.
- byName map[string]fieldCodec
- // hasSlice is whether a struct or any of its nested or embedded structs
- // has a slice-typed field (other than []byte).
- hasSlice bool
- // complete is whether the structCodec is complete. An incomplete
- // structCodec may be encountered when walking a recursive struct.
- complete bool
-}
-
-// fieldCodec is a struct field's index and, if that struct field's type is
-// itself a struct, that substruct's structCodec.
-type fieldCodec struct {
- index int
- substructCodec *structCodec
-}
-
-// structCodecs collects the structCodecs that have already been calculated.
-var (
- structCodecsMutex sync.Mutex
- structCodecs = make(map[reflect.Type]*structCodec)
-)
-
-// getStructCodec returns the structCodec for the given struct type.
-func getStructCodec(t reflect.Type) (*structCodec, error) {
- structCodecsMutex.Lock()
- defer structCodecsMutex.Unlock()
- return getStructCodecLocked(t)
-}
-
-// getStructCodecLocked implements getStructCodec. The structCodecsMutex must
-// be held when calling this function.
-func getStructCodecLocked(t reflect.Type) (ret *structCodec, retErr error) {
- c, ok := structCodecs[t]
- if ok {
- return c, nil
- }
- c = &structCodec{
- byIndex: make([]structTag, t.NumField()),
- byName: make(map[string]fieldCodec),
- }
-
- // Add c to the structCodecs map before we are sure it is good. If t is
- // a recursive type, it needs to find the incomplete entry for itself in
- // the map.
- structCodecs[t] = c
- defer func() {
- if retErr != nil {
- delete(structCodecs, t)
- }
- }()
-
- for i := range c.byIndex {
- f := t.Field(i)
- name, opts := f.Tag.Get("datastore"), ""
- if i := strings.Index(name, ","); i != -1 {
- name, opts = name[:i], name[i+1:]
- }
- if name == "" {
- if !f.Anonymous {
- name = f.Name
- }
- } else if name == "-" {
- c.byIndex[i] = structTag{name: name}
- continue
- } else if !validPropertyName(name) {
- return nil, fmt.Errorf("datastore: struct tag has invalid property name: %q", name)
- }
-
- substructType, fIsSlice := reflect.Type(nil), false
- switch f.Type.Kind() {
- case reflect.Struct:
- substructType = f.Type
- case reflect.Slice:
- if f.Type.Elem().Kind() == reflect.Struct {
- substructType = f.Type.Elem()
- }
- fIsSlice = f.Type != typeOfByteSlice
- c.hasSlice = c.hasSlice || fIsSlice
- }
-
- if substructType != nil && substructType != typeOfTime && substructType != typeOfGeoPoint {
- if name != "" {
- name = name + "."
- }
- sub, err := getStructCodecLocked(substructType)
- if err != nil {
- return nil, err
- }
- if !sub.complete {
- return nil, fmt.Errorf("datastore: recursive struct: field %q", f.Name)
- }
- if fIsSlice && sub.hasSlice {
- return nil, fmt.Errorf(
- "datastore: flattening nested structs leads to a slice of slices: field %q", f.Name)
- }
- c.hasSlice = c.hasSlice || sub.hasSlice
- for relName := range sub.byName {
- absName := name + relName
- if _, ok := c.byName[absName]; ok {
- return nil, fmt.Errorf("datastore: struct tag has repeated property name: %q", absName)
- }
- c.byName[absName] = fieldCodec{index: i, substructCodec: sub}
- }
- } else {
- if _, ok := c.byName[name]; ok {
- return nil, fmt.Errorf("datastore: struct tag has repeated property name: %q", name)
- }
- c.byName[name] = fieldCodec{index: i}
- }
-
- c.byIndex[i] = structTag{
- name: name,
- noIndex: opts == "noindex",
- }
- }
- c.complete = true
- return c, nil
-}
-
-// structPLS adapts a struct to be a PropertyLoadSaver.
-type structPLS struct {
- v reflect.Value
- codec *structCodec
-}
-
-// newStructPLS returns a PropertyLoadSaver for the struct pointer p.
-func newStructPLS(p interface{}) (PropertyLoadSaver, error) {
- v := reflect.ValueOf(p)
- if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct {
- return nil, ErrInvalidEntityType
- }
- v = v.Elem()
- codec, err := getStructCodec(v.Type())
- if err != nil {
- return nil, err
- }
- return structPLS{v, codec}, nil
-}
-
-// LoadStruct loads the properties from p to dst.
-// dst must be a struct pointer.
-func LoadStruct(dst interface{}, p []Property) error {
- x, err := newStructPLS(dst)
- if err != nil {
- return err
- }
- return x.Load(p)
-}
-
-// SaveStruct returns the properties from src as a slice of Properties.
-// src must be a struct pointer.
-func SaveStruct(src interface{}) ([]Property, error) {
- x, err := newStructPLS(src)
- if err != nil {
- return nil, err
- }
- return x.Save()
-}
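
SaveStruct and LoadStruct above are pure reflection over the Property representation and need no RPC, so the round trip can be shown in isolation. A minimal sketch; the Note type and its fields are illustrative assumptions:

package main

import (
	"fmt"
	"time"

	"google.golang.org/appengine/datastore"
)

// Note is a hypothetical entity type used only for this example.
type Note struct {
	Title   string
	Body    string `datastore:",noindex"`
	Created time.Time
}

func main() {
	src := Note{Title: "hello", Body: "world", Created: time.Now()}

	// SaveStruct flattens the struct into a []datastore.Property.
	props, err := datastore.SaveStruct(&src)
	if err != nil {
		panic(err)
	}
	for _, p := range props {
		fmt.Printf("%s = %v (noindex=%v)\n", p.Name, p.Value, p.NoIndex)
	}

	// LoadStruct reverses the conversion into a fresh value.
	var dst Note
	if err := datastore.LoadStruct(&dst, props); err != nil {
		panic(err)
	}
	fmt.Println(dst.Title == src.Title, dst.Body == src.Body)
}
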
diff --git a/vendor/google.golang.org/appengine/datastore/query.go b/vendor/google.golang.org/appengine/datastore/query.go
deleted file mode 100644
index 696a5d3..0000000
--- a/vendor/google.golang.org/appengine/datastore/query.go
+++ /dev/null
@@ -1,713 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package datastore
-
-import (
- "encoding/base64"
- "errors"
- "fmt"
- "math"
- "reflect"
- "strings"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/datastore"
-)
-
-type operator int
-
-const (
- lessThan operator = iota
- lessEq
- equal
- greaterEq
- greaterThan
-)
-
-var operatorToProto = map[operator]*pb.Query_Filter_Operator{
- lessThan: pb.Query_Filter_LESS_THAN.Enum(),
- lessEq: pb.Query_Filter_LESS_THAN_OR_EQUAL.Enum(),
- equal: pb.Query_Filter_EQUAL.Enum(),
- greaterEq: pb.Query_Filter_GREATER_THAN_OR_EQUAL.Enum(),
- greaterThan: pb.Query_Filter_GREATER_THAN.Enum(),
-}
-
-// filter is a conditional filter on query results.
-type filter struct {
- FieldName string
- Op operator
- Value interface{}
-}
-
-type sortDirection int
-
-const (
- ascending sortDirection = iota
- descending
-)
-
-var sortDirectionToProto = map[sortDirection]*pb.Query_Order_Direction{
- ascending: pb.Query_Order_ASCENDING.Enum(),
- descending: pb.Query_Order_DESCENDING.Enum(),
-}
-
-// order is a sort order on query results.
-type order struct {
- FieldName string
- Direction sortDirection
-}
-
-// NewQuery creates a new Query for a specific entity kind.
-//
-// An empty kind means to return all entities, including entities created and
-// managed by other App Engine features, and is called a kindless query.
-// Kindless queries cannot include filters or sort orders on property values.
-func NewQuery(kind string) *Query {
- return &Query{
- kind: kind,
- limit: -1,
- }
-}
-
-// Query represents a datastore query.
-type Query struct {
- kind string
- ancestor *Key
- filter []filter
- order []order
- projection []string
-
- distinct bool
- keysOnly bool
- eventual bool
- limit int32
- offset int32
- start *pb.CompiledCursor
- end *pb.CompiledCursor
-
- err error
-}
-
-func (q *Query) clone() *Query {
- x := *q
- // Copy the contents of the slice-typed fields to a new backing store.
- if len(q.filter) > 0 {
- x.filter = make([]filter, len(q.filter))
- copy(x.filter, q.filter)
- }
- if len(q.order) > 0 {
- x.order = make([]order, len(q.order))
- copy(x.order, q.order)
- }
- return &x
-}
-
-// Ancestor returns a derivative query with an ancestor filter.
-// The ancestor should not be nil.
-func (q *Query) Ancestor(ancestor *Key) *Query {
- q = q.clone()
- if ancestor == nil {
- q.err = errors.New("datastore: nil query ancestor")
- return q
- }
- q.ancestor = ancestor
- return q
-}
-
-// EventualConsistency returns a derivative query that returns eventually
-// consistent results.
-// It only has an effect on ancestor queries.
-func (q *Query) EventualConsistency() *Query {
- q = q.clone()
- q.eventual = true
- return q
-}
-
-// Filter returns a derivative query with a field-based filter.
-// The filterStr argument must be a field name followed by optional space,
-// followed by an operator, one of ">", "<", ">=", "<=", or "=".
-// Fields are compared against the provided value using the operator.
-// Multiple filters are AND'ed together.
-func (q *Query) Filter(filterStr string, value interface{}) *Query {
- q = q.clone()
- filterStr = strings.TrimSpace(filterStr)
- if len(filterStr) < 1 {
- q.err = errors.New("datastore: invalid filter: " + filterStr)
- return q
- }
- f := filter{
- FieldName: strings.TrimRight(filterStr, " ><=!"),
- Value: value,
- }
- switch op := strings.TrimSpace(filterStr[len(f.FieldName):]); op {
- case "<=":
- f.Op = lessEq
- case ">=":
- f.Op = greaterEq
- case "<":
- f.Op = lessThan
- case ">":
- f.Op = greaterThan
- case "=":
- f.Op = equal
- default:
- q.err = fmt.Errorf("datastore: invalid operator %q in filter %q", op, filterStr)
- return q
- }
- q.filter = append(q.filter, f)
- return q
-}
-
-// Order returns a derivative query with a field-based sort order. Orders are
-// applied in the order they are added. The default order is ascending; to sort
-// in descending order prefix the fieldName with a minus sign (-).
-func (q *Query) Order(fieldName string) *Query {
- q = q.clone()
- fieldName = strings.TrimSpace(fieldName)
- o := order{
- Direction: ascending,
- FieldName: fieldName,
- }
- if strings.HasPrefix(fieldName, "-") {
- o.Direction = descending
- o.FieldName = strings.TrimSpace(fieldName[1:])
- } else if strings.HasPrefix(fieldName, "+") {
- q.err = fmt.Errorf("datastore: invalid order: %q", fieldName)
- return q
- }
- if len(o.FieldName) == 0 {
- q.err = errors.New("datastore: empty order")
- return q
- }
- q.order = append(q.order, o)
- return q
-}
-
-// Project returns a derivative query that yields only the given fields. It
-// cannot be used with KeysOnly.
-func (q *Query) Project(fieldNames ...string) *Query {
- q = q.clone()
- q.projection = append([]string(nil), fieldNames...)
- return q
-}
-
-// Distinct returns a derivative query that yields de-duplicated entities with
-// respect to the set of projected fields. It is only used for projection
-// queries.
-func (q *Query) Distinct() *Query {
- q = q.clone()
- q.distinct = true
- return q
-}
-
-// KeysOnly returns a derivative query that yields only keys, not keys and
-// entities. It cannot be used with projection queries.
-func (q *Query) KeysOnly() *Query {
- q = q.clone()
- q.keysOnly = true
- return q
-}
-
-// Limit returns a derivative query that has a limit on the number of results
-// returned. A negative value means unlimited.
-func (q *Query) Limit(limit int) *Query {
- q = q.clone()
- if limit < math.MinInt32 || limit > math.MaxInt32 {
- q.err = errors.New("datastore: query limit overflow")
- return q
- }
- q.limit = int32(limit)
- return q
-}
-
-// Offset returns a derivative query that has an offset of how many keys to
-// skip over before returning results. A negative value is invalid.
-func (q *Query) Offset(offset int) *Query {
- q = q.clone()
- if offset < 0 {
- q.err = errors.New("datastore: negative query offset")
- return q
- }
- if offset > math.MaxInt32 {
- q.err = errors.New("datastore: query offset overflow")
- return q
- }
- q.offset = int32(offset)
- return q
-}
-
-// Start returns a derivative query with the given start point.
-func (q *Query) Start(c Cursor) *Query {
- q = q.clone()
- if c.cc == nil {
- q.err = errors.New("datastore: invalid cursor")
- return q
- }
- q.start = c.cc
- return q
-}
-
-// End returns a derivative query with the given end point.
-func (q *Query) End(c Cursor) *Query {
- q = q.clone()
- if c.cc == nil {
- q.err = errors.New("datastore: invalid cursor")
- return q
- }
- q.end = c.cc
- return q
-}
-
-// toProto converts the query to a protocol buffer.
-func (q *Query) toProto(dst *pb.Query, appID string) error {
- if len(q.projection) != 0 && q.keysOnly {
- return errors.New("datastore: query cannot both project and be keys-only")
- }
- dst.Reset()
- dst.App = proto.String(appID)
- if q.kind != "" {
- dst.Kind = proto.String(q.kind)
- }
- if q.ancestor != nil {
- dst.Ancestor = keyToProto(appID, q.ancestor)
- if q.eventual {
- dst.Strong = proto.Bool(false)
- }
- }
- if q.projection != nil {
- dst.PropertyName = q.projection
- if q.distinct {
- dst.GroupByPropertyName = q.projection
- }
- }
- if q.keysOnly {
- dst.KeysOnly = proto.Bool(true)
- dst.RequirePerfectPlan = proto.Bool(true)
- }
- for _, qf := range q.filter {
- if qf.FieldName == "" {
- return errors.New("datastore: empty query filter field name")
- }
- p, errStr := valueToProto(appID, qf.FieldName, reflect.ValueOf(qf.Value), false)
- if errStr != "" {
- return errors.New("datastore: bad query filter value type: " + errStr)
- }
- xf := &pb.Query_Filter{
- Op: operatorToProto[qf.Op],
- Property: []*pb.Property{p},
- }
- if xf.Op == nil {
- return errors.New("datastore: unknown query filter operator")
- }
- dst.Filter = append(dst.Filter, xf)
- }
- for _, qo := range q.order {
- if qo.FieldName == "" {
- return errors.New("datastore: empty query order field name")
- }
- xo := &pb.Query_Order{
- Property: proto.String(qo.FieldName),
- Direction: sortDirectionToProto[qo.Direction],
- }
- if xo.Direction == nil {
- return errors.New("datastore: unknown query order direction")
- }
- dst.Order = append(dst.Order, xo)
- }
- if q.limit >= 0 {
- dst.Limit = proto.Int32(q.limit)
- }
- if q.offset != 0 {
- dst.Offset = proto.Int32(q.offset)
- }
- dst.CompiledCursor = q.start
- dst.EndCompiledCursor = q.end
- dst.Compile = proto.Bool(true)
- return nil
-}
-
-// Count returns the number of results for the query.
-func (q *Query) Count(c context.Context) (int, error) {
- // Check that the query is well-formed.
- if q.err != nil {
- return 0, q.err
- }
-
- // Run a copy of the query, with keysOnly true (if we're not a projection,
- // since the two are incompatible), and an adjusted offset. We also set the
- // limit to zero, as we don't want any actual entity data, just the number
- // of skipped results.
- newQ := q.clone()
- newQ.keysOnly = len(newQ.projection) == 0
- newQ.limit = 0
- if q.limit < 0 {
- // If the original query was unlimited, set the new query's offset to maximum.
- newQ.offset = math.MaxInt32
- } else {
- newQ.offset = q.offset + q.limit
- if newQ.offset < 0 {
- // Do the best we can, in the presence of overflow.
- newQ.offset = math.MaxInt32
- }
- }
- req := &pb.Query{}
- if err := newQ.toProto(req, internal.FullyQualifiedAppID(c)); err != nil {
- return 0, err
- }
- res := &pb.QueryResult{}
- if err := internal.Call(c, "datastore_v3", "RunQuery", req, res); err != nil {
- return 0, err
- }
-
- // n is the count we will return. For example, suppose that our original
- // query had an offset of 4 and a limit of 2008: the count will be 2008,
- // provided that there are at least 2012 matching entities. However, the
- // RPCs will only skip 1000 results at a time. The RPC sequence is:
- // call RunQuery with (offset, limit) = (2012, 0) // 2012 == newQ.offset
- // response has (skippedResults, moreResults) = (1000, true)
- // n += 1000 // n == 1000
- // call Next with (offset, limit) = (1012, 0) // 1012 == newQ.offset - n
- // response has (skippedResults, moreResults) = (1000, true)
- // n += 1000 // n == 2000
- // call Next with (offset, limit) = (12, 0) // 12 == newQ.offset - n
- // response has (skippedResults, moreResults) = (12, false)
- // n += 12 // n == 2012
- // // exit the loop
- // n -= 4 // n == 2008
- var n int32
- for {
- // The QueryResult should have no actual entity data, just skipped results.
- if len(res.Result) != 0 {
- return 0, errors.New("datastore: internal error: Count request returned too much data")
- }
- n += res.GetSkippedResults()
- if !res.GetMoreResults() {
- break
- }
- if err := callNext(c, res, newQ.offset-n, 0); err != nil {
- return 0, err
- }
- }
- n -= q.offset
- if n < 0 {
- // If the offset was greater than the number of matching entities,
- // return 0 instead of negative.
- n = 0
- }
- return int(n), nil
-}
-
-// callNext issues a datastore_v3/Next RPC to advance a cursor, such as that
-// returned by a query with more results.
-func callNext(c context.Context, res *pb.QueryResult, offset, limit int32) error {
- if res.Cursor == nil {
- return errors.New("datastore: internal error: server did not return a cursor")
- }
- req := &pb.NextRequest{
- Cursor: res.Cursor,
- }
- if limit >= 0 {
- req.Count = proto.Int32(limit)
- }
- if offset != 0 {
- req.Offset = proto.Int32(offset)
- }
- if res.CompiledCursor != nil {
- req.Compile = proto.Bool(true)
- }
- res.Reset()
- return internal.Call(c, "datastore_v3", "Next", req, res)
-}
-
-// GetAll runs the query in the given context and returns all keys that match
-// that query, as well as appending the values to dst.
-//
-// dst must have type *[]S or *[]*S or *[]P, for some struct type S or some non-
-// interface, non-pointer type P such that P or *P implements PropertyLoadSaver.
-//
-// As a special case, *PropertyList is an invalid type for dst, even though a
-// PropertyList is a slice of structs. It is treated as invalid to avoid being
-// mistakenly passed when *[]PropertyList was intended.
-//
-// The keys returned by GetAll will be in a 1-1 correspondence with the entities
-// added to dst.
-//
-// If q is a ``keys-only'' query, GetAll ignores dst and only returns the keys.
-func (q *Query) GetAll(c context.Context, dst interface{}) ([]*Key, error) {
- var (
- dv reflect.Value
- mat multiArgType
- elemType reflect.Type
- errFieldMismatch error
- )
- if !q.keysOnly {
- dv = reflect.ValueOf(dst)
- if dv.Kind() != reflect.Ptr || dv.IsNil() {
- return nil, ErrInvalidEntityType
- }
- dv = dv.Elem()
- mat, elemType = checkMultiArg(dv)
- if mat == multiArgTypeInvalid || mat == multiArgTypeInterface {
- return nil, ErrInvalidEntityType
- }
- }
-
- var keys []*Key
- for t := q.Run(c); ; {
- k, e, err := t.next()
- if err == Done {
- break
- }
- if err != nil {
- return keys, err
- }
- if !q.keysOnly {
- ev := reflect.New(elemType)
- if elemType.Kind() == reflect.Map {
- // This is a special case. The zero values of a map type are
- // not immediately useful; they have to be make'd.
- //
- // Funcs and channels are similar, in that a zero value is not useful,
- // but even a freshly make'd channel isn't useful: there's no fixed
- // channel buffer size that is always going to be large enough, and
- // there's no goroutine to drain the other end. Theoretically, these
- // types could be supported, for example by sniffing for a constructor
- // method or requiring prior registration, but for now it's not a
- // frequent enough concern to be worth it. Programmers can work around
- // it by explicitly using Iterator.Next instead of the Query.GetAll
- // convenience method.
- x := reflect.MakeMap(elemType)
- ev.Elem().Set(x)
- }
- if err = loadEntity(ev.Interface(), e); err != nil {
- if _, ok := err.(*ErrFieldMismatch); ok {
- // We continue loading entities even in the face of field mismatch errors.
- // If we encounter any other error, that other error is returned. Otherwise,
- // an ErrFieldMismatch is returned.
- errFieldMismatch = err
- } else {
- return keys, err
- }
- }
- if mat != multiArgTypeStructPtr {
- ev = ev.Elem()
- }
- dv.Set(reflect.Append(dv, ev))
- }
- keys = append(keys, k)
- }
- return keys, errFieldMismatch
-}
-
-// Run runs the query in the given context.
-func (q *Query) Run(c context.Context) *Iterator {
- if q.err != nil {
- return &Iterator{err: q.err}
- }
- t := &Iterator{
- c: c,
- limit: q.limit,
- q: q,
- prevCC: q.start,
- }
- var req pb.Query
- if err := q.toProto(&req, internal.FullyQualifiedAppID(c)); err != nil {
- t.err = err
- return t
- }
- if err := internal.Call(c, "datastore_v3", "RunQuery", &req, &t.res); err != nil {
- t.err = err
- return t
- }
- offset := q.offset - t.res.GetSkippedResults()
- for offset > 0 && t.res.GetMoreResults() {
- t.prevCC = t.res.CompiledCursor
- if err := callNext(t.c, &t.res, offset, t.limit); err != nil {
- t.err = err
- break
- }
- skip := t.res.GetSkippedResults()
- if skip < 0 {
- t.err = errors.New("datastore: internal error: negative number of skipped_results")
- break
- }
- offset -= skip
- }
- if offset < 0 {
- t.err = errors.New("datastore: internal error: query offset was overshot")
- }
- return t
-}
-
-// Iterator is the result of running a query.
-type Iterator struct {
- c context.Context
- err error
- // res is the result of the most recent RunQuery or Next API call.
- res pb.QueryResult
- // i is how many elements of res.Result we have iterated over.
- i int
- // limit is the limit on the number of results this iterator should return.
- // A negative value means unlimited.
- limit int32
- // q is the original query which yielded this iterator.
- q *Query
- // prevCC is the compiled cursor that marks the end of the previous batch
- // of results.
- prevCC *pb.CompiledCursor
-}
-
-// Done is returned when a query iteration has completed.
-var Done = errors.New("datastore: query has no more results")
-
-// Next returns the key of the next result. When there are no more results,
-// Done is returned as the error.
-//
-// If the query is not keys only and dst is non-nil, it also loads the entity
-// stored for that key into the struct pointer or PropertyLoadSaver dst, with
-// the same semantics and possible errors as for the Get function.
-func (t *Iterator) Next(dst interface{}) (*Key, error) {
- k, e, err := t.next()
- if err != nil {
- return nil, err
- }
- if dst != nil && !t.q.keysOnly {
- err = loadEntity(dst, e)
- }
- return k, err
-}
-
-func (t *Iterator) next() (*Key, *pb.EntityProto, error) {
- if t.err != nil {
- return nil, nil, t.err
- }
-
- // Issue datastore_v3/Next RPCs as necessary.
- for t.i == len(t.res.Result) {
- if !t.res.GetMoreResults() {
- t.err = Done
- return nil, nil, t.err
- }
- t.prevCC = t.res.CompiledCursor
- if err := callNext(t.c, &t.res, 0, t.limit); err != nil {
- t.err = err
- return nil, nil, t.err
- }
- if t.res.GetSkippedResults() != 0 {
- t.err = errors.New("datastore: internal error: iterator has skipped results")
- return nil, nil, t.err
- }
- t.i = 0
- if t.limit >= 0 {
- t.limit -= int32(len(t.res.Result))
- if t.limit < 0 {
- t.err = errors.New("datastore: internal error: query returned more results than the limit")
- return nil, nil, t.err
- }
- }
- }
-
- // Extract the key from the t.i'th element of t.res.Result.
- e := t.res.Result[t.i]
- t.i++
- if e.Key == nil {
- return nil, nil, errors.New("datastore: internal error: server did not return a key")
- }
- k, err := protoToKey(e.Key)
- if err != nil || k.Incomplete() {
- return nil, nil, errors.New("datastore: internal error: server returned an invalid key")
- }
- return k, e, nil
-}
-
-// Cursor returns a cursor for the iterator's current location.
-func (t *Iterator) Cursor() (Cursor, error) {
- if t.err != nil && t.err != Done {
- return Cursor{}, t.err
- }
- // If we are at either end of the current batch of results,
- // return the compiled cursor at that end.
- skipped := t.res.GetSkippedResults()
- if t.i == 0 && skipped == 0 {
- if t.prevCC == nil {
- // A nil pointer (of type *pb.CompiledCursor) means no constraint:
- // passing it as the end cursor of a new query means unlimited results
- // (glossing over the integer limit parameter for now).
- // A non-nil pointer to an empty pb.CompiledCursor means the start:
- // passing it as the end cursor of a new query means 0 results.
- // If prevCC was nil, then the original query had no start cursor, but
- // Iterator.Cursor should return "the start" instead of unlimited.
- return Cursor{&zeroCC}, nil
- }
- return Cursor{t.prevCC}, nil
- }
- if t.i == len(t.res.Result) {
- return Cursor{t.res.CompiledCursor}, nil
- }
- // Otherwise, re-run the query offset to this iterator's position, starting from
- // the most recent compiled cursor. This is done on a best-effort basis, as it
- // is racy; if a concurrent process has added or removed entities, then the
- // cursor returned may be inconsistent.
- q := t.q.clone()
- q.start = t.prevCC
- q.offset = skipped + int32(t.i)
- q.limit = 0
- q.keysOnly = len(q.projection) == 0
- t1 := q.Run(t.c)
- _, _, err := t1.next()
- if err != Done {
- if err == nil {
- err = fmt.Errorf("datastore: internal error: zero-limit query did not have zero results")
- }
- return Cursor{}, err
- }
- return Cursor{t1.res.CompiledCursor}, nil
-}
-
-var zeroCC pb.CompiledCursor
-
-// Cursor is an iterator's position. It can be converted to and from an opaque
-// string. A cursor can be used from different HTTP requests, but only with a
-// query with the same kind, ancestor, filter and order constraints.
-type Cursor struct {
- cc *pb.CompiledCursor
-}
-
-// String returns a base-64 string representation of a cursor.
-func (c Cursor) String() string {
- if c.cc == nil {
- return ""
- }
- b, err := proto.Marshal(c.cc)
- if err != nil {
- // The only way to construct a Cursor with a non-nil cc field is to
- // unmarshal from the byte representation. We panic if the unmarshal
- // succeeds but the marshaling of the unchanged protobuf value fails.
- panic(fmt.Sprintf("datastore: internal error: malformed cursor: %v", err))
- }
- return strings.TrimRight(base64.URLEncoding.EncodeToString(b), "=")
-}
-
-// DecodeCursor decodes a cursor from its base-64 string representation.
-func DecodeCursor(s string) (Cursor, error) {
- if s == "" {
- return Cursor{&zeroCC}, nil
- }
- if n := len(s) % 4; n != 0 {
- s += strings.Repeat("=", 4-n)
- }
- b, err := base64.URLEncoding.DecodeString(s)
- if err != nil {
- return Cursor{}, err
- }
- cc := &pb.CompiledCursor{}
- if err := proto.Unmarshal(b, cc); err != nil {
- return Cursor{}, err
- }
- return Cursor{cc}, nil
-}
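
The query builder above is used by chaining Filter, Order and Limit and then running GetAll against a request context. A hedged sketch of that flow; the Greeting kind, handler name and route are assumptions, not part of the vendored package:

package main

import (
	"fmt"
	"net/http"
	"time"

	"google.golang.org/appengine"
	"google.golang.org/appengine/datastore"
)

// Greeting is a hypothetical kind used only for this example.
type Greeting struct {
	Author  string
	Content string `datastore:",noindex"`
	Date    time.Time
}

// listRecent returns the ten newest greetings from the last 24 hours.
func listRecent(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)

	q := datastore.NewQuery("Greeting").
		Filter("Date >", time.Now().Add(-24*time.Hour)).
		Order("-Date").
		Limit(10)

	var greetings []Greeting
	keys, err := q.GetAll(ctx, &greetings)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	for i, g := range greetings {
		fmt.Fprintf(w, "%v: %s wrote %q\n", keys[i], g.Author, g.Content)
	}
}

func main() {
	http.HandleFunc("/recent", listRecent)
	appengine.Main()
}
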
diff --git a/vendor/google.golang.org/appengine/datastore/save.go b/vendor/google.golang.org/appengine/datastore/save.go
deleted file mode 100644
index 6aeffb6..0000000
--- a/vendor/google.golang.org/appengine/datastore/save.go
+++ /dev/null
@@ -1,300 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package datastore
-
-import (
- "errors"
- "fmt"
- "math"
- "reflect"
- "time"
-
- "github.com/golang/protobuf/proto"
-
- "google.golang.org/appengine"
- pb "google.golang.org/appengine/internal/datastore"
-)
-
-func toUnixMicro(t time.Time) int64 {
- // We cannot use t.UnixNano() / 1e3 because we want to handle times more than
- // 2^63 nanoseconds (which is about 292 years) away from 1970, and those cannot
- // be represented in the numerator of a single int64 divide.
- return t.Unix()*1e6 + int64(t.Nanosecond()/1e3)
-}
-
-func fromUnixMicro(t int64) time.Time {
- return time.Unix(t/1e6, (t%1e6)*1e3)
-}
-
-var (
- minTime = time.Unix(int64(math.MinInt64)/1e6, (int64(math.MinInt64)%1e6)*1e3)
- maxTime = time.Unix(int64(math.MaxInt64)/1e6, (int64(math.MaxInt64)%1e6)*1e3)
-)
-
-// valueToProto converts a named value to a newly allocated Property.
-// The returned error string is empty on success.
-func valueToProto(defaultAppID, name string, v reflect.Value, multiple bool) (p *pb.Property, errStr string) {
- var (
- pv pb.PropertyValue
- unsupported bool
- )
- switch v.Kind() {
- case reflect.Invalid:
- // No-op.
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- pv.Int64Value = proto.Int64(v.Int())
- case reflect.Bool:
- pv.BooleanValue = proto.Bool(v.Bool())
- case reflect.String:
- pv.StringValue = proto.String(v.String())
- case reflect.Float32, reflect.Float64:
- pv.DoubleValue = proto.Float64(v.Float())
- case reflect.Ptr:
- if k, ok := v.Interface().(*Key); ok {
- if k != nil {
- pv.Referencevalue = keyToReferenceValue(defaultAppID, k)
- }
- } else {
- unsupported = true
- }
- case reflect.Struct:
- switch t := v.Interface().(type) {
- case time.Time:
- if t.Before(minTime) || t.After(maxTime) {
- return nil, "time value out of range"
- }
- pv.Int64Value = proto.Int64(toUnixMicro(t))
- case appengine.GeoPoint:
- if !t.Valid() {
- return nil, "invalid GeoPoint value"
- }
- // NOTE: Strangely, latitude maps to X, longitude to Y.
- pv.Pointvalue = &pb.PropertyValue_PointValue{X: &t.Lat, Y: &t.Lng}
- default:
- unsupported = true
- }
- case reflect.Slice:
- if b, ok := v.Interface().([]byte); ok {
- pv.StringValue = proto.String(string(b))
- } else {
- // nvToProto should already catch slice values.
- // If we get here, we have a slice of slice values.
- unsupported = true
- }
- default:
- unsupported = true
- }
- if unsupported {
- return nil, "unsupported datastore value type: " + v.Type().String()
- }
- p = &pb.Property{
- Name: proto.String(name),
- Value: &pv,
- Multiple: proto.Bool(multiple),
- }
- if v.IsValid() {
- switch v.Interface().(type) {
- case []byte:
- p.Meaning = pb.Property_BLOB.Enum()
- case ByteString:
- p.Meaning = pb.Property_BYTESTRING.Enum()
- case appengine.BlobKey:
- p.Meaning = pb.Property_BLOBKEY.Enum()
- case time.Time:
- p.Meaning = pb.Property_GD_WHEN.Enum()
- case appengine.GeoPoint:
- p.Meaning = pb.Property_GEORSS_POINT.Enum()
- }
- }
- return p, ""
-}
-
-// saveEntity converts a PropertyLoadSaver or struct pointer into an EntityProto.
-func saveEntity(defaultAppID string, key *Key, src interface{}) (*pb.EntityProto, error) {
- var err error
- var props []Property
- if e, ok := src.(PropertyLoadSaver); ok {
- props, err = e.Save()
- } else {
- props, err = SaveStruct(src)
- }
- if err != nil {
- return nil, err
- }
- return propertiesToProto(defaultAppID, key, props)
-}
-
-func saveStructProperty(props *[]Property, name string, noIndex, multiple bool, v reflect.Value) error {
- p := Property{
- Name: name,
- NoIndex: noIndex,
- Multiple: multiple,
- }
- switch x := v.Interface().(type) {
- case *Key:
- p.Value = x
- case time.Time:
- p.Value = x
- case appengine.BlobKey:
- p.Value = x
- case appengine.GeoPoint:
- p.Value = x
- case ByteString:
- p.Value = x
- default:
- switch v.Kind() {
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- p.Value = v.Int()
- case reflect.Bool:
- p.Value = v.Bool()
- case reflect.String:
- p.Value = v.String()
- case reflect.Float32, reflect.Float64:
- p.Value = v.Float()
- case reflect.Slice:
- if v.Type().Elem().Kind() == reflect.Uint8 {
- p.NoIndex = true
- p.Value = v.Bytes()
- }
- case reflect.Struct:
- if !v.CanAddr() {
- return fmt.Errorf("datastore: unsupported struct field: value is unaddressable")
- }
- sub, err := newStructPLS(v.Addr().Interface())
- if err != nil {
- return fmt.Errorf("datastore: unsupported struct field: %v", err)
- }
- return sub.(structPLS).save(props, name, noIndex, multiple)
- }
- }
- if p.Value == nil {
- return fmt.Errorf("datastore: unsupported struct field type: %v", v.Type())
- }
- *props = append(*props, p)
- return nil
-}
-
-func (s structPLS) Save() ([]Property, error) {
- var props []Property
- if err := s.save(&props, "", false, false); err != nil {
- return nil, err
- }
- return props, nil
-}
-
-func (s structPLS) save(props *[]Property, prefix string, noIndex, multiple bool) error {
- for i, t := range s.codec.byIndex {
- if t.name == "-" {
- continue
- }
- name := t.name
- if prefix != "" {
- name = prefix + name
- }
- v := s.v.Field(i)
- if !v.IsValid() || !v.CanSet() {
- continue
- }
- noIndex1 := noIndex || t.noIndex
- // For slice fields that aren't []byte, save each element.
- if v.Kind() == reflect.Slice && v.Type().Elem().Kind() != reflect.Uint8 {
- for j := 0; j < v.Len(); j++ {
- if err := saveStructProperty(props, name, noIndex1, true, v.Index(j)); err != nil {
- return err
- }
- }
- continue
- }
- // Otherwise, save the field itself.
- if err := saveStructProperty(props, name, noIndex1, multiple, v); err != nil {
- return err
- }
- }
- return nil
-}
-
-func propertiesToProto(defaultAppID string, key *Key, props []Property) (*pb.EntityProto, error) {
- e := &pb.EntityProto{
- Key: keyToProto(defaultAppID, key),
- }
- if key.parent == nil {
- e.EntityGroup = &pb.Path{}
- } else {
- e.EntityGroup = keyToProto(defaultAppID, key.root()).Path
- }
- prevMultiple := make(map[string]bool)
-
- for _, p := range props {
- if pm, ok := prevMultiple[p.Name]; ok {
- if !pm || !p.Multiple {
- return nil, fmt.Errorf("datastore: multiple Properties with Name %q, but Multiple is false", p.Name)
- }
- } else {
- prevMultiple[p.Name] = p.Multiple
- }
-
- x := &pb.Property{
- Name: proto.String(p.Name),
- Value: new(pb.PropertyValue),
- Multiple: proto.Bool(p.Multiple),
- }
- switch v := p.Value.(type) {
- case int64:
- x.Value.Int64Value = proto.Int64(v)
- case bool:
- x.Value.BooleanValue = proto.Bool(v)
- case string:
- x.Value.StringValue = proto.String(v)
- if p.NoIndex {
- x.Meaning = pb.Property_TEXT.Enum()
- }
- case float64:
- x.Value.DoubleValue = proto.Float64(v)
- case *Key:
- if v != nil {
- x.Value.Referencevalue = keyToReferenceValue(defaultAppID, v)
- }
- case time.Time:
- if v.Before(minTime) || v.After(maxTime) {
- return nil, fmt.Errorf("datastore: time value out of range")
- }
- x.Value.Int64Value = proto.Int64(toUnixMicro(v))
- x.Meaning = pb.Property_GD_WHEN.Enum()
- case appengine.BlobKey:
- x.Value.StringValue = proto.String(string(v))
- x.Meaning = pb.Property_BLOBKEY.Enum()
- case appengine.GeoPoint:
- if !v.Valid() {
- return nil, fmt.Errorf("datastore: invalid GeoPoint value")
- }
- // NOTE: Strangely, latitude maps to X, longitude to Y.
- x.Value.Pointvalue = &pb.PropertyValue_PointValue{X: &v.Lat, Y: &v.Lng}
- x.Meaning = pb.Property_GEORSS_POINT.Enum()
- case []byte:
- x.Value.StringValue = proto.String(string(v))
- x.Meaning = pb.Property_BLOB.Enum()
- if !p.NoIndex {
- return nil, fmt.Errorf("datastore: cannot index a []byte valued Property with Name %q", p.Name)
- }
- case ByteString:
- x.Value.StringValue = proto.String(string(v))
- x.Meaning = pb.Property_BYTESTRING.Enum()
- default:
- if p.Value != nil {
- return nil, fmt.Errorf("datastore: invalid Value type for a Property with Name %q", p.Name)
- }
- }
-
- if p.NoIndex {
- e.RawProperty = append(e.RawProperty, x)
- } else {
- e.Property = append(e.Property, x)
- if len(e.Property) > maxIndexedProperties {
- return nil, errors.New("datastore: too many indexed properties")
- }
- }
- }
- return e, nil
-}
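
save.go encodes time.Time as microseconds since the Unix epoch. Since toUnixMicro and fromUnixMicro are unexported, the sketch below simply repeats the same arithmetic to show the round trip; sub-microsecond precision is truncated:

package main

import (
	"fmt"
	"time"
)

// Same arithmetic as the unexported helpers above: avoid t.UnixNano()/1e3 so
// that times far from 1970 do not overflow the int64 nanosecond range.
func toUnixMicro(t time.Time) int64 {
	return t.Unix()*1e6 + int64(t.Nanosecond()/1e3)
}

func fromUnixMicro(t int64) time.Time {
	return time.Unix(t/1e6, (t%1e6)*1e3)
}

func main() {
	t := time.Date(2016, time.May, 18, 20, 11, 29, 123456789, time.UTC)
	us := toUnixMicro(t)
	back := fromUnixMicro(us).UTC()
	fmt.Println(us)   // microseconds since the epoch
	fmt.Println(back) // same instant, truncated to microsecond precision
}
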
diff --git a/vendor/google.golang.org/appengine/datastore/transaction.go b/vendor/google.golang.org/appengine/datastore/transaction.go
deleted file mode 100644
index a7f3f2b..0000000
--- a/vendor/google.golang.org/appengine/datastore/transaction.go
+++ /dev/null
@@ -1,87 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package datastore
-
-import (
- "errors"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/datastore"
-)
-
-func init() {
- internal.RegisterTransactionSetter(func(x *pb.Query, t *pb.Transaction) {
- x.Transaction = t
- })
- internal.RegisterTransactionSetter(func(x *pb.GetRequest, t *pb.Transaction) {
- x.Transaction = t
- })
- internal.RegisterTransactionSetter(func(x *pb.PutRequest, t *pb.Transaction) {
- x.Transaction = t
- })
- internal.RegisterTransactionSetter(func(x *pb.DeleteRequest, t *pb.Transaction) {
- x.Transaction = t
- })
-}
-
-// ErrConcurrentTransaction is returned when a transaction is rolled back due
-// to a conflict with a concurrent transaction.
-var ErrConcurrentTransaction = errors.New("datastore: concurrent transaction")
-
-// RunInTransaction runs f in a transaction. It calls f with a transaction
-// context tc that f should use for all App Engine operations.
-//
-// If f returns nil, RunInTransaction attempts to commit the transaction,
-// returning nil if it succeeds. If the commit fails due to a conflicting
-// transaction, RunInTransaction retries f, each time with a new transaction
-// context. It gives up and returns ErrConcurrentTransaction after three
-// failed attempts. The number of attempts can be configured by specifying
-// TransactionOptions.Attempts.
-//
-// If f returns non-nil, then any datastore changes will not be applied and
-// RunInTransaction returns that same error. The function f is not retried.
-//
-// Note that when f returns, the transaction is not yet committed. Calling code
-// must be careful not to assume that any of f's changes have been committed
-// until RunInTransaction returns nil.
-//
-// Since f may be called multiple times, f should usually be idempotent.
-// datastore.Get is not idempotent when unmarshaling slice fields.
-//
-// Nested transactions are not supported; c may not be a transaction context.
-func RunInTransaction(c context.Context, f func(tc context.Context) error, opts *TransactionOptions) error {
- xg := false
- if opts != nil {
- xg = opts.XG
- }
- attempts := 3
- if opts != nil && opts.Attempts > 0 {
- attempts = opts.Attempts
- }
- for i := 0; i < attempts; i++ {
- if err := internal.RunTransactionOnce(c, f, xg); err != internal.ErrConcurrentTransaction {
- return err
- }
- }
- return ErrConcurrentTransaction
-}
-
-// TransactionOptions are the options for running a transaction.
-type TransactionOptions struct {
- // XG is whether the transaction can cross multiple entity groups. In
- // comparison, a single group transaction is one where all datastore keys
- // used have the same root key. Note that cross group transactions do not
- // have the same behavior as single group transactions. In particular, it
- // is much more likely to see partially applied transactions in different
- // entity groups, in global queries.
- // It is valid to set XG to true even if the transaction is within a
- // single entity group.
- XG bool
- // Attempts controls the number of retries to perform when commits fail
- // due to a conflicting transaction. If omitted, it defaults to 3.
- Attempts int
-}
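
RunInTransaction above is typically used for a read-modify-write of one entity, retried on conflict. A minimal sketch under the assumption of a hypothetical Counter kind, key name and route:

package main

import (
	"net/http"

	"golang.org/x/net/context"

	"google.golang.org/appengine"
	"google.golang.org/appengine/datastore"
)

// Counter is a hypothetical entity used only for this example.
type Counter struct {
	Count int64
}

// increment bumps a named counter inside a transaction, retrying up to
// five times on conflict.
func increment(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	key := datastore.NewKey(ctx, "Counter", "hits", 0, nil)

	err := datastore.RunInTransaction(ctx, func(tc context.Context) error {
		var c Counter
		if err := datastore.Get(tc, key, &c); err != nil && err != datastore.ErrNoSuchEntity {
			return err
		}
		c.Count++
		_, err := datastore.Put(tc, key, &c)
		return err
	}, &datastore.TransactionOptions{Attempts: 5})
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Write([]byte("ok"))
}

func main() {
	http.HandleFunc("/increment", increment)
	appengine.Main()
}
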
diff --git a/vendor/google.golang.org/appengine/delay/delay.go b/vendor/google.golang.org/appengine/delay/delay.go
deleted file mode 100644
index 6a71da7..0000000
--- a/vendor/google.golang.org/appengine/delay/delay.go
+++ /dev/null
@@ -1,275 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package delay provides a way to execute code outside the scope of a
-user request by using the taskqueue API.
-
-To declare a function that may be executed later, call Func
-in a top-level assignment context, passing it an arbitrary string key
-and a function whose first argument is of type context.Context.
- var laterFunc = delay.Func("key", myFunc)
-It is also possible to use a function literal.
- var laterFunc = delay.Func("key", func(c context.Context, x string) {
- // ...
- })
-
-To call a function, invoke its Call method.
- laterFunc.Call(c, "something")
-A function may be called any number of times. If the function has any
-return arguments, and the last one is of type error, the function may
-return a non-nil error to signal that the function should be retried.
-
-The arguments to functions may be of any type that is encodable by the gob
-package. If an argument is of interface type, it is the client's responsibility
-to register with the gob package whatever concrete type may be passed for that
-argument; see http://golang.org/pkg/gob/#Register for details.
-
-Any errors during initialization or execution of a function will be
-logged to the application logs. Error logs that occur during initialization will
-be associated with the request that invoked the Call method.
-
-The state of a function invocation that has not yet successfully
-executed is preserved by combining the file name in which it is declared
-with the string key that was passed to the Func function. Updating an app
-with pending function invocations is safe as long as the relevant
-functions have the (filename, key) combination preserved.
-
-The delay package uses the Task Queue API to create tasks that call the
-reserved application path "/_ah/queue/go/delay".
-This path must not be marked as "login: required" in app.yaml;
-it must be marked as "login: admin" or have no access restriction.
-*/
-package delay
-
-import (
- "bytes"
- "encoding/gob"
- "errors"
- "fmt"
- "net/http"
- "reflect"
- "runtime"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/log"
- "google.golang.org/appengine/taskqueue"
-)
-
-// Function represents a function that may have a delayed invocation.
-type Function struct {
- fv reflect.Value // Kind() == reflect.Func
- key string
- err error // any error during initialization
-}
-
-const (
- // The HTTP path for invocations.
- path = "/_ah/queue/go/delay"
- // Use the default queue.
- queue = ""
-)
-
-var (
- // registry of all delayed functions
- funcs = make(map[string]*Function)
-
- // precomputed types
- contextType = reflect.TypeOf((*context.Context)(nil)).Elem()
- errorType = reflect.TypeOf((*error)(nil)).Elem()
-
- // errors
- errFirstArg = errors.New("first argument must be context.Context")
-)
-
-// Func declares a new Function. The second argument must be a function with a
-// first argument of type context.Context.
-// This function must be called at program initialization time. That means it
-// must be called in a global variable declaration or from an init function.
-// This restriction is necessary because the instance that delays a function
-// call may not be the one that executes it. Only the code executed at program
-// initialization time is guaranteed to have been run by an instance before it
-// receives a request.
-func Func(key string, i interface{}) *Function {
- f := &Function{fv: reflect.ValueOf(i)}
-
- // Derive unique, somewhat stable key for this func.
- _, file, _, _ := runtime.Caller(1)
- f.key = file + ":" + key
-
- t := f.fv.Type()
- if t.Kind() != reflect.Func {
- f.err = errors.New("not a function")
- return f
- }
- if t.NumIn() == 0 || t.In(0) != contextType {
- f.err = errFirstArg
- return f
- }
-
- // Register the function's arguments with the gob package.
- // This is required because they are marshaled inside a []interface{}.
- // gob.Register only expects to be called during initialization;
- // that's fine because this function expects the same.
- for i := 0; i < t.NumIn(); i++ {
- // Only concrete types may be registered. If the argument has
- // interface type, the client is responsible for registering the
- // concrete types it will hold.
- if t.In(i).Kind() == reflect.Interface {
- continue
- }
- gob.Register(reflect.Zero(t.In(i)).Interface())
- }
-
- funcs[f.key] = f
- return f
-}
-
-type invocation struct {
- Key string
- Args []interface{}
-}
-
-// Call invokes a delayed function.
-// err := f.Call(c, ...)
-// is equivalent to
-// t, _ := f.Task(...)
-// err := taskqueue.Add(c, t, "")
-func (f *Function) Call(c context.Context, args ...interface{}) error {
- t, err := f.Task(args...)
- if err != nil {
- return err
- }
- _, err = taskqueueAdder(c, t, queue)
- return err
-}
-
-// Task creates a Task that will invoke the function.
-// Its parameters may be tweaked before adding it to a queue.
-// Users should not modify the Path or Payload fields of the returned Task.
-func (f *Function) Task(args ...interface{}) (*taskqueue.Task, error) {
- if f.err != nil {
- return nil, fmt.Errorf("delay: func is invalid: %v", f.err)
- }
-
- nArgs := len(args) + 1 // +1 for the context.Context
- ft := f.fv.Type()
- minArgs := ft.NumIn()
- if ft.IsVariadic() {
- minArgs--
- }
- if nArgs < minArgs {
- return nil, fmt.Errorf("delay: too few arguments to func: %d < %d", nArgs, minArgs)
- }
- if !ft.IsVariadic() && nArgs > minArgs {
- return nil, fmt.Errorf("delay: too many arguments to func: %d > %d", nArgs, minArgs)
- }
-
- // Check arg types.
- for i := 1; i < nArgs; i++ {
- at := reflect.TypeOf(args[i-1])
- var dt reflect.Type
- if i < minArgs {
- // not a variadic arg
- dt = ft.In(i)
- } else {
- // a variadic arg
- dt = ft.In(minArgs).Elem()
- }
- // nil arguments won't have a type, so they need special handling.
- if at == nil {
- // nil interface
- switch dt.Kind() {
- case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
- continue // may be nil
- }
- return nil, fmt.Errorf("delay: argument %d has wrong type: %v is not nilable", i, dt)
- }
- switch at.Kind() {
- case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
- av := reflect.ValueOf(args[i-1])
- if av.IsNil() {
- // nil value in interface; not supported by gob, so we replace it
- // with a nil interface value
- args[i-1] = nil
- }
- }
- if !at.AssignableTo(dt) {
- return nil, fmt.Errorf("delay: argument %d has wrong type: %v is not assignable to %v", i, at, dt)
- }
- }
-
- inv := invocation{
- Key: f.key,
- Args: args,
- }
-
- buf := new(bytes.Buffer)
- if err := gob.NewEncoder(buf).Encode(inv); err != nil {
- return nil, fmt.Errorf("delay: gob encoding failed: %v", err)
- }
-
- return &taskqueue.Task{
- Path: path,
- Payload: buf.Bytes(),
- }, nil
-}
-
-var taskqueueAdder = taskqueue.Add // for testing
-
-func init() {
- http.HandleFunc(path, func(w http.ResponseWriter, req *http.Request) {
- runFunc(appengine.NewContext(req), w, req)
- })
-}
-
-func runFunc(c context.Context, w http.ResponseWriter, req *http.Request) {
- defer req.Body.Close()
-
- var inv invocation
- if err := gob.NewDecoder(req.Body).Decode(&inv); err != nil {
- log.Errorf(c, "delay: failed decoding task payload: %v", err)
- log.Warningf(c, "delay: dropping task")
- return
- }
-
- f := funcs[inv.Key]
- if f == nil {
- log.Errorf(c, "delay: no func with key %q found", inv.Key)
- log.Warningf(c, "delay: dropping task")
- return
- }
-
- ft := f.fv.Type()
- in := []reflect.Value{reflect.ValueOf(c)}
- for _, arg := range inv.Args {
- var v reflect.Value
- if arg != nil {
- v = reflect.ValueOf(arg)
- } else {
- // Task was passed a nil argument, so we must construct
- // the zero value for the argument here.
- n := len(in) // we're constructing the nth argument
- var at reflect.Type
- if !ft.IsVariadic() || n < ft.NumIn()-1 {
- at = ft.In(n)
- } else {
- at = ft.In(ft.NumIn() - 1).Elem()
- }
- v = reflect.Zero(at)
- }
- in = append(in, v)
- }
- out := f.fv.Call(in)
-
- if n := ft.NumOut(); n > 0 && ft.Out(n-1) == errorType {
- if errv := out[n-1]; !errv.IsNil() {
- log.Errorf(c, "delay: func failed (will retry): %v", errv.Interface())
- w.WriteHeader(http.StatusInternalServerError)
- return
- }
- }
-}
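
Putting the delay API above together: Func must run at program initialization, and Call enqueues a task that is later delivered to /_ah/queue/go/delay. A minimal sketch; the key string, route and message are arbitrary examples:

package main

import (
	"net/http"

	"golang.org/x/net/context"

	"google.golang.org/appengine"
	"google.golang.org/appengine/delay"
	"google.golang.org/appengine/log"
)

// expensiveFunc is declared in a top-level assignment, as Func requires.
var expensiveFunc = delay.Func("expensive", func(ctx context.Context, msg string) error {
	log.Infof(ctx, "processing %q in a task", msg)
	return nil // a non-nil error would make the task queue retry
})

// kickoff enqueues the delayed work and returns immediately.
func kickoff(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	if err := expensiveFunc.Call(ctx, "hello"); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Write([]byte("queued"))
}

func main() {
	http.HandleFunc("/kickoff", kickoff)
	appengine.Main()
}
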
diff --git a/vendor/google.golang.org/appengine/demos/guestbook/app.yaml b/vendor/google.golang.org/appengine/demos/guestbook/app.yaml
deleted file mode 100644
index 95c088f..0000000
--- a/vendor/google.golang.org/appengine/demos/guestbook/app.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
-# Demo application for Managed VMs.
-runtime: go
-vm: true
-api_version: go1
-
-manual_scaling:
- instances: 1
-
-handlers:
-# Favicon. Without this, the browser hits this once per page view.
-- url: /favicon.ico
- static_files: favicon.ico
- upload: favicon.ico
-
-# Main app. All the real work is here.
-- url: /.*
- script: _go_app
diff --git a/vendor/google.golang.org/appengine/demos/guestbook/favicon.ico b/vendor/google.golang.org/appengine/demos/guestbook/favicon.ico
deleted file mode 100644
index 1a71ea772e972df2e955b36261ae5d7f53b9c9b1..0000000000000000000000000000000000000000
Binary files a/vendor/google.golang.org/appengine/demos/guestbook/favicon.ico and /dev/null differ
diff --git a/vendor/google.golang.org/appengine/demos/guestbook/templates/guestbook.html b/vendor/google.golang.org/appengine/demos/guestbook/templates/guestbook.html
deleted file mode 100644
--- a/vendor/google.golang.org/appengine/demos/guestbook/templates/guestbook.html
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
- Guestbook Demo
-
-
-
- {{with .Email}}You are currently logged in as {{.}}.{{end}}
- {{with .Login}}Sign in{{end}}
- {{with .Logout}}Sign out{{end}}
-
-
- {{range .Greetings }}
-
- {{with .Author}}{{.}}{{else}}An anonymous person{{end}}
- on {{.Date.Format "3:04pm, Mon 2 Jan"}}
- wrote
- {{.Content}}
-
- {{end}}
-
-
-
-
diff --git a/vendor/google.golang.org/appengine/demos/helloworld/app.yaml b/vendor/google.golang.org/appengine/demos/helloworld/app.yaml
deleted file mode 100644
index e0b2eb7..0000000
--- a/vendor/google.golang.org/appengine/demos/helloworld/app.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-runtime: go
-api_version: go1
-vm: true
-
-manual_scaling:
- instances: 1
-
-handlers:
-- url: /favicon.ico
- static_files: favicon.ico
- upload: favicon.ico
-- url: /.*
- script: _go_app
diff --git a/vendor/google.golang.org/appengine/demos/helloworld/favicon.ico b/vendor/google.golang.org/appengine/demos/helloworld/favicon.ico
deleted file mode 100644
index f19c04d270a3865384ce3db41412448692b8cba4..0000000000000000000000000000000000000000
Binary files a/vendor/google.golang.org/appengine/demos/helloworld/favicon.ico and /dev/null differ
diff --git a/vendor/google.golang.org/appengine/demos/helloworld/helloworld.go b/vendor/google.golang.org/appengine/demos/helloworld/helloworld.go
deleted file mode 100644
index fcd9b0b..0000000
--- a/vendor/google.golang.org/appengine/demos/helloworld/helloworld.go
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// This example only works on Managed VMs.
-// +build !appengine
-
-package main
-
-import (
- "html/template"
- "net/http"
- "time"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/log"
-)
-
-var initTime = time.Now()
-
-func main() {
- http.HandleFunc("/", handle)
- appengine.Main()
-}
-
-func handle(w http.ResponseWriter, r *http.Request) {
- if r.URL.Path != "/" {
- http.NotFound(w, r)
- return
- }
-
- ctx := appengine.NewContext(r)
- log.Infof(ctx, "Serving the front page.")
-
- tmpl.Execute(w, time.Since(initTime))
-}
-
-var tmpl = template.Must(template.New("front").Parse(`
-
-
-
-Hello, World! 세상아 안녕!
-
-
-
-This instance has been running for {{.}}.
-
-
-
-`))
diff --git a/vendor/google.golang.org/appengine/errors.go b/vendor/google.golang.org/appengine/errors.go
deleted file mode 100644
index 16d0772..0000000
--- a/vendor/google.golang.org/appengine/errors.go
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// This file provides error functions for common API failure modes.
-
-package appengine
-
-import (
- "fmt"
-
- "google.golang.org/appengine/internal"
-)
-
-// IsOverQuota reports whether err represents an API call failure
-// due to insufficient available quota.
-func IsOverQuota(err error) bool {
- callErr, ok := err.(*internal.CallError)
- return ok && callErr.Code == 4
-}
-
-// MultiError is returned by batch operations when there are errors with
-// particular elements. Errors will be in a one-to-one correspondence with
-// the input elements; successful elements will have a nil entry.
-type MultiError []error
-
-func (m MultiError) Error() string {
- s, n := "", 0
- for _, e := range m {
- if e != nil {
- if n == 0 {
- s = e.Error()
- }
- n++
- }
- }
- switch n {
- case 0:
- return "(0 errors)"
- case 1:
- return s
- case 2:
- return s + " (and 1 other error)"
- }
- return fmt.Sprintf("%s (and %d other errors)", s, n-1)
-}
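
For context on the MultiError semantics removed above (one error slot per batch element), here is a minimal, hypothetical sketch of a typical caller; the greeting type, the keys slice, and the use of datastore.GetMulti are illustrative assumptions rather than code from this patch:

package example

import (
	"golang.org/x/net/context"

	"google.golang.org/appengine"
	"google.golang.org/appengine/datastore"
	"google.golang.org/appengine/log"
)

// greeting is a placeholder entity type for the sketch.
type greeting struct{ Content string }

// reportBatchErrors relies on the documented one-to-one mapping:
// error slot i corresponds to keys[i].
func reportBatchErrors(ctx context.Context, keys []*datastore.Key) {
	dst := make([]greeting, len(keys))
	if err := datastore.GetMulti(ctx, keys, dst); err != nil {
		if me, ok := err.(appengine.MultiError); ok {
			for i, e := range me {
				if e != nil {
					log.Errorf(ctx, "entity for key %v: %v", keys[i], e)
				}
			}
			return
		}
		log.Errorf(ctx, "batch get failed: %v", err)
	}
}
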
diff --git a/vendor/google.golang.org/appengine/file/file.go b/vendor/google.golang.org/appengine/file/file.go
deleted file mode 100644
index c3cd58b..0000000
--- a/vendor/google.golang.org/appengine/file/file.go
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package file provides helper functions for using Google Cloud Storage.
-package file
-
-import (
- "fmt"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- aipb "google.golang.org/appengine/internal/app_identity"
-)
-
-// DefaultBucketName returns the name of this application's
-// default Google Cloud Storage bucket.
-func DefaultBucketName(c context.Context) (string, error) {
- req := &aipb.GetDefaultGcsBucketNameRequest{}
- res := &aipb.GetDefaultGcsBucketNameResponse{}
-
- err := internal.Call(c, "app_identity_service", "GetDefaultGcsBucketName", req, res)
- if err != nil {
- return "", fmt.Errorf("file: no default bucket name returned in RPC response: %v", res)
- }
- return res.GetDefaultGcsBucketName(), nil
-}
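
A brief usage sketch for the helper being deleted here, assuming an ordinary in-flight App Engine request; the handler name and response body are illustrative:

package example

import (
	"net/http"

	"google.golang.org/appengine"
	"google.golang.org/appengine/file"
	"google.golang.org/appengine/log"
)

// bucketHandler resolves the app's default GCS bucket via the
// app_identity_service RPC that DefaultBucketName wraps.
func bucketHandler(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	bucket, err := file.DefaultBucketName(ctx)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	log.Infof(ctx, "default GCS bucket: %s", bucket)
	w.Write([]byte(bucket))
}
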
diff --git a/vendor/google.golang.org/appengine/identity.go b/vendor/google.golang.org/appengine/identity.go
deleted file mode 100644
index b8dcf8f..0000000
--- a/vendor/google.golang.org/appengine/identity.go
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package appengine
-
-import (
- "time"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/app_identity"
- modpb "google.golang.org/appengine/internal/modules"
-)
-
-// AppID returns the application ID for the current application.
-// The string will be a plain application ID (e.g. "appid"), with a
-// domain prefix for custom domain deployments (e.g. "example.com:appid").
-func AppID(c context.Context) string { return internal.AppID(c) }
-
-// DefaultVersionHostname returns the standard hostname of the default version
-// of the current application (e.g. "my-app.appspot.com"). This is suitable for
-// use in constructing URLs.
-func DefaultVersionHostname(c context.Context) string {
- return internal.DefaultVersionHostname(c)
-}
-
-// ModuleName returns the module name of the current instance.
-func ModuleName(c context.Context) string {
- return internal.ModuleName(c)
-}
-
-// ModuleHostname returns a hostname of a module instance.
-// If module is the empty string, it refers to the module of the current instance.
-// If version is empty, it refers to the version of the current instance if valid,
-// or the default version of the module of the current instance.
-// If instance is empty, ModuleHostname returns the load-balancing hostname.
-func ModuleHostname(c context.Context, module, version, instance string) (string, error) {
- req := &modpb.GetHostnameRequest{}
- if module != "" {
- req.Module = &module
- }
- if version != "" {
- req.Version = &version
- }
- if instance != "" {
- req.Instance = &instance
- }
- res := &modpb.GetHostnameResponse{}
- if err := internal.Call(c, "modules", "GetHostname", req, res); err != nil {
- return "", err
- }
- return *res.Hostname, nil
-}
-
-// VersionID returns the version ID for the current application.
-// It will be of the form "X.Y", where X is specified in app.yaml,
-// and Y is a number generated when each version of the app is uploaded.
-// It does not include a module name.
-func VersionID(c context.Context) string { return internal.VersionID(c) }
-
-// InstanceID returns a mostly-unique identifier for this instance.
-func InstanceID() string { return internal.InstanceID() }
-
-// Datacenter returns an identifier for the datacenter that the instance is running in.
-func Datacenter(c context.Context) string { return internal.Datacenter(c) }
-
-// ServerSoftware returns the App Engine release version.
-// In production, it looks like "Google App Engine/X.Y.Z".
-// In the development appserver, it looks like "Development/X.Y".
-func ServerSoftware() string { return internal.ServerSoftware() }
-
-// RequestID returns a string that uniquely identifies the request.
-func RequestID(c context.Context) string { return internal.RequestID(c) }
-
-// AccessToken generates an OAuth2 access token for the specified scopes on
-// behalf of service account of this application. This token will expire after
-// the returned time.
-func AccessToken(c context.Context, scopes ...string) (token string, expiry time.Time, err error) {
- req := &pb.GetAccessTokenRequest{Scope: scopes}
- res := &pb.GetAccessTokenResponse{}
-
- err = internal.Call(c, "app_identity_service", "GetAccessToken", req, res)
- if err != nil {
- return "", time.Time{}, err
- }
- return res.GetAccessToken(), time.Unix(res.GetExpirationTime(), 0), nil
-}
-
-// Certificate represents a public certificate for the app.
-type Certificate struct {
- KeyName string
- Data []byte // PEM-encoded X.509 certificate
-}
-
-// PublicCertificates retrieves the public certificates for the app.
-// They can be used to verify a signature returned by SignBytes.
-func PublicCertificates(c context.Context) ([]Certificate, error) {
- req := &pb.GetPublicCertificateForAppRequest{}
- res := &pb.GetPublicCertificateForAppResponse{}
- if err := internal.Call(c, "app_identity_service", "GetPublicCertificatesForApp", req, res); err != nil {
- return nil, err
- }
- var cs []Certificate
- for _, pc := range res.PublicCertificateList {
- cs = append(cs, Certificate{
- KeyName: pc.GetKeyName(),
- Data: []byte(pc.GetX509CertificatePem()),
- })
- }
- return cs, nil
-}
-
-// ServiceAccount returns a string representing the service account name, in
-// the form of an email address (typically app_id@appspot.gserviceaccount.com).
-func ServiceAccount(c context.Context) (string, error) {
- req := &pb.GetServiceAccountNameRequest{}
- res := &pb.GetServiceAccountNameResponse{}
-
- err := internal.Call(c, "app_identity_service", "GetServiceAccountName", req, res)
- if err != nil {
- return "", err
- }
- return res.GetServiceAccountName(), err
-}
-
-// SignBytes signs bytes using a private key unique to your application.
-func SignBytes(c context.Context, bytes []byte) (keyName string, signature []byte, err error) {
- req := &pb.SignForAppRequest{BytesToSign: bytes}
- res := &pb.SignForAppResponse{}
-
- if err := internal.Call(c, "app_identity_service", "SignForApp", req, res); err != nil {
- return "", nil, err
- }
- return res.GetKeyName(), res.GetSignatureBytes(), nil
-}
-
-func init() {
- internal.RegisterErrorCodeMap("app_identity_service", pb.AppIdentityServiceError_ErrorCode_name)
- internal.RegisterErrorCodeMap("modules", modpb.ModulesServiceError_ErrorCode_name)
-}
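
To make the identity helpers above concrete, a rough sketch that mints an access token and signs a payload; the OAuth scope and the payload bytes are assumptions, and a real caller would cache the token until its expiry:

package example

import (
	"net/http"

	"google.golang.org/appengine"
	"google.golang.org/appengine/log"
)

func identityHandler(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)

	// AccessToken returns a bearer token for the app's service account
	// together with the time at which it expires.
	tok, expiry, err := appengine.AccessToken(ctx, "https://www.googleapis.com/auth/devstorage.read_only")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	log.Infof(ctx, "token of length %d expires at %v", len(tok), expiry)

	// SignBytes pairs with PublicCertificates: the returned key name
	// identifies which certificate verifies the signature.
	keyName, sig, err := appengine.SignBytes(ctx, []byte("payload"))
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	log.Infof(ctx, "signed with key %q (%d signature bytes)", keyName, len(sig))
}
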
diff --git a/vendor/google.golang.org/appengine/image/image.go b/vendor/google.golang.org/appengine/image/image.go
deleted file mode 100644
index 780d53e..0000000
--- a/vendor/google.golang.org/appengine/image/image.go
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package image provides image services.
-package image
-
-import (
- "fmt"
- "net/url"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/image"
-)
-
-type ServingURLOptions struct {
- Secure bool // whether the URL should use HTTPS
-
- // Size must be between zero and 1600.
- // If Size is non-zero, a resized version of the image is served,
- // and Size is the served image's longest dimension. The aspect ratio is preserved.
- // If Crop is true the image is cropped from the center instead of being resized.
- Size int
- Crop bool
-}
-
-// ServingURL returns a URL that will serve an image from Blobstore.
-func ServingURL(c context.Context, key appengine.BlobKey, opts *ServingURLOptions) (*url.URL, error) {
- req := &pb.ImagesGetUrlBaseRequest{
- BlobKey: (*string)(&key),
- }
- if opts != nil && opts.Secure {
- req.CreateSecureUrl = &opts.Secure
- }
- res := &pb.ImagesGetUrlBaseResponse{}
- if err := internal.Call(c, "images", "GetUrlBase", req, res); err != nil {
- return nil, err
- }
-
- // The URL may have suffixes added to dynamically resize or crop:
- // - adding "=s32" will serve the image resized to 32 pixels, preserving the aspect ratio.
- // - adding "=s32-c" is the same as "=s32" except it will be cropped.
- u := *res.Url
- if opts != nil && opts.Size > 0 {
- u += fmt.Sprintf("=s%d", opts.Size)
- if opts.Crop {
- u += "-c"
- }
- }
- return url.Parse(u)
-}
-
-// DeleteServingURL deletes the serving URL for an image.
-func DeleteServingURL(c context.Context, key appengine.BlobKey) error {
- req := &pb.ImagesDeleteUrlBaseRequest{
- BlobKey: (*string)(&key),
- }
- res := &pb.ImagesDeleteUrlBaseResponse{}
- return internal.Call(c, "images", "DeleteUrlBase", req, res)
-}
-
-func init() {
- internal.RegisterErrorCodeMap("images", pb.ImagesServiceError_ErrorCode_name)
-}
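
A small sketch of the serving-URL options documented above; with Size 200 and Crop set, the returned URL carries the "=s200-c" suffix. The blob key is assumed to come from elsewhere (e.g. an upload handler):

package example

import (
	"net/http"

	"google.golang.org/appengine"
	"google.golang.org/appengine/image"
)

// thumbnailURL returns an HTTPS serving URL for a 200px, center-cropped
// rendition of the image stored under key.
func thumbnailURL(r *http.Request, key appengine.BlobKey) (string, error) {
	ctx := appengine.NewContext(r)
	u, err := image.ServingURL(ctx, key, &image.ServingURLOptions{
		Secure: true,
		Size:   200,
		Crop:   true,
	})
	if err != nil {
		return "", err
	}
	return u.String(), nil
}
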
diff --git a/vendor/google.golang.org/appengine/internal/aetesting/fake.go b/vendor/google.golang.org/appengine/internal/aetesting/fake.go
deleted file mode 100644
index 6f5c197..0000000
--- a/vendor/google.golang.org/appengine/internal/aetesting/fake.go
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package aetesting provides utilities for testing App Engine packages.
-// This is not for testing user applications.
-package aetesting
-
-import (
- "fmt"
- "reflect"
- "testing"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
-)
-
-// FakeSingleContext returns a context whose Call invocations will be serviced
-// by f, which should be a function that has two arguments of the input and output
-// protocol buffer type, and one error return.
-func FakeSingleContext(t *testing.T, service, method string, f interface{}) context.Context {
- fv := reflect.ValueOf(f)
- if fv.Kind() != reflect.Func {
- t.Fatal("not a function")
- }
- ft := fv.Type()
- if ft.NumIn() != 2 || ft.NumOut() != 1 {
- t.Fatalf("f has %d in and %d out, want 2 in and 1 out", ft.NumIn(), ft.NumOut())
- }
- for i := 0; i < 2; i++ {
- at := ft.In(i)
- if !at.Implements(protoMessageType) {
- t.Fatalf("arg %d does not implement proto.Message", i)
- }
- }
- if ft.Out(0) != errorType {
- t.Fatalf("f's return is %v, want error", ft.Out(0))
- }
- s := &single{
- t: t,
- service: service,
- method: method,
- f: fv,
- }
- return internal.WithCallOverride(context.Background(), s.call)
-}
-
-var (
- protoMessageType = reflect.TypeOf((*proto.Message)(nil)).Elem()
- errorType = reflect.TypeOf((*error)(nil)).Elem()
-)
-
-type single struct {
- t *testing.T
- service, method string
- f reflect.Value
-}
-
-func (s *single) call(ctx context.Context, service, method string, in, out proto.Message) error {
- if service == "__go__" {
- if method == "GetNamespace" {
- return nil // always yield an empty namespace
- }
- return fmt.Errorf("Unknown API call /%s.%s", service, method)
- }
- if service != s.service || method != s.method {
- s.t.Fatalf("Unexpected call to /%s.%s", service, method)
- }
- ins := []reflect.Value{
- reflect.ValueOf(in),
- reflect.ValueOf(out),
- }
- outs := s.f.Call(ins)
- if outs[0].IsNil() {
- return nil
- }
- return outs[0].Interface().(error)
-}
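
A hypothetical test sketch showing the FakeSingleContext contract in action: the stub function takes the request and response protos and returns an error, and the faked context then drives a package that calls through internal.Call (file.DefaultBucketName here); the bucket name is made up:

package example

import (
	"testing"

	"github.com/golang/protobuf/proto"

	"google.golang.org/appengine/file"
	"google.golang.org/appengine/internal/aetesting"
	pb "google.golang.org/appengine/internal/app_identity"
)

func TestDefaultBucketName(t *testing.T) {
	ctx := aetesting.FakeSingleContext(t, "app_identity_service", "GetDefaultGcsBucketName",
		func(in *pb.GetDefaultGcsBucketNameRequest, out *pb.GetDefaultGcsBucketNameResponse) error {
			out.DefaultGcsBucketName = proto.String("fake-bucket")
			return nil
		})

	got, err := file.DefaultBucketName(ctx)
	if err != nil || got != "fake-bucket" {
		t.Fatalf("DefaultBucketName = %q, %v; want %q, nil", got, err, "fake-bucket")
	}
}
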
diff --git a/vendor/google.golang.org/appengine/internal/api.go b/vendor/google.golang.org/appengine/internal/api.go
deleted file mode 100644
index aa139d4..0000000
--- a/vendor/google.golang.org/appengine/internal/api.go
+++ /dev/null
@@ -1,640 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build !appengine
-
-package internal
-
-import (
- "bytes"
- "errors"
- "fmt"
- "io/ioutil"
- "log"
- "net"
- "net/http"
- "net/url"
- "os"
- "runtime"
- "strconv"
- "strings"
- "sync"
- "sync/atomic"
- "time"
-
- "github.com/golang/protobuf/proto"
- netcontext "golang.org/x/net/context"
-
- basepb "google.golang.org/appengine/internal/base"
- logpb "google.golang.org/appengine/internal/log"
- remotepb "google.golang.org/appengine/internal/remote_api"
-)
-
-const (
- apiPath = "/rpc_http"
-)
-
-var (
- // Incoming headers.
- ticketHeader = http.CanonicalHeaderKey("X-AppEngine-API-Ticket")
- dapperHeader = http.CanonicalHeaderKey("X-Google-DapperTraceInfo")
- traceHeader = http.CanonicalHeaderKey("X-Cloud-Trace-Context")
- curNamespaceHeader = http.CanonicalHeaderKey("X-AppEngine-Current-Namespace")
- userIPHeader = http.CanonicalHeaderKey("X-AppEngine-User-IP")
- remoteAddrHeader = http.CanonicalHeaderKey("X-AppEngine-Remote-Addr")
-
- // Outgoing headers.
- apiEndpointHeader = http.CanonicalHeaderKey("X-Google-RPC-Service-Endpoint")
- apiEndpointHeaderValue = []string{"app-engine-apis"}
- apiMethodHeader = http.CanonicalHeaderKey("X-Google-RPC-Service-Method")
- apiMethodHeaderValue = []string{"/VMRemoteAPI.CallRemoteAPI"}
- apiDeadlineHeader = http.CanonicalHeaderKey("X-Google-RPC-Service-Deadline")
- apiContentType = http.CanonicalHeaderKey("Content-Type")
- apiContentTypeValue = []string{"application/octet-stream"}
- logFlushHeader = http.CanonicalHeaderKey("X-AppEngine-Log-Flush-Count")
-
- apiHTTPClient = &http.Client{
- Transport: &http.Transport{
- Proxy: http.ProxyFromEnvironment,
- Dial: limitDial,
- },
- }
-)
-
-func apiURL() *url.URL {
- host, port := "appengine.googleapis.internal", "10001"
- if h := os.Getenv("API_HOST"); h != "" {
- host = h
- }
- if p := os.Getenv("API_PORT"); p != "" {
- port = p
- }
- return &url.URL{
- Scheme: "http",
- Host: host + ":" + port,
- Path: apiPath,
- }
-}
-
-func handleHTTP(w http.ResponseWriter, r *http.Request) {
- c := &context{
- req: r,
- outHeader: w.Header(),
- apiURL: apiURL(),
- }
- stopFlushing := make(chan int)
-
- ctxs.Lock()
- ctxs.m[r] = c
- ctxs.Unlock()
- defer func() {
- ctxs.Lock()
- delete(ctxs.m, r)
- ctxs.Unlock()
- }()
-
- // Patch up RemoteAddr so it looks reasonable.
- if addr := r.Header.Get(userIPHeader); addr != "" {
- r.RemoteAddr = addr
- } else if addr = r.Header.Get(remoteAddrHeader); addr != "" {
- r.RemoteAddr = addr
- } else {
- // Should not normally reach here, but pick a sensible default anyway.
- r.RemoteAddr = "127.0.0.1"
- }
- // The address in the headers will most likely be of these forms:
- // 123.123.123.123
- // 2001:db8::1
- // net/http.Request.RemoteAddr is specified to be in "IP:port" form.
- if _, _, err := net.SplitHostPort(r.RemoteAddr); err != nil {
- // Assume the remote address is only a host; add a default port.
- r.RemoteAddr = net.JoinHostPort(r.RemoteAddr, "80")
- }
-
- // Start goroutine responsible for flushing app logs.
- // This is done after adding c to ctx.m (and stopped before removing it)
- // because flushing logs requires making an API call.
- go c.logFlusher(stopFlushing)
-
- executeRequestSafely(c, r)
- c.outHeader = nil // make sure header changes aren't respected any more
-
- stopFlushing <- 1 // any logging beyond this point will be dropped
-
- // Flush any pending logs asynchronously.
- c.pendingLogs.Lock()
- flushes := c.pendingLogs.flushes
- if len(c.pendingLogs.lines) > 0 {
- flushes++
- }
- c.pendingLogs.Unlock()
- go c.flushLog(false)
- w.Header().Set(logFlushHeader, strconv.Itoa(flushes))
-
- // Avoid nil Write call if c.Write is never called.
- if c.outCode != 0 {
- w.WriteHeader(c.outCode)
- }
- if c.outBody != nil {
- w.Write(c.outBody)
- }
-}
-
-func executeRequestSafely(c *context, r *http.Request) {
- defer func() {
- if x := recover(); x != nil {
- logf(c, 4, "%s", renderPanic(x)) // 4 == critical
- c.outCode = 500
- }
- }()
-
- http.DefaultServeMux.ServeHTTP(c, r)
-}
-
-func renderPanic(x interface{}) string {
- buf := make([]byte, 16<<10) // 16 KB should be plenty
- buf = buf[:runtime.Stack(buf, false)]
-
- // Remove the first few stack frames:
- // this func
- // the recover closure in the caller
- // That will root the stack trace at the site of the panic.
- const (
- skipStart = "internal.renderPanic"
- skipFrames = 2
- )
- start := bytes.Index(buf, []byte(skipStart))
- p := start
- for i := 0; i < skipFrames*2 && p+1 < len(buf); i++ {
- p = bytes.IndexByte(buf[p+1:], '\n') + p + 1
- if p < 0 {
- break
- }
- }
- if p >= 0 {
- // buf[start:p+1] is the block to remove.
- // Copy buf[p+1:] over buf[start:] and shrink buf.
- copy(buf[start:], buf[p+1:])
- buf = buf[:len(buf)-(p+1-start)]
- }
-
- // Add panic heading.
- head := fmt.Sprintf("panic: %v\n\n", x)
- if len(head) > len(buf) {
- // Extremely unlikely to happen.
- return head
- }
- copy(buf[len(head):], buf)
- copy(buf, head)
-
- return string(buf)
-}
-
-var ctxs = struct {
- sync.Mutex
- m map[*http.Request]*context
- bg *context // background context, lazily initialized
- // dec is used by tests to decorate the netcontext.Context returned
- // for a given request. This allows tests to add overrides (such as
- // WithAppIDOverride) to the context. The map is nil outside tests.
- dec map[*http.Request]func(netcontext.Context) netcontext.Context
-}{
- m: make(map[*http.Request]*context),
-}
-
-// context represents the context of an in-flight HTTP request.
-// It implements the appengine.Context and http.ResponseWriter interfaces.
-type context struct {
- req *http.Request
-
- outCode int
- outHeader http.Header
- outBody []byte
-
- pendingLogs struct {
- sync.Mutex
- lines []*logpb.UserAppLogLine
- flushes int
- }
-
- apiURL *url.URL
-}
-
-var contextKey = "holds a *context"
-
-func fromContext(ctx netcontext.Context) *context {
- c, _ := ctx.Value(&contextKey).(*context)
- return c
-}
-
-func withContext(parent netcontext.Context, c *context) netcontext.Context {
- ctx := netcontext.WithValue(parent, &contextKey, c)
- if ns := c.req.Header.Get(curNamespaceHeader); ns != "" {
- ctx = withNamespace(ctx, ns)
- }
- return ctx
-}
-
-func toContext(c *context) netcontext.Context {
- return withContext(netcontext.Background(), c)
-}
-
-func IncomingHeaders(ctx netcontext.Context) http.Header {
- if c := fromContext(ctx); c != nil {
- return c.req.Header
- }
- return nil
-}
-
-func WithContext(parent netcontext.Context, req *http.Request) netcontext.Context {
- ctxs.Lock()
- c := ctxs.m[req]
- d := ctxs.dec[req]
- ctxs.Unlock()
-
- if d != nil {
- parent = d(parent)
- }
-
- if c == nil {
- // Someone passed in an http.Request that is not in-flight.
- // We panic here rather than panicking at a later point
- // so that stack traces will be more sensible.
- log.Panic("appengine: NewContext passed an unknown http.Request")
- }
- return withContext(parent, c)
-}
-
-func BackgroundContext() netcontext.Context {
- ctxs.Lock()
- defer ctxs.Unlock()
-
- if ctxs.bg != nil {
- return toContext(ctxs.bg)
- }
-
- // Compute background security ticket.
- appID := partitionlessAppID()
- escAppID := strings.Replace(strings.Replace(appID, ":", "_", -1), ".", "_", -1)
- majVersion := VersionID(nil)
- if i := strings.Index(majVersion, "."); i > 0 {
- majVersion = majVersion[:i]
- }
- ticket := fmt.Sprintf("%s/%s.%s.%s", escAppID, ModuleName(nil), majVersion, InstanceID())
-
- ctxs.bg = &context{
- req: &http.Request{
- Header: http.Header{
- ticketHeader: []string{ticket},
- },
- },
- apiURL: apiURL(),
- }
-
- // TODO(dsymonds): Wire up the shutdown handler to do a final flush.
- go ctxs.bg.logFlusher(make(chan int))
-
- return toContext(ctxs.bg)
-}
-
-// RegisterTestRequest registers the HTTP request req for testing, such that
-// any API calls are sent to the provided URL. It returns a closure to delete
-// the registration.
-// It should only be used by aetest package.
-func RegisterTestRequest(req *http.Request, apiURL *url.URL, decorate func(netcontext.Context) netcontext.Context) func() {
- c := &context{
- req: req,
- apiURL: apiURL,
- }
- ctxs.Lock()
- defer ctxs.Unlock()
- if _, ok := ctxs.m[req]; ok {
- log.Panic("req already associated with context")
- }
- if _, ok := ctxs.dec[req]; ok {
- log.Panic("req already associated with context")
- }
- if ctxs.dec == nil {
- ctxs.dec = make(map[*http.Request]func(netcontext.Context) netcontext.Context)
- }
- ctxs.m[req] = c
- ctxs.dec[req] = decorate
-
- return func() {
- ctxs.Lock()
- delete(ctxs.m, req)
- delete(ctxs.dec, req)
- ctxs.Unlock()
- }
-}
-
-var errTimeout = &CallError{
- Detail: "Deadline exceeded",
- Code: int32(remotepb.RpcError_CANCELLED),
- Timeout: true,
-}
-
-func (c *context) Header() http.Header { return c.outHeader }
-
-// Copied from $GOROOT/src/pkg/net/http/transfer.go. Some response status
-// codes do not permit a response body (nor response entity headers such as
-// Content-Length, Content-Type, etc).
-func bodyAllowedForStatus(status int) bool {
- switch {
- case status >= 100 && status <= 199:
- return false
- case status == 204:
- return false
- case status == 304:
- return false
- }
- return true
-}
-
-func (c *context) Write(b []byte) (int, error) {
- if c.outCode == 0 {
- c.WriteHeader(http.StatusOK)
- }
- if len(b) > 0 && !bodyAllowedForStatus(c.outCode) {
- return 0, http.ErrBodyNotAllowed
- }
- c.outBody = append(c.outBody, b...)
- return len(b), nil
-}
-
-func (c *context) WriteHeader(code int) {
- if c.outCode != 0 {
- logf(c, 3, "WriteHeader called multiple times on request.") // error level
- return
- }
- c.outCode = code
-}
-
-func (c *context) post(body []byte, timeout time.Duration) (b []byte, err error) {
- hreq := &http.Request{
- Method: "POST",
- URL: c.apiURL,
- Header: http.Header{
- apiEndpointHeader: apiEndpointHeaderValue,
- apiMethodHeader: apiMethodHeaderValue,
- apiContentType: apiContentTypeValue,
- apiDeadlineHeader: []string{strconv.FormatFloat(timeout.Seconds(), 'f', -1, 64)},
- },
- Body: ioutil.NopCloser(bytes.NewReader(body)),
- ContentLength: int64(len(body)),
- Host: c.apiURL.Host,
- }
- if info := c.req.Header.Get(dapperHeader); info != "" {
- hreq.Header.Set(dapperHeader, info)
- }
- if info := c.req.Header.Get(traceHeader); info != "" {
- hreq.Header.Set(traceHeader, info)
- }
-
- tr := apiHTTPClient.Transport.(*http.Transport)
-
- var timedOut int32 // atomic; set to 1 if timed out
- t := time.AfterFunc(timeout, func() {
- atomic.StoreInt32(&timedOut, 1)
- tr.CancelRequest(hreq)
- })
- defer t.Stop()
- defer func() {
- // Check if timeout was exceeded.
- if atomic.LoadInt32(&timedOut) != 0 {
- err = errTimeout
- }
- }()
-
- hresp, err := apiHTTPClient.Do(hreq)
- if err != nil {
- return nil, &CallError{
- Detail: fmt.Sprintf("service bridge HTTP failed: %v", err),
- Code: int32(remotepb.RpcError_UNKNOWN),
- }
- }
- defer hresp.Body.Close()
- hrespBody, err := ioutil.ReadAll(hresp.Body)
- if hresp.StatusCode != 200 {
- return nil, &CallError{
- Detail: fmt.Sprintf("service bridge returned HTTP %d (%q)", hresp.StatusCode, hrespBody),
- Code: int32(remotepb.RpcError_UNKNOWN),
- }
- }
- if err != nil {
- return nil, &CallError{
- Detail: fmt.Sprintf("service bridge response bad: %v", err),
- Code: int32(remotepb.RpcError_UNKNOWN),
- }
- }
- return hrespBody, nil
-}
-
-func Call(ctx netcontext.Context, service, method string, in, out proto.Message) error {
- if f, ctx, ok := callOverrideFromContext(ctx); ok {
- return f(ctx, service, method, in, out)
- }
-
- // Handle already-done contexts quickly.
- select {
- case <-ctx.Done():
- return ctx.Err()
- default:
- }
-
- c := fromContext(ctx)
- if c == nil {
- // Give a good error message rather than a panic lower down.
- return errors.New("not an App Engine context")
- }
-
- // Apply transaction modifications if we're in a transaction.
- if t := transactionFromContext(ctx); t != nil {
- if t.finished {
- return errors.New("transaction context has expired")
- }
- applyTransaction(in, &t.transaction)
- }
-
- // Default RPC timeout is 60s.
- timeout := 60 * time.Second
- if deadline, ok := ctx.Deadline(); ok {
- timeout = deadline.Sub(time.Now())
- }
-
- data, err := proto.Marshal(in)
- if err != nil {
- return err
- }
-
- ticket := c.req.Header.Get(ticketHeader)
- req := &remotepb.Request{
- ServiceName: &service,
- Method: &method,
- Request: data,
- RequestId: &ticket,
- }
- hreqBody, err := proto.Marshal(req)
- if err != nil {
- return err
- }
-
- hrespBody, err := c.post(hreqBody, timeout)
- if err != nil {
- return err
- }
-
- res := &remotepb.Response{}
- if err := proto.Unmarshal(hrespBody, res); err != nil {
- return err
- }
- if res.RpcError != nil {
- ce := &CallError{
- Detail: res.RpcError.GetDetail(),
- Code: *res.RpcError.Code,
- }
- switch remotepb.RpcError_ErrorCode(ce.Code) {
- case remotepb.RpcError_CANCELLED, remotepb.RpcError_DEADLINE_EXCEEDED:
- ce.Timeout = true
- }
- return ce
- }
- if res.ApplicationError != nil {
- return &APIError{
- Service: *req.ServiceName,
- Detail: res.ApplicationError.GetDetail(),
- Code: *res.ApplicationError.Code,
- }
- }
- if res.Exception != nil || res.JavaException != nil {
- // This shouldn't happen, but let's be defensive.
- return &CallError{
- Detail: "service bridge returned exception",
- Code: int32(remotepb.RpcError_UNKNOWN),
- }
- }
- return proto.Unmarshal(res.Response, out)
-}
-
-func (c *context) Request() *http.Request {
- return c.req
-}
-
-func (c *context) addLogLine(ll *logpb.UserAppLogLine) {
- // Truncate long log lines.
- // TODO(dsymonds): Check if this is still necessary.
- const lim = 8 << 10
- if len(*ll.Message) > lim {
- suffix := fmt.Sprintf("...(length %d)", len(*ll.Message))
- ll.Message = proto.String((*ll.Message)[:lim-len(suffix)] + suffix)
- }
-
- c.pendingLogs.Lock()
- c.pendingLogs.lines = append(c.pendingLogs.lines, ll)
- c.pendingLogs.Unlock()
-}
-
-var logLevelName = map[int64]string{
- 0: "DEBUG",
- 1: "INFO",
- 2: "WARNING",
- 3: "ERROR",
- 4: "CRITICAL",
-}
-
-func logf(c *context, level int64, format string, args ...interface{}) {
- s := fmt.Sprintf(format, args...)
- s = strings.TrimRight(s, "\n") // Remove any trailing newline characters.
- c.addLogLine(&logpb.UserAppLogLine{
- TimestampUsec: proto.Int64(time.Now().UnixNano() / 1e3),
- Level: &level,
- Message: &s,
- })
- log.Print(logLevelName[level] + ": " + s)
-}
-
-// flushLog attempts to flush any pending logs to the appserver.
-// It should not be called concurrently.
-func (c *context) flushLog(force bool) (flushed bool) {
- c.pendingLogs.Lock()
- // Grab up to 30 MB. We can get away with up to 32 MB, but let's be cautious.
- n, rem := 0, 30<<20
- for ; n < len(c.pendingLogs.lines); n++ {
- ll := c.pendingLogs.lines[n]
- // Each log line will require about 3 bytes of overhead.
- nb := proto.Size(ll) + 3
- if nb > rem {
- break
- }
- rem -= nb
- }
- lines := c.pendingLogs.lines[:n]
- c.pendingLogs.lines = c.pendingLogs.lines[n:]
- c.pendingLogs.Unlock()
-
- if len(lines) == 0 && !force {
- // Nothing to flush.
- return false
- }
-
- rescueLogs := false
- defer func() {
- if rescueLogs {
- c.pendingLogs.Lock()
- c.pendingLogs.lines = append(lines, c.pendingLogs.lines...)
- c.pendingLogs.Unlock()
- }
- }()
-
- buf, err := proto.Marshal(&logpb.UserAppLogGroup{
- LogLine: lines,
- })
- if err != nil {
- log.Printf("internal.flushLog: marshaling UserAppLogGroup: %v", err)
- rescueLogs = true
- return false
- }
-
- req := &logpb.FlushRequest{
- Logs: buf,
- }
- res := &basepb.VoidProto{}
- c.pendingLogs.Lock()
- c.pendingLogs.flushes++
- c.pendingLogs.Unlock()
- if err := Call(toContext(c), "logservice", "Flush", req, res); err != nil {
- log.Printf("internal.flushLog: Flush RPC: %v", err)
- rescueLogs = true
- return false
- }
- return true
-}
-
-const (
- // Log flushing parameters.
- flushInterval = 1 * time.Second
- forceFlushInterval = 60 * time.Second
-)
-
-func (c *context) logFlusher(stop <-chan int) {
- lastFlush := time.Now()
- tick := time.NewTicker(flushInterval)
- for {
- select {
- case <-stop:
- // Request finished.
- tick.Stop()
- return
- case <-tick.C:
- force := time.Now().Sub(lastFlush) > forceFlushInterval
- if c.flushLog(force) {
- lastFlush = time.Now()
- }
- }
- }
-}
-
-func ContextForTesting(req *http.Request) netcontext.Context {
- return toContext(&context{req: req})
-}
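
As a standalone illustration of the deadline plumbing in Call above (context deadline, falling back to the 60s default, serialized into the X-Google-RPC-Service-Deadline header), a sketch using only packages already imported by the code being removed; the 5-second timeout is arbitrary:

package main

import (
	"fmt"
	"strconv"
	"time"

	netcontext "golang.org/x/net/context"
)

// deadlineHeaderValue mirrors how Call derives the per-RPC timeout and
// how post serializes it into the deadline header.
func deadlineHeaderValue(ctx netcontext.Context) string {
	timeout := 60 * time.Second // default RPC timeout
	if deadline, ok := ctx.Deadline(); ok {
		timeout = deadline.Sub(time.Now())
	}
	return strconv.FormatFloat(timeout.Seconds(), 'f', -1, 64)
}

func main() {
	ctx, cancel := netcontext.WithTimeout(netcontext.Background(), 5*time.Second)
	defer cancel()
	fmt.Println("X-Google-RPC-Service-Deadline:", deadlineHeaderValue(ctx))
}
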
diff --git a/vendor/google.golang.org/appengine/internal/api_classic.go b/vendor/google.golang.org/appengine/internal/api_classic.go
deleted file mode 100644
index 1c072e9..0000000
--- a/vendor/google.golang.org/appengine/internal/api_classic.go
+++ /dev/null
@@ -1,133 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build appengine
-
-package internal
-
-import (
- "errors"
- "net/http"
- "time"
-
- "appengine"
- "appengine_internal"
- basepb "appengine_internal/base"
-
- "github.com/golang/protobuf/proto"
- netcontext "golang.org/x/net/context"
-)
-
-var contextKey = "holds an appengine.Context"
-
-func fromContext(ctx netcontext.Context) appengine.Context {
- c, _ := ctx.Value(&contextKey).(appengine.Context)
- return c
-}
-
-// This is only for classic App Engine adapters.
-func ClassicContextFromContext(ctx netcontext.Context) appengine.Context {
- return fromContext(ctx)
-}
-
-func withContext(parent netcontext.Context, c appengine.Context) netcontext.Context {
- ctx := netcontext.WithValue(parent, &contextKey, c)
-
- s := &basepb.StringProto{}
- c.Call("__go__", "GetNamespace", &basepb.VoidProto{}, s, nil)
- if ns := s.GetValue(); ns != "" {
- ctx = NamespacedContext(ctx, ns)
- }
-
- return ctx
-}
-
-func IncomingHeaders(ctx netcontext.Context) http.Header {
- if c := fromContext(ctx); c != nil {
- if req, ok := c.Request().(*http.Request); ok {
- return req.Header
- }
- }
- return nil
-}
-
-func WithContext(parent netcontext.Context, req *http.Request) netcontext.Context {
- c := appengine.NewContext(req)
- return withContext(parent, c)
-}
-
-func Call(ctx netcontext.Context, service, method string, in, out proto.Message) error {
- if f, ctx, ok := callOverrideFromContext(ctx); ok {
- return f(ctx, service, method, in, out)
- }
-
- // Handle already-done contexts quickly.
- select {
- case <-ctx.Done():
- return ctx.Err()
- default:
- }
-
- c := fromContext(ctx)
- if c == nil {
- // Give a good error message rather than a panic lower down.
- return errors.New("not an App Engine context")
- }
-
- // Apply transaction modifications if we're in a transaction.
- if t := transactionFromContext(ctx); t != nil {
- if t.finished {
- return errors.New("transaction context has expired")
- }
- applyTransaction(in, &t.transaction)
- }
-
- var opts *appengine_internal.CallOptions
- if d, ok := ctx.Deadline(); ok {
- opts = &appengine_internal.CallOptions{
- Timeout: d.Sub(time.Now()),
- }
- }
-
- err := c.Call(service, method, in, out, opts)
- switch v := err.(type) {
- case *appengine_internal.APIError:
- return &APIError{
- Service: v.Service,
- Detail: v.Detail,
- Code: v.Code,
- }
- case *appengine_internal.CallError:
- return &CallError{
- Detail: v.Detail,
- Code: v.Code,
- Timeout: v.Timeout,
- }
- }
- return err
-}
-
-func handleHTTP(w http.ResponseWriter, r *http.Request) {
- panic("handleHTTP called; this should be impossible")
-}
-
-func logf(c appengine.Context, level int64, format string, args ...interface{}) {
- var fn func(format string, args ...interface{})
- switch level {
- case 0:
- fn = c.Debugf
- case 1:
- fn = c.Infof
- case 2:
- fn = c.Warningf
- case 3:
- fn = c.Errorf
- case 4:
- fn = c.Criticalf
- default:
- // This shouldn't happen.
- fn = c.Criticalf
- }
- fn(format, args...)
-}
diff --git a/vendor/google.golang.org/appengine/internal/api_common.go b/vendor/google.golang.org/appengine/internal/api_common.go
deleted file mode 100644
index ec5383e..0000000
--- a/vendor/google.golang.org/appengine/internal/api_common.go
+++ /dev/null
@@ -1,101 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package internal
-
-import (
- "github.com/golang/protobuf/proto"
- netcontext "golang.org/x/net/context"
-)
-
-type CallOverrideFunc func(ctx netcontext.Context, service, method string, in, out proto.Message) error
-
-var callOverrideKey = "holds []CallOverrideFunc"
-
-func WithCallOverride(ctx netcontext.Context, f CallOverrideFunc) netcontext.Context {
- // We avoid appending to any existing call override
- // so we don't risk overwriting a popped stack below.
- var cofs []CallOverrideFunc
- if uf, ok := ctx.Value(&callOverrideKey).([]CallOverrideFunc); ok {
- cofs = append(cofs, uf...)
- }
- cofs = append(cofs, f)
- return netcontext.WithValue(ctx, &callOverrideKey, cofs)
-}
-
-func callOverrideFromContext(ctx netcontext.Context) (CallOverrideFunc, netcontext.Context, bool) {
- cofs, _ := ctx.Value(&callOverrideKey).([]CallOverrideFunc)
- if len(cofs) == 0 {
- return nil, nil, false
- }
- // We found a list of overrides; grab the last, and reconstitute a
- // context that will hide it.
- f := cofs[len(cofs)-1]
- ctx = netcontext.WithValue(ctx, &callOverrideKey, cofs[:len(cofs)-1])
- return f, ctx, true
-}
-
-type logOverrideFunc func(level int64, format string, args ...interface{})
-
-var logOverrideKey = "holds a logOverrideFunc"
-
-func WithLogOverride(ctx netcontext.Context, f logOverrideFunc) netcontext.Context {
- return netcontext.WithValue(ctx, &logOverrideKey, f)
-}
-
-var appIDOverrideKey = "holds a string, being the full app ID"
-
-func WithAppIDOverride(ctx netcontext.Context, appID string) netcontext.Context {
- return netcontext.WithValue(ctx, &appIDOverrideKey, appID)
-}
-
-var namespaceKey = "holds the namespace string"
-
-func withNamespace(ctx netcontext.Context, ns string) netcontext.Context {
- return netcontext.WithValue(ctx, &namespaceKey, ns)
-}
-
-func NamespaceFromContext(ctx netcontext.Context) string {
- // If there's no namespace, return the empty string.
- ns, _ := ctx.Value(&namespaceKey).(string)
- return ns
-}
-
-// FullyQualifiedAppID returns the fully-qualified application ID.
-// This may contain a partition prefix (e.g. "s~" for High Replication apps),
-// or a domain prefix (e.g. "example.com:").
-func FullyQualifiedAppID(ctx netcontext.Context) string {
- if id, ok := ctx.Value(&appIDOverrideKey).(string); ok {
- return id
- }
- return fullyQualifiedAppID(ctx)
-}
-
-func Logf(ctx netcontext.Context, level int64, format string, args ...interface{}) {
- if f, ok := ctx.Value(&logOverrideKey).(logOverrideFunc); ok {
- f(level, format, args...)
- return
- }
- logf(fromContext(ctx), level, format, args...)
-}
-
-// NamespacedContext wraps a Context to support namespaces.
-func NamespacedContext(ctx netcontext.Context, namespace string) netcontext.Context {
- n := &namespacedContext{
- namespace: namespace,
- }
- return withNamespace(WithCallOverride(ctx, n.call), namespace)
-}
-
-type namespacedContext struct {
- namespace string
-}
-
-func (n *namespacedContext) call(ctx netcontext.Context, service, method string, in, out proto.Message) error {
- // Apply any namespace mods.
- if mod, ok := NamespaceMods[service]; ok {
- mod(in, n.namespace)
- }
- return Call(ctx, service, method, in, out)
-}
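
A hypothetical sketch of how the hooks above compose: WithCallOverride pushes a stub onto the override stack, and NamespacedContext layers a namespace-rewriting override on top of it, so namespaced services reach the stub only after their requests have been modified. The echo stub and the "testing" namespace are assumptions:

package example

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	netcontext "golang.org/x/net/context"

	"google.golang.org/appengine/internal"
)

// newStubContext returns a context whose API calls never leave the process.
func newStubContext() netcontext.Context {
	stub := func(ctx netcontext.Context, service, method string, in, out proto.Message) error {
		fmt.Printf("stubbed call to /%s.%s\n", service, method)
		return nil
	}
	ctx := internal.WithCallOverride(netcontext.Background(), stub)
	return internal.NamespacedContext(ctx, "testing")
}
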
diff --git a/vendor/google.golang.org/appengine/internal/app_id.go b/vendor/google.golang.org/appengine/internal/app_id.go
deleted file mode 100644
index 11df8c0..0000000
--- a/vendor/google.golang.org/appengine/internal/app_id.go
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package internal
-
-import (
- "strings"
-)
-
-func parseFullAppID(appid string) (partition, domain, displayID string) {
- if i := strings.Index(appid, "~"); i != -1 {
- partition, appid = appid[:i], appid[i+1:]
- }
- if i := strings.Index(appid, ":"); i != -1 {
- domain, appid = appid[:i], appid[i+1:]
- }
- return partition, domain, appid
-}
-
-// appID returns "appid" or "domain.com:appid".
-func appID(fullAppID string) string {
- _, dom, dis := parseFullAppID(fullAppID)
- if dom != "" {
- return dom + ":" + dis
- }
- return dis
-}
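
A worked example of the app ID format handled above. Since parseFullAppID is unexported, this standalone sketch simply mirrors its splitting rules for illustration:

package main

import (
	"fmt"
	"strings"
)

// splitAppID mirrors internal.parseFullAppID: "partition~domain:display".
func splitAppID(appid string) (partition, domain, display string) {
	if i := strings.Index(appid, "~"); i != -1 {
		partition, appid = appid[:i], appid[i+1:]
	}
	if i := strings.Index(appid, ":"); i != -1 {
		domain, appid = appid[:i], appid[i+1:]
	}
	return partition, domain, appid
}

func main() {
	// Prints: partition="s" domain="example.com" display="myapp"
	p, d, id := splitAppID("s~example.com:myapp")
	fmt.Printf("partition=%q domain=%q display=%q\n", p, d, id)
}
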
diff --git a/vendor/google.golang.org/appengine/internal/app_identity/app_identity_service.pb.go b/vendor/google.golang.org/appengine/internal/app_identity/app_identity_service.pb.go
deleted file mode 100644
index 87d9701..0000000
--- a/vendor/google.golang.org/appengine/internal/app_identity/app_identity_service.pb.go
+++ /dev/null
@@ -1,296 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/app_identity/app_identity_service.proto
-// DO NOT EDIT!
-
-/*
-Package app_identity is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/app_identity/app_identity_service.proto
-
-It has these top-level messages:
- AppIdentityServiceError
- SignForAppRequest
- SignForAppResponse
- GetPublicCertificateForAppRequest
- PublicCertificate
- GetPublicCertificateForAppResponse
- GetServiceAccountNameRequest
- GetServiceAccountNameResponse
- GetAccessTokenRequest
- GetAccessTokenResponse
- GetDefaultGcsBucketNameRequest
- GetDefaultGcsBucketNameResponse
-*/
-package app_identity
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type AppIdentityServiceError_ErrorCode int32
-
-const (
- AppIdentityServiceError_SUCCESS AppIdentityServiceError_ErrorCode = 0
- AppIdentityServiceError_UNKNOWN_SCOPE AppIdentityServiceError_ErrorCode = 9
- AppIdentityServiceError_BLOB_TOO_LARGE AppIdentityServiceError_ErrorCode = 1000
- AppIdentityServiceError_DEADLINE_EXCEEDED AppIdentityServiceError_ErrorCode = 1001
- AppIdentityServiceError_NOT_A_VALID_APP AppIdentityServiceError_ErrorCode = 1002
- AppIdentityServiceError_UNKNOWN_ERROR AppIdentityServiceError_ErrorCode = 1003
- AppIdentityServiceError_NOT_ALLOWED AppIdentityServiceError_ErrorCode = 1005
- AppIdentityServiceError_NOT_IMPLEMENTED AppIdentityServiceError_ErrorCode = 1006
-)
-
-var AppIdentityServiceError_ErrorCode_name = map[int32]string{
- 0: "SUCCESS",
- 9: "UNKNOWN_SCOPE",
- 1000: "BLOB_TOO_LARGE",
- 1001: "DEADLINE_EXCEEDED",
- 1002: "NOT_A_VALID_APP",
- 1003: "UNKNOWN_ERROR",
- 1005: "NOT_ALLOWED",
- 1006: "NOT_IMPLEMENTED",
-}
-var AppIdentityServiceError_ErrorCode_value = map[string]int32{
- "SUCCESS": 0,
- "UNKNOWN_SCOPE": 9,
- "BLOB_TOO_LARGE": 1000,
- "DEADLINE_EXCEEDED": 1001,
- "NOT_A_VALID_APP": 1002,
- "UNKNOWN_ERROR": 1003,
- "NOT_ALLOWED": 1005,
- "NOT_IMPLEMENTED": 1006,
-}
-
-func (x AppIdentityServiceError_ErrorCode) Enum() *AppIdentityServiceError_ErrorCode {
- p := new(AppIdentityServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x AppIdentityServiceError_ErrorCode) String() string {
- return proto.EnumName(AppIdentityServiceError_ErrorCode_name, int32(x))
-}
-func (x *AppIdentityServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(AppIdentityServiceError_ErrorCode_value, data, "AppIdentityServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = AppIdentityServiceError_ErrorCode(value)
- return nil
-}
-
-type AppIdentityServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AppIdentityServiceError) Reset() { *m = AppIdentityServiceError{} }
-func (m *AppIdentityServiceError) String() string { return proto.CompactTextString(m) }
-func (*AppIdentityServiceError) ProtoMessage() {}
-
-type SignForAppRequest struct {
- BytesToSign []byte `protobuf:"bytes,1,opt,name=bytes_to_sign" json:"bytes_to_sign,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SignForAppRequest) Reset() { *m = SignForAppRequest{} }
-func (m *SignForAppRequest) String() string { return proto.CompactTextString(m) }
-func (*SignForAppRequest) ProtoMessage() {}
-
-func (m *SignForAppRequest) GetBytesToSign() []byte {
- if m != nil {
- return m.BytesToSign
- }
- return nil
-}
-
-type SignForAppResponse struct {
- KeyName *string `protobuf:"bytes,1,opt,name=key_name" json:"key_name,omitempty"`
- SignatureBytes []byte `protobuf:"bytes,2,opt,name=signature_bytes" json:"signature_bytes,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SignForAppResponse) Reset() { *m = SignForAppResponse{} }
-func (m *SignForAppResponse) String() string { return proto.CompactTextString(m) }
-func (*SignForAppResponse) ProtoMessage() {}
-
-func (m *SignForAppResponse) GetKeyName() string {
- if m != nil && m.KeyName != nil {
- return *m.KeyName
- }
- return ""
-}
-
-func (m *SignForAppResponse) GetSignatureBytes() []byte {
- if m != nil {
- return m.SignatureBytes
- }
- return nil
-}
-
-type GetPublicCertificateForAppRequest struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetPublicCertificateForAppRequest) Reset() { *m = GetPublicCertificateForAppRequest{} }
-func (m *GetPublicCertificateForAppRequest) String() string { return proto.CompactTextString(m) }
-func (*GetPublicCertificateForAppRequest) ProtoMessage() {}
-
-type PublicCertificate struct {
- KeyName *string `protobuf:"bytes,1,opt,name=key_name" json:"key_name,omitempty"`
- X509CertificatePem *string `protobuf:"bytes,2,opt,name=x509_certificate_pem" json:"x509_certificate_pem,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PublicCertificate) Reset() { *m = PublicCertificate{} }
-func (m *PublicCertificate) String() string { return proto.CompactTextString(m) }
-func (*PublicCertificate) ProtoMessage() {}
-
-func (m *PublicCertificate) GetKeyName() string {
- if m != nil && m.KeyName != nil {
- return *m.KeyName
- }
- return ""
-}
-
-func (m *PublicCertificate) GetX509CertificatePem() string {
- if m != nil && m.X509CertificatePem != nil {
- return *m.X509CertificatePem
- }
- return ""
-}
-
-type GetPublicCertificateForAppResponse struct {
- PublicCertificateList []*PublicCertificate `protobuf:"bytes,1,rep,name=public_certificate_list" json:"public_certificate_list,omitempty"`
- MaxClientCacheTimeInSecond *int64 `protobuf:"varint,2,opt,name=max_client_cache_time_in_second" json:"max_client_cache_time_in_second,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetPublicCertificateForAppResponse) Reset() { *m = GetPublicCertificateForAppResponse{} }
-func (m *GetPublicCertificateForAppResponse) String() string { return proto.CompactTextString(m) }
-func (*GetPublicCertificateForAppResponse) ProtoMessage() {}
-
-func (m *GetPublicCertificateForAppResponse) GetPublicCertificateList() []*PublicCertificate {
- if m != nil {
- return m.PublicCertificateList
- }
- return nil
-}
-
-func (m *GetPublicCertificateForAppResponse) GetMaxClientCacheTimeInSecond() int64 {
- if m != nil && m.MaxClientCacheTimeInSecond != nil {
- return *m.MaxClientCacheTimeInSecond
- }
- return 0
-}
-
-type GetServiceAccountNameRequest struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetServiceAccountNameRequest) Reset() { *m = GetServiceAccountNameRequest{} }
-func (m *GetServiceAccountNameRequest) String() string { return proto.CompactTextString(m) }
-func (*GetServiceAccountNameRequest) ProtoMessage() {}
-
-type GetServiceAccountNameResponse struct {
- ServiceAccountName *string `protobuf:"bytes,1,opt,name=service_account_name" json:"service_account_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetServiceAccountNameResponse) Reset() { *m = GetServiceAccountNameResponse{} }
-func (m *GetServiceAccountNameResponse) String() string { return proto.CompactTextString(m) }
-func (*GetServiceAccountNameResponse) ProtoMessage() {}
-
-func (m *GetServiceAccountNameResponse) GetServiceAccountName() string {
- if m != nil && m.ServiceAccountName != nil {
- return *m.ServiceAccountName
- }
- return ""
-}
-
-type GetAccessTokenRequest struct {
- Scope []string `protobuf:"bytes,1,rep,name=scope" json:"scope,omitempty"`
- ServiceAccountId *int64 `protobuf:"varint,2,opt,name=service_account_id" json:"service_account_id,omitempty"`
- ServiceAccountName *string `protobuf:"bytes,3,opt,name=service_account_name" json:"service_account_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetAccessTokenRequest) Reset() { *m = GetAccessTokenRequest{} }
-func (m *GetAccessTokenRequest) String() string { return proto.CompactTextString(m) }
-func (*GetAccessTokenRequest) ProtoMessage() {}
-
-func (m *GetAccessTokenRequest) GetScope() []string {
- if m != nil {
- return m.Scope
- }
- return nil
-}
-
-func (m *GetAccessTokenRequest) GetServiceAccountId() int64 {
- if m != nil && m.ServiceAccountId != nil {
- return *m.ServiceAccountId
- }
- return 0
-}
-
-func (m *GetAccessTokenRequest) GetServiceAccountName() string {
- if m != nil && m.ServiceAccountName != nil {
- return *m.ServiceAccountName
- }
- return ""
-}
-
-type GetAccessTokenResponse struct {
- AccessToken *string `protobuf:"bytes,1,opt,name=access_token" json:"access_token,omitempty"`
- ExpirationTime *int64 `protobuf:"varint,2,opt,name=expiration_time" json:"expiration_time,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetAccessTokenResponse) Reset() { *m = GetAccessTokenResponse{} }
-func (m *GetAccessTokenResponse) String() string { return proto.CompactTextString(m) }
-func (*GetAccessTokenResponse) ProtoMessage() {}
-
-func (m *GetAccessTokenResponse) GetAccessToken() string {
- if m != nil && m.AccessToken != nil {
- return *m.AccessToken
- }
- return ""
-}
-
-func (m *GetAccessTokenResponse) GetExpirationTime() int64 {
- if m != nil && m.ExpirationTime != nil {
- return *m.ExpirationTime
- }
- return 0
-}
-
-type GetDefaultGcsBucketNameRequest struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetDefaultGcsBucketNameRequest) Reset() { *m = GetDefaultGcsBucketNameRequest{} }
-func (m *GetDefaultGcsBucketNameRequest) String() string { return proto.CompactTextString(m) }
-func (*GetDefaultGcsBucketNameRequest) ProtoMessage() {}
-
-type GetDefaultGcsBucketNameResponse struct {
- DefaultGcsBucketName *string `protobuf:"bytes,1,opt,name=default_gcs_bucket_name" json:"default_gcs_bucket_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetDefaultGcsBucketNameResponse) Reset() { *m = GetDefaultGcsBucketNameResponse{} }
-func (m *GetDefaultGcsBucketNameResponse) String() string { return proto.CompactTextString(m) }
-func (*GetDefaultGcsBucketNameResponse) ProtoMessage() {}
-
-func (m *GetDefaultGcsBucketNameResponse) GetDefaultGcsBucketName() string {
- if m != nil && m.DefaultGcsBucketName != nil {
- return *m.DefaultGcsBucketName
- }
- return ""
-}
-
-func init() {
-}
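
A tiny sketch of how the generated error-code enum above resolves numeric codes to names through AppIdentityServiceError_ErrorCode_name; the code 1002 is just an example value:

package main

import (
	"fmt"

	pb "google.golang.org/appengine/internal/app_identity"
)

func main() {
	code := pb.AppIdentityServiceError_ErrorCode(1002)
	fmt.Println(code.String()) // prints "NOT_A_VALID_APP"
}
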
diff --git a/vendor/google.golang.org/appengine/internal/app_identity/app_identity_service.proto b/vendor/google.golang.org/appengine/internal/app_identity/app_identity_service.proto
deleted file mode 100644
index 19610ca..0000000
--- a/vendor/google.golang.org/appengine/internal/app_identity/app_identity_service.proto
+++ /dev/null
@@ -1,64 +0,0 @@
-syntax = "proto2";
-option go_package = "app_identity";
-
-package appengine;
-
-message AppIdentityServiceError {
- enum ErrorCode {
- SUCCESS = 0;
- UNKNOWN_SCOPE = 9;
- BLOB_TOO_LARGE = 1000;
- DEADLINE_EXCEEDED = 1001;
- NOT_A_VALID_APP = 1002;
- UNKNOWN_ERROR = 1003;
- NOT_ALLOWED = 1005;
- NOT_IMPLEMENTED = 1006;
- }
-}
-
-message SignForAppRequest {
- optional bytes bytes_to_sign = 1;
-}
-
-message SignForAppResponse {
- optional string key_name = 1;
- optional bytes signature_bytes = 2;
-}
-
-message GetPublicCertificateForAppRequest {
-}
-
-message PublicCertificate {
- optional string key_name = 1;
- optional string x509_certificate_pem = 2;
-}
-
-message GetPublicCertificateForAppResponse {
- repeated PublicCertificate public_certificate_list = 1;
- optional int64 max_client_cache_time_in_second = 2;
-}
-
-message GetServiceAccountNameRequest {
-}
-
-message GetServiceAccountNameResponse {
- optional string service_account_name = 1;
-}
-
-message GetAccessTokenRequest {
- repeated string scope = 1;
- optional int64 service_account_id = 2;
- optional string service_account_name = 3;
-}
-
-message GetAccessTokenResponse {
- optional string access_token = 1;
- optional int64 expiration_time = 2;
-}
-
-message GetDefaultGcsBucketNameRequest {
-}
-
-message GetDefaultGcsBucketNameResponse {
- optional string default_gcs_bucket_name = 1;
-}
diff --git a/vendor/google.golang.org/appengine/internal/base/api_base.pb.go b/vendor/google.golang.org/appengine/internal/base/api_base.pb.go
deleted file mode 100644
index 36a1956..0000000
--- a/vendor/google.golang.org/appengine/internal/base/api_base.pb.go
+++ /dev/null
@@ -1,133 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/base/api_base.proto
-// DO NOT EDIT!
-
-/*
-Package base is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/base/api_base.proto
-
-It has these top-level messages:
- StringProto
- Integer32Proto
- Integer64Proto
- BoolProto
- DoubleProto
- BytesProto
- VoidProto
-*/
-package base
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type StringProto struct {
- Value *string `protobuf:"bytes,1,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *StringProto) Reset() { *m = StringProto{} }
-func (m *StringProto) String() string { return proto.CompactTextString(m) }
-func (*StringProto) ProtoMessage() {}
-
-func (m *StringProto) GetValue() string {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return ""
-}
-
-type Integer32Proto struct {
- Value *int32 `protobuf:"varint,1,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Integer32Proto) Reset() { *m = Integer32Proto{} }
-func (m *Integer32Proto) String() string { return proto.CompactTextString(m) }
-func (*Integer32Proto) ProtoMessage() {}
-
-func (m *Integer32Proto) GetValue() int32 {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return 0
-}
-
-type Integer64Proto struct {
- Value *int64 `protobuf:"varint,1,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Integer64Proto) Reset() { *m = Integer64Proto{} }
-func (m *Integer64Proto) String() string { return proto.CompactTextString(m) }
-func (*Integer64Proto) ProtoMessage() {}
-
-func (m *Integer64Proto) GetValue() int64 {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return 0
-}
-
-type BoolProto struct {
- Value *bool `protobuf:"varint,1,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *BoolProto) Reset() { *m = BoolProto{} }
-func (m *BoolProto) String() string { return proto.CompactTextString(m) }
-func (*BoolProto) ProtoMessage() {}
-
-func (m *BoolProto) GetValue() bool {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return false
-}
-
-type DoubleProto struct {
- Value *float64 `protobuf:"fixed64,1,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DoubleProto) Reset() { *m = DoubleProto{} }
-func (m *DoubleProto) String() string { return proto.CompactTextString(m) }
-func (*DoubleProto) ProtoMessage() {}
-
-func (m *DoubleProto) GetValue() float64 {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return 0
-}
-
-type BytesProto struct {
- Value []byte `protobuf:"bytes,1,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *BytesProto) Reset() { *m = BytesProto{} }
-func (m *BytesProto) String() string { return proto.CompactTextString(m) }
-func (*BytesProto) ProtoMessage() {}
-
-func (m *BytesProto) GetValue() []byte {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type VoidProto struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *VoidProto) Reset() { *m = VoidProto{} }
-func (m *VoidProto) String() string { return proto.CompactTextString(m) }
-func (*VoidProto) ProtoMessage() {}
diff --git a/vendor/google.golang.org/appengine/internal/base/api_base.proto b/vendor/google.golang.org/appengine/internal/base/api_base.proto
deleted file mode 100644
index 56cd7a3..0000000
--- a/vendor/google.golang.org/appengine/internal/base/api_base.proto
+++ /dev/null
@@ -1,33 +0,0 @@
-// Built-in base types for API calls. Primarily useful as return types.
-
-syntax = "proto2";
-option go_package = "base";
-
-package appengine.base;
-
-message StringProto {
- required string value = 1;
-}
-
-message Integer32Proto {
- required int32 value = 1;
-}
-
-message Integer64Proto {
- required int64 value = 1;
-}
-
-message BoolProto {
- required bool value = 1;
-}
-
-message DoubleProto {
- required double value = 1;
-}
-
-message BytesProto {
- required bytes value = 1 [ctype=CORD];
-}
-
-message VoidProto {
-}
diff --git a/vendor/google.golang.org/appengine/internal/blobstore/blobstore_service.pb.go b/vendor/google.golang.org/appengine/internal/blobstore/blobstore_service.pb.go
deleted file mode 100644
index 8705ec3..0000000
--- a/vendor/google.golang.org/appengine/internal/blobstore/blobstore_service.pb.go
+++ /dev/null
@@ -1,347 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/blobstore/blobstore_service.proto
-// DO NOT EDIT!
-
-/*
-Package blobstore is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/blobstore/blobstore_service.proto
-
-It has these top-level messages:
- BlobstoreServiceError
- CreateUploadURLRequest
- CreateUploadURLResponse
- DeleteBlobRequest
- FetchDataRequest
- FetchDataResponse
- CloneBlobRequest
- CloneBlobResponse
- DecodeBlobKeyRequest
- DecodeBlobKeyResponse
- CreateEncodedGoogleStorageKeyRequest
- CreateEncodedGoogleStorageKeyResponse
-*/
-package blobstore
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type BlobstoreServiceError_ErrorCode int32
-
-const (
- BlobstoreServiceError_OK BlobstoreServiceError_ErrorCode = 0
- BlobstoreServiceError_INTERNAL_ERROR BlobstoreServiceError_ErrorCode = 1
- BlobstoreServiceError_URL_TOO_LONG BlobstoreServiceError_ErrorCode = 2
- BlobstoreServiceError_PERMISSION_DENIED BlobstoreServiceError_ErrorCode = 3
- BlobstoreServiceError_BLOB_NOT_FOUND BlobstoreServiceError_ErrorCode = 4
- BlobstoreServiceError_DATA_INDEX_OUT_OF_RANGE BlobstoreServiceError_ErrorCode = 5
- BlobstoreServiceError_BLOB_FETCH_SIZE_TOO_LARGE BlobstoreServiceError_ErrorCode = 6
- BlobstoreServiceError_ARGUMENT_OUT_OF_RANGE BlobstoreServiceError_ErrorCode = 8
- BlobstoreServiceError_INVALID_BLOB_KEY BlobstoreServiceError_ErrorCode = 9
-)
-
-var BlobstoreServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "INTERNAL_ERROR",
- 2: "URL_TOO_LONG",
- 3: "PERMISSION_DENIED",
- 4: "BLOB_NOT_FOUND",
- 5: "DATA_INDEX_OUT_OF_RANGE",
- 6: "BLOB_FETCH_SIZE_TOO_LARGE",
- 8: "ARGUMENT_OUT_OF_RANGE",
- 9: "INVALID_BLOB_KEY",
-}
-var BlobstoreServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "INTERNAL_ERROR": 1,
- "URL_TOO_LONG": 2,
- "PERMISSION_DENIED": 3,
- "BLOB_NOT_FOUND": 4,
- "DATA_INDEX_OUT_OF_RANGE": 5,
- "BLOB_FETCH_SIZE_TOO_LARGE": 6,
- "ARGUMENT_OUT_OF_RANGE": 8,
- "INVALID_BLOB_KEY": 9,
-}
-
-func (x BlobstoreServiceError_ErrorCode) Enum() *BlobstoreServiceError_ErrorCode {
- p := new(BlobstoreServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x BlobstoreServiceError_ErrorCode) String() string {
- return proto.EnumName(BlobstoreServiceError_ErrorCode_name, int32(x))
-}
-func (x *BlobstoreServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(BlobstoreServiceError_ErrorCode_value, data, "BlobstoreServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = BlobstoreServiceError_ErrorCode(value)
- return nil
-}
-
-type BlobstoreServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *BlobstoreServiceError) Reset() { *m = BlobstoreServiceError{} }
-func (m *BlobstoreServiceError) String() string { return proto.CompactTextString(m) }
-func (*BlobstoreServiceError) ProtoMessage() {}
-
-type CreateUploadURLRequest struct {
- SuccessPath *string `protobuf:"bytes,1,req,name=success_path" json:"success_path,omitempty"`
- MaxUploadSizeBytes *int64 `protobuf:"varint,2,opt,name=max_upload_size_bytes" json:"max_upload_size_bytes,omitempty"`
- MaxUploadSizePerBlobBytes *int64 `protobuf:"varint,3,opt,name=max_upload_size_per_blob_bytes" json:"max_upload_size_per_blob_bytes,omitempty"`
- GsBucketName *string `protobuf:"bytes,4,opt,name=gs_bucket_name" json:"gs_bucket_name,omitempty"`
- UrlExpiryTimeSeconds *int32 `protobuf:"varint,5,opt,name=url_expiry_time_seconds" json:"url_expiry_time_seconds,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateUploadURLRequest) Reset() { *m = CreateUploadURLRequest{} }
-func (m *CreateUploadURLRequest) String() string { return proto.CompactTextString(m) }
-func (*CreateUploadURLRequest) ProtoMessage() {}
-
-func (m *CreateUploadURLRequest) GetSuccessPath() string {
- if m != nil && m.SuccessPath != nil {
- return *m.SuccessPath
- }
- return ""
-}
-
-func (m *CreateUploadURLRequest) GetMaxUploadSizeBytes() int64 {
- if m != nil && m.MaxUploadSizeBytes != nil {
- return *m.MaxUploadSizeBytes
- }
- return 0
-}
-
-func (m *CreateUploadURLRequest) GetMaxUploadSizePerBlobBytes() int64 {
- if m != nil && m.MaxUploadSizePerBlobBytes != nil {
- return *m.MaxUploadSizePerBlobBytes
- }
- return 0
-}
-
-func (m *CreateUploadURLRequest) GetGsBucketName() string {
- if m != nil && m.GsBucketName != nil {
- return *m.GsBucketName
- }
- return ""
-}
-
-func (m *CreateUploadURLRequest) GetUrlExpiryTimeSeconds() int32 {
- if m != nil && m.UrlExpiryTimeSeconds != nil {
- return *m.UrlExpiryTimeSeconds
- }
- return 0
-}
-
-type CreateUploadURLResponse struct {
- Url *string `protobuf:"bytes,1,req,name=url" json:"url,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateUploadURLResponse) Reset() { *m = CreateUploadURLResponse{} }
-func (m *CreateUploadURLResponse) String() string { return proto.CompactTextString(m) }
-func (*CreateUploadURLResponse) ProtoMessage() {}
-
-func (m *CreateUploadURLResponse) GetUrl() string {
- if m != nil && m.Url != nil {
- return *m.Url
- }
- return ""
-}
-
-type DeleteBlobRequest struct {
- BlobKey []string `protobuf:"bytes,1,rep,name=blob_key" json:"blob_key,omitempty"`
- Token *string `protobuf:"bytes,2,opt,name=token" json:"token,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DeleteBlobRequest) Reset() { *m = DeleteBlobRequest{} }
-func (m *DeleteBlobRequest) String() string { return proto.CompactTextString(m) }
-func (*DeleteBlobRequest) ProtoMessage() {}
-
-func (m *DeleteBlobRequest) GetBlobKey() []string {
- if m != nil {
- return m.BlobKey
- }
- return nil
-}
-
-func (m *DeleteBlobRequest) GetToken() string {
- if m != nil && m.Token != nil {
- return *m.Token
- }
- return ""
-}
-
-type FetchDataRequest struct {
- BlobKey *string `protobuf:"bytes,1,req,name=blob_key" json:"blob_key,omitempty"`
- StartIndex *int64 `protobuf:"varint,2,req,name=start_index" json:"start_index,omitempty"`
- EndIndex *int64 `protobuf:"varint,3,req,name=end_index" json:"end_index,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FetchDataRequest) Reset() { *m = FetchDataRequest{} }
-func (m *FetchDataRequest) String() string { return proto.CompactTextString(m) }
-func (*FetchDataRequest) ProtoMessage() {}
-
-func (m *FetchDataRequest) GetBlobKey() string {
- if m != nil && m.BlobKey != nil {
- return *m.BlobKey
- }
- return ""
-}
-
-func (m *FetchDataRequest) GetStartIndex() int64 {
- if m != nil && m.StartIndex != nil {
- return *m.StartIndex
- }
- return 0
-}
-
-func (m *FetchDataRequest) GetEndIndex() int64 {
- if m != nil && m.EndIndex != nil {
- return *m.EndIndex
- }
- return 0
-}
-
-type FetchDataResponse struct {
- Data []byte `protobuf:"bytes,1000,req,name=data" json:"data,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FetchDataResponse) Reset() { *m = FetchDataResponse{} }
-func (m *FetchDataResponse) String() string { return proto.CompactTextString(m) }
-func (*FetchDataResponse) ProtoMessage() {}
-
-func (m *FetchDataResponse) GetData() []byte {
- if m != nil {
- return m.Data
- }
- return nil
-}
-
-type CloneBlobRequest struct {
- BlobKey []byte `protobuf:"bytes,1,req,name=blob_key" json:"blob_key,omitempty"`
- MimeType []byte `protobuf:"bytes,2,req,name=mime_type" json:"mime_type,omitempty"`
- TargetAppId []byte `protobuf:"bytes,3,req,name=target_app_id" json:"target_app_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CloneBlobRequest) Reset() { *m = CloneBlobRequest{} }
-func (m *CloneBlobRequest) String() string { return proto.CompactTextString(m) }
-func (*CloneBlobRequest) ProtoMessage() {}
-
-func (m *CloneBlobRequest) GetBlobKey() []byte {
- if m != nil {
- return m.BlobKey
- }
- return nil
-}
-
-func (m *CloneBlobRequest) GetMimeType() []byte {
- if m != nil {
- return m.MimeType
- }
- return nil
-}
-
-func (m *CloneBlobRequest) GetTargetAppId() []byte {
- if m != nil {
- return m.TargetAppId
- }
- return nil
-}
-
-type CloneBlobResponse struct {
- BlobKey []byte `protobuf:"bytes,1,req,name=blob_key" json:"blob_key,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CloneBlobResponse) Reset() { *m = CloneBlobResponse{} }
-func (m *CloneBlobResponse) String() string { return proto.CompactTextString(m) }
-func (*CloneBlobResponse) ProtoMessage() {}
-
-func (m *CloneBlobResponse) GetBlobKey() []byte {
- if m != nil {
- return m.BlobKey
- }
- return nil
-}
-
-type DecodeBlobKeyRequest struct {
- BlobKey []string `protobuf:"bytes,1,rep,name=blob_key" json:"blob_key,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DecodeBlobKeyRequest) Reset() { *m = DecodeBlobKeyRequest{} }
-func (m *DecodeBlobKeyRequest) String() string { return proto.CompactTextString(m) }
-func (*DecodeBlobKeyRequest) ProtoMessage() {}
-
-func (m *DecodeBlobKeyRequest) GetBlobKey() []string {
- if m != nil {
- return m.BlobKey
- }
- return nil
-}
-
-type DecodeBlobKeyResponse struct {
- Decoded []string `protobuf:"bytes,1,rep,name=decoded" json:"decoded,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DecodeBlobKeyResponse) Reset() { *m = DecodeBlobKeyResponse{} }
-func (m *DecodeBlobKeyResponse) String() string { return proto.CompactTextString(m) }
-func (*DecodeBlobKeyResponse) ProtoMessage() {}
-
-func (m *DecodeBlobKeyResponse) GetDecoded() []string {
- if m != nil {
- return m.Decoded
- }
- return nil
-}
-
-type CreateEncodedGoogleStorageKeyRequest struct {
- Filename *string `protobuf:"bytes,1,req,name=filename" json:"filename,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateEncodedGoogleStorageKeyRequest) Reset() { *m = CreateEncodedGoogleStorageKeyRequest{} }
-func (m *CreateEncodedGoogleStorageKeyRequest) String() string { return proto.CompactTextString(m) }
-func (*CreateEncodedGoogleStorageKeyRequest) ProtoMessage() {}
-
-func (m *CreateEncodedGoogleStorageKeyRequest) GetFilename() string {
- if m != nil && m.Filename != nil {
- return *m.Filename
- }
- return ""
-}
-
-type CreateEncodedGoogleStorageKeyResponse struct {
- BlobKey *string `protobuf:"bytes,1,req,name=blob_key" json:"blob_key,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateEncodedGoogleStorageKeyResponse) Reset() { *m = CreateEncodedGoogleStorageKeyResponse{} }
-func (m *CreateEncodedGoogleStorageKeyResponse) String() string { return proto.CompactTextString(m) }
-func (*CreateEncodedGoogleStorageKeyResponse) ProtoMessage() {}
-
-func (m *CreateEncodedGoogleStorageKeyResponse) GetBlobKey() string {
- if m != nil && m.BlobKey != nil {
- return *m.BlobKey
- }
- return ""
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/blobstore/blobstore_service.proto b/vendor/google.golang.org/appengine/internal/blobstore/blobstore_service.proto
deleted file mode 100644
index 33b2650..0000000
--- a/vendor/google.golang.org/appengine/internal/blobstore/blobstore_service.proto
+++ /dev/null
@@ -1,71 +0,0 @@
-syntax = "proto2";
-option go_package = "blobstore";
-
-package appengine;
-
-message BlobstoreServiceError {
- enum ErrorCode {
- OK = 0;
- INTERNAL_ERROR = 1;
- URL_TOO_LONG = 2;
- PERMISSION_DENIED = 3;
- BLOB_NOT_FOUND = 4;
- DATA_INDEX_OUT_OF_RANGE = 5;
- BLOB_FETCH_SIZE_TOO_LARGE = 6;
- ARGUMENT_OUT_OF_RANGE = 8;
- INVALID_BLOB_KEY = 9;
- }
-}
-
-message CreateUploadURLRequest {
- required string success_path = 1;
- optional int64 max_upload_size_bytes = 2;
- optional int64 max_upload_size_per_blob_bytes = 3;
- optional string gs_bucket_name = 4;
- optional int32 url_expiry_time_seconds = 5;
-}
-
-message CreateUploadURLResponse {
- required string url = 1;
-}
-
-message DeleteBlobRequest {
- repeated string blob_key = 1;
- optional string token = 2;
-}
-
-message FetchDataRequest {
- required string blob_key = 1;
- required int64 start_index = 2;
- required int64 end_index = 3;
-}
-
-message FetchDataResponse {
- required bytes data = 1000 [ctype = CORD];
-}
-
-message CloneBlobRequest {
- required bytes blob_key = 1;
- required bytes mime_type = 2;
- required bytes target_app_id = 3;
-}
-
-message CloneBlobResponse {
- required bytes blob_key = 1;
-}
-
-message DecodeBlobKeyRequest {
- repeated string blob_key = 1;
-}
-
-message DecodeBlobKeyResponse {
- repeated string decoded = 1;
-}
-
-message CreateEncodedGoogleStorageKeyRequest {
- required string filename = 1;
-}
-
-message CreateEncodedGoogleStorageKeyResponse {
- required string blob_key = 1;
-}
diff --git a/vendor/google.golang.org/appengine/internal/capability/capability_service.pb.go b/vendor/google.golang.org/appengine/internal/capability/capability_service.pb.go
deleted file mode 100644
index e57a04b..0000000
--- a/vendor/google.golang.org/appengine/internal/capability/capability_service.pb.go
+++ /dev/null
@@ -1,125 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/capability/capability_service.proto
-// DO NOT EDIT!
-
-/*
-Package channel is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/capability/capability_service.proto
-
-It has these top-level messages:
- IsEnabledRequest
- IsEnabledResponse
-*/
-package channel
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type IsEnabledResponse_SummaryStatus int32
-
-const (
- IsEnabledResponse_DEFAULT IsEnabledResponse_SummaryStatus = 0
- IsEnabledResponse_ENABLED IsEnabledResponse_SummaryStatus = 1
- IsEnabledResponse_SCHEDULED_FUTURE IsEnabledResponse_SummaryStatus = 2
- IsEnabledResponse_SCHEDULED_NOW IsEnabledResponse_SummaryStatus = 3
- IsEnabledResponse_DISABLED IsEnabledResponse_SummaryStatus = 4
- IsEnabledResponse_UNKNOWN IsEnabledResponse_SummaryStatus = 5
-)
-
-var IsEnabledResponse_SummaryStatus_name = map[int32]string{
- 0: "DEFAULT",
- 1: "ENABLED",
- 2: "SCHEDULED_FUTURE",
- 3: "SCHEDULED_NOW",
- 4: "DISABLED",
- 5: "UNKNOWN",
-}
-var IsEnabledResponse_SummaryStatus_value = map[string]int32{
- "DEFAULT": 0,
- "ENABLED": 1,
- "SCHEDULED_FUTURE": 2,
- "SCHEDULED_NOW": 3,
- "DISABLED": 4,
- "UNKNOWN": 5,
-}
-
-func (x IsEnabledResponse_SummaryStatus) Enum() *IsEnabledResponse_SummaryStatus {
- p := new(IsEnabledResponse_SummaryStatus)
- *p = x
- return p
-}
-func (x IsEnabledResponse_SummaryStatus) String() string {
- return proto.EnumName(IsEnabledResponse_SummaryStatus_name, int32(x))
-}
-func (x *IsEnabledResponse_SummaryStatus) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(IsEnabledResponse_SummaryStatus_value, data, "IsEnabledResponse_SummaryStatus")
- if err != nil {
- return err
- }
- *x = IsEnabledResponse_SummaryStatus(value)
- return nil
-}
-
-type IsEnabledRequest struct {
- Package *string `protobuf:"bytes,1,req,name=package" json:"package,omitempty"`
- Capability []string `protobuf:"bytes,2,rep,name=capability" json:"capability,omitempty"`
- Call []string `protobuf:"bytes,3,rep,name=call" json:"call,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IsEnabledRequest) Reset() { *m = IsEnabledRequest{} }
-func (m *IsEnabledRequest) String() string { return proto.CompactTextString(m) }
-func (*IsEnabledRequest) ProtoMessage() {}
-
-func (m *IsEnabledRequest) GetPackage() string {
- if m != nil && m.Package != nil {
- return *m.Package
- }
- return ""
-}
-
-func (m *IsEnabledRequest) GetCapability() []string {
- if m != nil {
- return m.Capability
- }
- return nil
-}
-
-func (m *IsEnabledRequest) GetCall() []string {
- if m != nil {
- return m.Call
- }
- return nil
-}
-
-type IsEnabledResponse struct {
- SummaryStatus *IsEnabledResponse_SummaryStatus `protobuf:"varint,1,opt,name=summary_status,enum=appengine.IsEnabledResponse_SummaryStatus" json:"summary_status,omitempty"`
- TimeUntilScheduled *int64 `protobuf:"varint,2,opt,name=time_until_scheduled" json:"time_until_scheduled,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IsEnabledResponse) Reset() { *m = IsEnabledResponse{} }
-func (m *IsEnabledResponse) String() string { return proto.CompactTextString(m) }
-func (*IsEnabledResponse) ProtoMessage() {}
-
-func (m *IsEnabledResponse) GetSummaryStatus() IsEnabledResponse_SummaryStatus {
- if m != nil && m.SummaryStatus != nil {
- return *m.SummaryStatus
- }
- return IsEnabledResponse_DEFAULT
-}
-
-func (m *IsEnabledResponse) GetTimeUntilScheduled() int64 {
- if m != nil && m.TimeUntilScheduled != nil {
- return *m.TimeUntilScheduled
- }
- return 0
-}
diff --git a/vendor/google.golang.org/appengine/internal/capability/capability_service.proto b/vendor/google.golang.org/appengine/internal/capability/capability_service.proto
deleted file mode 100644
index 8f7256d..0000000
--- a/vendor/google.golang.org/appengine/internal/capability/capability_service.proto
+++ /dev/null
@@ -1,28 +0,0 @@
-syntax = "proto2";
-option go_package = "channel";
-
-package appengine;
-
-message IsEnabledRequest {
- required string package = 1;
- repeated string capability = 2;
- repeated string call = 3;
-}
-
-message IsEnabledResponse {
- enum SummaryStatus {
- DEFAULT = 0;
- ENABLED = 1;
- SCHEDULED_FUTURE = 2;
- SCHEDULED_NOW = 3;
- DISABLED = 4;
- UNKNOWN = 5;
- }
- optional SummaryStatus summary_status = 1;
-
- optional int64 time_until_scheduled = 2;
-}
-
-service CapabilityService {
- rpc IsEnabled(IsEnabledRequest) returns (IsEnabledResponse) {};
-}
diff --git a/vendor/google.golang.org/appengine/internal/channel/channel_service.pb.go b/vendor/google.golang.org/appengine/internal/channel/channel_service.pb.go
deleted file mode 100644
index 7b8d00c..0000000
--- a/vendor/google.golang.org/appengine/internal/channel/channel_service.pb.go
+++ /dev/null
@@ -1,154 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/channel/channel_service.proto
-// DO NOT EDIT!
-
-/*
-Package channel is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/channel/channel_service.proto
-
-It has these top-level messages:
- ChannelServiceError
- CreateChannelRequest
- CreateChannelResponse
- SendMessageRequest
-*/
-package channel
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type ChannelServiceError_ErrorCode int32
-
-const (
- ChannelServiceError_OK ChannelServiceError_ErrorCode = 0
- ChannelServiceError_INTERNAL_ERROR ChannelServiceError_ErrorCode = 1
- ChannelServiceError_INVALID_CHANNEL_KEY ChannelServiceError_ErrorCode = 2
- ChannelServiceError_BAD_MESSAGE ChannelServiceError_ErrorCode = 3
- ChannelServiceError_INVALID_CHANNEL_TOKEN_DURATION ChannelServiceError_ErrorCode = 4
- ChannelServiceError_APPID_ALIAS_REQUIRED ChannelServiceError_ErrorCode = 5
-)
-
-var ChannelServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "INTERNAL_ERROR",
- 2: "INVALID_CHANNEL_KEY",
- 3: "BAD_MESSAGE",
- 4: "INVALID_CHANNEL_TOKEN_DURATION",
- 5: "APPID_ALIAS_REQUIRED",
-}
-var ChannelServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "INTERNAL_ERROR": 1,
- "INVALID_CHANNEL_KEY": 2,
- "BAD_MESSAGE": 3,
- "INVALID_CHANNEL_TOKEN_DURATION": 4,
- "APPID_ALIAS_REQUIRED": 5,
-}
-
-func (x ChannelServiceError_ErrorCode) Enum() *ChannelServiceError_ErrorCode {
- p := new(ChannelServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x ChannelServiceError_ErrorCode) String() string {
- return proto.EnumName(ChannelServiceError_ErrorCode_name, int32(x))
-}
-func (x *ChannelServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(ChannelServiceError_ErrorCode_value, data, "ChannelServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = ChannelServiceError_ErrorCode(value)
- return nil
-}
-
-type ChannelServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ChannelServiceError) Reset() { *m = ChannelServiceError{} }
-func (m *ChannelServiceError) String() string { return proto.CompactTextString(m) }
-func (*ChannelServiceError) ProtoMessage() {}
-
-type CreateChannelRequest struct {
- ApplicationKey *string `protobuf:"bytes,1,req,name=application_key" json:"application_key,omitempty"`
- DurationMinutes *int32 `protobuf:"varint,2,opt,name=duration_minutes" json:"duration_minutes,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateChannelRequest) Reset() { *m = CreateChannelRequest{} }
-func (m *CreateChannelRequest) String() string { return proto.CompactTextString(m) }
-func (*CreateChannelRequest) ProtoMessage() {}
-
-func (m *CreateChannelRequest) GetApplicationKey() string {
- if m != nil && m.ApplicationKey != nil {
- return *m.ApplicationKey
- }
- return ""
-}
-
-func (m *CreateChannelRequest) GetDurationMinutes() int32 {
- if m != nil && m.DurationMinutes != nil {
- return *m.DurationMinutes
- }
- return 0
-}
-
-type CreateChannelResponse struct {
- Token *string `protobuf:"bytes,2,opt,name=token" json:"token,omitempty"`
- DurationMinutes *int32 `protobuf:"varint,3,opt,name=duration_minutes" json:"duration_minutes,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateChannelResponse) Reset() { *m = CreateChannelResponse{} }
-func (m *CreateChannelResponse) String() string { return proto.CompactTextString(m) }
-func (*CreateChannelResponse) ProtoMessage() {}
-
-func (m *CreateChannelResponse) GetToken() string {
- if m != nil && m.Token != nil {
- return *m.Token
- }
- return ""
-}
-
-func (m *CreateChannelResponse) GetDurationMinutes() int32 {
- if m != nil && m.DurationMinutes != nil {
- return *m.DurationMinutes
- }
- return 0
-}
-
-type SendMessageRequest struct {
- ApplicationKey *string `protobuf:"bytes,1,req,name=application_key" json:"application_key,omitempty"`
- Message *string `protobuf:"bytes,2,req,name=message" json:"message,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SendMessageRequest) Reset() { *m = SendMessageRequest{} }
-func (m *SendMessageRequest) String() string { return proto.CompactTextString(m) }
-func (*SendMessageRequest) ProtoMessage() {}
-
-func (m *SendMessageRequest) GetApplicationKey() string {
- if m != nil && m.ApplicationKey != nil {
- return *m.ApplicationKey
- }
- return ""
-}
-
-func (m *SendMessageRequest) GetMessage() string {
- if m != nil && m.Message != nil {
- return *m.Message
- }
- return ""
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/channel/channel_service.proto b/vendor/google.golang.org/appengine/internal/channel/channel_service.proto
deleted file mode 100644
index 2b5a918..0000000
--- a/vendor/google.golang.org/appengine/internal/channel/channel_service.proto
+++ /dev/null
@@ -1,30 +0,0 @@
-syntax = "proto2";
-option go_package = "channel";
-
-package appengine;
-
-message ChannelServiceError {
- enum ErrorCode {
- OK = 0;
- INTERNAL_ERROR = 1;
- INVALID_CHANNEL_KEY = 2;
- BAD_MESSAGE = 3;
- INVALID_CHANNEL_TOKEN_DURATION = 4;
- APPID_ALIAS_REQUIRED = 5;
- }
-}
-
-message CreateChannelRequest {
- required string application_key = 1;
- optional int32 duration_minutes = 2;
-}
-
-message CreateChannelResponse {
- optional string token = 2;
- optional int32 duration_minutes = 3;
-}
-
-message SendMessageRequest {
- required string application_key = 1;
- required string message = 2;
-}
diff --git a/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.pb.go b/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.pb.go
deleted file mode 100644
index 8613cb7..0000000
--- a/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.pb.go
+++ /dev/null
@@ -1,2778 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/datastore/datastore_v3.proto
-// DO NOT EDIT!
-
-/*
-Package datastore is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/datastore/datastore_v3.proto
-
-It has these top-level messages:
- Action
- PropertyValue
- Property
- Path
- Reference
- User
- EntityProto
- CompositeProperty
- Index
- CompositeIndex
- IndexPostfix
- IndexPosition
- Snapshot
- InternalHeader
- Transaction
- Query
- CompiledQuery
- CompiledCursor
- Cursor
- Error
- Cost
- GetRequest
- GetResponse
- PutRequest
- PutResponse
- TouchRequest
- TouchResponse
- DeleteRequest
- DeleteResponse
- NextRequest
- QueryResult
- AllocateIdsRequest
- AllocateIdsResponse
- CompositeIndices
- AddActionsRequest
- AddActionsResponse
- BeginTransactionRequest
- CommitResponse
-*/
-package datastore
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type Property_Meaning int32
-
-const (
- Property_NO_MEANING Property_Meaning = 0
- Property_BLOB Property_Meaning = 14
- Property_TEXT Property_Meaning = 15
- Property_BYTESTRING Property_Meaning = 16
- Property_ATOM_CATEGORY Property_Meaning = 1
- Property_ATOM_LINK Property_Meaning = 2
- Property_ATOM_TITLE Property_Meaning = 3
- Property_ATOM_CONTENT Property_Meaning = 4
- Property_ATOM_SUMMARY Property_Meaning = 5
- Property_ATOM_AUTHOR Property_Meaning = 6
- Property_GD_WHEN Property_Meaning = 7
- Property_GD_EMAIL Property_Meaning = 8
- Property_GEORSS_POINT Property_Meaning = 9
- Property_GD_IM Property_Meaning = 10
- Property_GD_PHONENUMBER Property_Meaning = 11
- Property_GD_POSTALADDRESS Property_Meaning = 12
- Property_GD_RATING Property_Meaning = 13
- Property_BLOBKEY Property_Meaning = 17
- Property_ENTITY_PROTO Property_Meaning = 19
- Property_INDEX_VALUE Property_Meaning = 18
-)
-
-var Property_Meaning_name = map[int32]string{
- 0: "NO_MEANING",
- 14: "BLOB",
- 15: "TEXT",
- 16: "BYTESTRING",
- 1: "ATOM_CATEGORY",
- 2: "ATOM_LINK",
- 3: "ATOM_TITLE",
- 4: "ATOM_CONTENT",
- 5: "ATOM_SUMMARY",
- 6: "ATOM_AUTHOR",
- 7: "GD_WHEN",
- 8: "GD_EMAIL",
- 9: "GEORSS_POINT",
- 10: "GD_IM",
- 11: "GD_PHONENUMBER",
- 12: "GD_POSTALADDRESS",
- 13: "GD_RATING",
- 17: "BLOBKEY",
- 19: "ENTITY_PROTO",
- 18: "INDEX_VALUE",
-}
-var Property_Meaning_value = map[string]int32{
- "NO_MEANING": 0,
- "BLOB": 14,
- "TEXT": 15,
- "BYTESTRING": 16,
- "ATOM_CATEGORY": 1,
- "ATOM_LINK": 2,
- "ATOM_TITLE": 3,
- "ATOM_CONTENT": 4,
- "ATOM_SUMMARY": 5,
- "ATOM_AUTHOR": 6,
- "GD_WHEN": 7,
- "GD_EMAIL": 8,
- "GEORSS_POINT": 9,
- "GD_IM": 10,
- "GD_PHONENUMBER": 11,
- "GD_POSTALADDRESS": 12,
- "GD_RATING": 13,
- "BLOBKEY": 17,
- "ENTITY_PROTO": 19,
- "INDEX_VALUE": 18,
-}
-
-func (x Property_Meaning) Enum() *Property_Meaning {
- p := new(Property_Meaning)
- *p = x
- return p
-}
-func (x Property_Meaning) String() string {
- return proto.EnumName(Property_Meaning_name, int32(x))
-}
-func (x *Property_Meaning) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Property_Meaning_value, data, "Property_Meaning")
- if err != nil {
- return err
- }
- *x = Property_Meaning(value)
- return nil
-}
-
-type Property_FtsTokenizationOption int32
-
-const (
- Property_HTML Property_FtsTokenizationOption = 1
- Property_ATOM Property_FtsTokenizationOption = 2
-)
-
-var Property_FtsTokenizationOption_name = map[int32]string{
- 1: "HTML",
- 2: "ATOM",
-}
-var Property_FtsTokenizationOption_value = map[string]int32{
- "HTML": 1,
- "ATOM": 2,
-}
-
-func (x Property_FtsTokenizationOption) Enum() *Property_FtsTokenizationOption {
- p := new(Property_FtsTokenizationOption)
- *p = x
- return p
-}
-func (x Property_FtsTokenizationOption) String() string {
- return proto.EnumName(Property_FtsTokenizationOption_name, int32(x))
-}
-func (x *Property_FtsTokenizationOption) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Property_FtsTokenizationOption_value, data, "Property_FtsTokenizationOption")
- if err != nil {
- return err
- }
- *x = Property_FtsTokenizationOption(value)
- return nil
-}
-
-type EntityProto_Kind int32
-
-const (
- EntityProto_GD_CONTACT EntityProto_Kind = 1
- EntityProto_GD_EVENT EntityProto_Kind = 2
- EntityProto_GD_MESSAGE EntityProto_Kind = 3
-)
-
-var EntityProto_Kind_name = map[int32]string{
- 1: "GD_CONTACT",
- 2: "GD_EVENT",
- 3: "GD_MESSAGE",
-}
-var EntityProto_Kind_value = map[string]int32{
- "GD_CONTACT": 1,
- "GD_EVENT": 2,
- "GD_MESSAGE": 3,
-}
-
-func (x EntityProto_Kind) Enum() *EntityProto_Kind {
- p := new(EntityProto_Kind)
- *p = x
- return p
-}
-func (x EntityProto_Kind) String() string {
- return proto.EnumName(EntityProto_Kind_name, int32(x))
-}
-func (x *EntityProto_Kind) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(EntityProto_Kind_value, data, "EntityProto_Kind")
- if err != nil {
- return err
- }
- *x = EntityProto_Kind(value)
- return nil
-}
-
-type Index_Property_Direction int32
-
-const (
- Index_Property_ASCENDING Index_Property_Direction = 1
- Index_Property_DESCENDING Index_Property_Direction = 2
-)
-
-var Index_Property_Direction_name = map[int32]string{
- 1: "ASCENDING",
- 2: "DESCENDING",
-}
-var Index_Property_Direction_value = map[string]int32{
- "ASCENDING": 1,
- "DESCENDING": 2,
-}
-
-func (x Index_Property_Direction) Enum() *Index_Property_Direction {
- p := new(Index_Property_Direction)
- *p = x
- return p
-}
-func (x Index_Property_Direction) String() string {
- return proto.EnumName(Index_Property_Direction_name, int32(x))
-}
-func (x *Index_Property_Direction) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Index_Property_Direction_value, data, "Index_Property_Direction")
- if err != nil {
- return err
- }
- *x = Index_Property_Direction(value)
- return nil
-}
-
-type CompositeIndex_State int32
-
-const (
- CompositeIndex_WRITE_ONLY CompositeIndex_State = 1
- CompositeIndex_READ_WRITE CompositeIndex_State = 2
- CompositeIndex_DELETED CompositeIndex_State = 3
- CompositeIndex_ERROR CompositeIndex_State = 4
-)
-
-var CompositeIndex_State_name = map[int32]string{
- 1: "WRITE_ONLY",
- 2: "READ_WRITE",
- 3: "DELETED",
- 4: "ERROR",
-}
-var CompositeIndex_State_value = map[string]int32{
- "WRITE_ONLY": 1,
- "READ_WRITE": 2,
- "DELETED": 3,
- "ERROR": 4,
-}
-
-func (x CompositeIndex_State) Enum() *CompositeIndex_State {
- p := new(CompositeIndex_State)
- *p = x
- return p
-}
-func (x CompositeIndex_State) String() string {
- return proto.EnumName(CompositeIndex_State_name, int32(x))
-}
-func (x *CompositeIndex_State) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(CompositeIndex_State_value, data, "CompositeIndex_State")
- if err != nil {
- return err
- }
- *x = CompositeIndex_State(value)
- return nil
-}
-
-type Snapshot_Status int32
-
-const (
- Snapshot_INACTIVE Snapshot_Status = 0
- Snapshot_ACTIVE Snapshot_Status = 1
-)
-
-var Snapshot_Status_name = map[int32]string{
- 0: "INACTIVE",
- 1: "ACTIVE",
-}
-var Snapshot_Status_value = map[string]int32{
- "INACTIVE": 0,
- "ACTIVE": 1,
-}
-
-func (x Snapshot_Status) Enum() *Snapshot_Status {
- p := new(Snapshot_Status)
- *p = x
- return p
-}
-func (x Snapshot_Status) String() string {
- return proto.EnumName(Snapshot_Status_name, int32(x))
-}
-func (x *Snapshot_Status) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Snapshot_Status_value, data, "Snapshot_Status")
- if err != nil {
- return err
- }
- *x = Snapshot_Status(value)
- return nil
-}
-
-type Query_Hint int32
-
-const (
- Query_ORDER_FIRST Query_Hint = 1
- Query_ANCESTOR_FIRST Query_Hint = 2
- Query_FILTER_FIRST Query_Hint = 3
-)
-
-var Query_Hint_name = map[int32]string{
- 1: "ORDER_FIRST",
- 2: "ANCESTOR_FIRST",
- 3: "FILTER_FIRST",
-}
-var Query_Hint_value = map[string]int32{
- "ORDER_FIRST": 1,
- "ANCESTOR_FIRST": 2,
- "FILTER_FIRST": 3,
-}
-
-func (x Query_Hint) Enum() *Query_Hint {
- p := new(Query_Hint)
- *p = x
- return p
-}
-func (x Query_Hint) String() string {
- return proto.EnumName(Query_Hint_name, int32(x))
-}
-func (x *Query_Hint) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Query_Hint_value, data, "Query_Hint")
- if err != nil {
- return err
- }
- *x = Query_Hint(value)
- return nil
-}
-
-type Query_Filter_Operator int32
-
-const (
- Query_Filter_LESS_THAN Query_Filter_Operator = 1
- Query_Filter_LESS_THAN_OR_EQUAL Query_Filter_Operator = 2
- Query_Filter_GREATER_THAN Query_Filter_Operator = 3
- Query_Filter_GREATER_THAN_OR_EQUAL Query_Filter_Operator = 4
- Query_Filter_EQUAL Query_Filter_Operator = 5
- Query_Filter_IN Query_Filter_Operator = 6
- Query_Filter_EXISTS Query_Filter_Operator = 7
-)
-
-var Query_Filter_Operator_name = map[int32]string{
- 1: "LESS_THAN",
- 2: "LESS_THAN_OR_EQUAL",
- 3: "GREATER_THAN",
- 4: "GREATER_THAN_OR_EQUAL",
- 5: "EQUAL",
- 6: "IN",
- 7: "EXISTS",
-}
-var Query_Filter_Operator_value = map[string]int32{
- "LESS_THAN": 1,
- "LESS_THAN_OR_EQUAL": 2,
- "GREATER_THAN": 3,
- "GREATER_THAN_OR_EQUAL": 4,
- "EQUAL": 5,
- "IN": 6,
- "EXISTS": 7,
-}
-
-func (x Query_Filter_Operator) Enum() *Query_Filter_Operator {
- p := new(Query_Filter_Operator)
- *p = x
- return p
-}
-func (x Query_Filter_Operator) String() string {
- return proto.EnumName(Query_Filter_Operator_name, int32(x))
-}
-func (x *Query_Filter_Operator) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Query_Filter_Operator_value, data, "Query_Filter_Operator")
- if err != nil {
- return err
- }
- *x = Query_Filter_Operator(value)
- return nil
-}
-
-type Query_Order_Direction int32
-
-const (
- Query_Order_ASCENDING Query_Order_Direction = 1
- Query_Order_DESCENDING Query_Order_Direction = 2
-)
-
-var Query_Order_Direction_name = map[int32]string{
- 1: "ASCENDING",
- 2: "DESCENDING",
-}
-var Query_Order_Direction_value = map[string]int32{
- "ASCENDING": 1,
- "DESCENDING": 2,
-}
-
-func (x Query_Order_Direction) Enum() *Query_Order_Direction {
- p := new(Query_Order_Direction)
- *p = x
- return p
-}
-func (x Query_Order_Direction) String() string {
- return proto.EnumName(Query_Order_Direction_name, int32(x))
-}
-func (x *Query_Order_Direction) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Query_Order_Direction_value, data, "Query_Order_Direction")
- if err != nil {
- return err
- }
- *x = Query_Order_Direction(value)
- return nil
-}
-
-type Error_ErrorCode int32
-
-const (
- Error_BAD_REQUEST Error_ErrorCode = 1
- Error_CONCURRENT_TRANSACTION Error_ErrorCode = 2
- Error_INTERNAL_ERROR Error_ErrorCode = 3
- Error_NEED_INDEX Error_ErrorCode = 4
- Error_TIMEOUT Error_ErrorCode = 5
- Error_PERMISSION_DENIED Error_ErrorCode = 6
- Error_BIGTABLE_ERROR Error_ErrorCode = 7
- Error_COMMITTED_BUT_STILL_APPLYING Error_ErrorCode = 8
- Error_CAPABILITY_DISABLED Error_ErrorCode = 9
- Error_TRY_ALTERNATE_BACKEND Error_ErrorCode = 10
- Error_SAFE_TIME_TOO_OLD Error_ErrorCode = 11
-)
-
-var Error_ErrorCode_name = map[int32]string{
- 1: "BAD_REQUEST",
- 2: "CONCURRENT_TRANSACTION",
- 3: "INTERNAL_ERROR",
- 4: "NEED_INDEX",
- 5: "TIMEOUT",
- 6: "PERMISSION_DENIED",
- 7: "BIGTABLE_ERROR",
- 8: "COMMITTED_BUT_STILL_APPLYING",
- 9: "CAPABILITY_DISABLED",
- 10: "TRY_ALTERNATE_BACKEND",
- 11: "SAFE_TIME_TOO_OLD",
-}
-var Error_ErrorCode_value = map[string]int32{
- "BAD_REQUEST": 1,
- "CONCURRENT_TRANSACTION": 2,
- "INTERNAL_ERROR": 3,
- "NEED_INDEX": 4,
- "TIMEOUT": 5,
- "PERMISSION_DENIED": 6,
- "BIGTABLE_ERROR": 7,
- "COMMITTED_BUT_STILL_APPLYING": 8,
- "CAPABILITY_DISABLED": 9,
- "TRY_ALTERNATE_BACKEND": 10,
- "SAFE_TIME_TOO_OLD": 11,
-}
-
-func (x Error_ErrorCode) Enum() *Error_ErrorCode {
- p := new(Error_ErrorCode)
- *p = x
- return p
-}
-func (x Error_ErrorCode) String() string {
- return proto.EnumName(Error_ErrorCode_name, int32(x))
-}
-func (x *Error_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Error_ErrorCode_value, data, "Error_ErrorCode")
- if err != nil {
- return err
- }
- *x = Error_ErrorCode(value)
- return nil
-}
-
-type PutRequest_AutoIdPolicy int32
-
-const (
- PutRequest_CURRENT PutRequest_AutoIdPolicy = 0
- PutRequest_SEQUENTIAL PutRequest_AutoIdPolicy = 1
-)
-
-var PutRequest_AutoIdPolicy_name = map[int32]string{
- 0: "CURRENT",
- 1: "SEQUENTIAL",
-}
-var PutRequest_AutoIdPolicy_value = map[string]int32{
- "CURRENT": 0,
- "SEQUENTIAL": 1,
-}
-
-func (x PutRequest_AutoIdPolicy) Enum() *PutRequest_AutoIdPolicy {
- p := new(PutRequest_AutoIdPolicy)
- *p = x
- return p
-}
-func (x PutRequest_AutoIdPolicy) String() string {
- return proto.EnumName(PutRequest_AutoIdPolicy_name, int32(x))
-}
-func (x *PutRequest_AutoIdPolicy) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(PutRequest_AutoIdPolicy_value, data, "PutRequest_AutoIdPolicy")
- if err != nil {
- return err
- }
- *x = PutRequest_AutoIdPolicy(value)
- return nil
-}
-
-type Action struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Action) Reset() { *m = Action{} }
-func (m *Action) String() string { return proto.CompactTextString(m) }
-func (*Action) ProtoMessage() {}
-
-type PropertyValue struct {
- Int64Value *int64 `protobuf:"varint,1,opt,name=int64Value" json:"int64Value,omitempty"`
- BooleanValue *bool `protobuf:"varint,2,opt,name=booleanValue" json:"booleanValue,omitempty"`
- StringValue *string `protobuf:"bytes,3,opt,name=stringValue" json:"stringValue,omitempty"`
- DoubleValue *float64 `protobuf:"fixed64,4,opt,name=doubleValue" json:"doubleValue,omitempty"`
- Pointvalue *PropertyValue_PointValue `protobuf:"group,5,opt,name=PointValue" json:"pointvalue,omitempty"`
- Uservalue *PropertyValue_UserValue `protobuf:"group,8,opt,name=UserValue" json:"uservalue,omitempty"`
- Referencevalue *PropertyValue_ReferenceValue `protobuf:"group,12,opt,name=ReferenceValue" json:"referencevalue,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PropertyValue) Reset() { *m = PropertyValue{} }
-func (m *PropertyValue) String() string { return proto.CompactTextString(m) }
-func (*PropertyValue) ProtoMessage() {}
-
-func (m *PropertyValue) GetInt64Value() int64 {
- if m != nil && m.Int64Value != nil {
- return *m.Int64Value
- }
- return 0
-}
-
-func (m *PropertyValue) GetBooleanValue() bool {
- if m != nil && m.BooleanValue != nil {
- return *m.BooleanValue
- }
- return false
-}
-
-func (m *PropertyValue) GetStringValue() string {
- if m != nil && m.StringValue != nil {
- return *m.StringValue
- }
- return ""
-}
-
-func (m *PropertyValue) GetDoubleValue() float64 {
- if m != nil && m.DoubleValue != nil {
- return *m.DoubleValue
- }
- return 0
-}
-
-func (m *PropertyValue) GetPointvalue() *PropertyValue_PointValue {
- if m != nil {
- return m.Pointvalue
- }
- return nil
-}
-
-func (m *PropertyValue) GetUservalue() *PropertyValue_UserValue {
- if m != nil {
- return m.Uservalue
- }
- return nil
-}
-
-func (m *PropertyValue) GetReferencevalue() *PropertyValue_ReferenceValue {
- if m != nil {
- return m.Referencevalue
- }
- return nil
-}
-
-type PropertyValue_PointValue struct {
- X *float64 `protobuf:"fixed64,6,req,name=x" json:"x,omitempty"`
- Y *float64 `protobuf:"fixed64,7,req,name=y" json:"y,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PropertyValue_PointValue) Reset() { *m = PropertyValue_PointValue{} }
-func (m *PropertyValue_PointValue) String() string { return proto.CompactTextString(m) }
-func (*PropertyValue_PointValue) ProtoMessage() {}
-
-func (m *PropertyValue_PointValue) GetX() float64 {
- if m != nil && m.X != nil {
- return *m.X
- }
- return 0
-}
-
-func (m *PropertyValue_PointValue) GetY() float64 {
- if m != nil && m.Y != nil {
- return *m.Y
- }
- return 0
-}
-
-type PropertyValue_UserValue struct {
- Email *string `protobuf:"bytes,9,req,name=email" json:"email,omitempty"`
- AuthDomain *string `protobuf:"bytes,10,req,name=auth_domain" json:"auth_domain,omitempty"`
- Nickname *string `protobuf:"bytes,11,opt,name=nickname" json:"nickname,omitempty"`
- FederatedIdentity *string `protobuf:"bytes,21,opt,name=federated_identity" json:"federated_identity,omitempty"`
- FederatedProvider *string `protobuf:"bytes,22,opt,name=federated_provider" json:"federated_provider,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PropertyValue_UserValue) Reset() { *m = PropertyValue_UserValue{} }
-func (m *PropertyValue_UserValue) String() string { return proto.CompactTextString(m) }
-func (*PropertyValue_UserValue) ProtoMessage() {}
-
-func (m *PropertyValue_UserValue) GetEmail() string {
- if m != nil && m.Email != nil {
- return *m.Email
- }
- return ""
-}
-
-func (m *PropertyValue_UserValue) GetAuthDomain() string {
- if m != nil && m.AuthDomain != nil {
- return *m.AuthDomain
- }
- return ""
-}
-
-func (m *PropertyValue_UserValue) GetNickname() string {
- if m != nil && m.Nickname != nil {
- return *m.Nickname
- }
- return ""
-}
-
-func (m *PropertyValue_UserValue) GetFederatedIdentity() string {
- if m != nil && m.FederatedIdentity != nil {
- return *m.FederatedIdentity
- }
- return ""
-}
-
-func (m *PropertyValue_UserValue) GetFederatedProvider() string {
- if m != nil && m.FederatedProvider != nil {
- return *m.FederatedProvider
- }
- return ""
-}
-
-type PropertyValue_ReferenceValue struct {
- App *string `protobuf:"bytes,13,req,name=app" json:"app,omitempty"`
- NameSpace *string `protobuf:"bytes,20,opt,name=name_space" json:"name_space,omitempty"`
- Pathelement []*PropertyValue_ReferenceValue_PathElement `protobuf:"group,14,rep,name=PathElement" json:"pathelement,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PropertyValue_ReferenceValue) Reset() { *m = PropertyValue_ReferenceValue{} }
-func (m *PropertyValue_ReferenceValue) String() string { return proto.CompactTextString(m) }
-func (*PropertyValue_ReferenceValue) ProtoMessage() {}
-
-func (m *PropertyValue_ReferenceValue) GetApp() string {
- if m != nil && m.App != nil {
- return *m.App
- }
- return ""
-}
-
-func (m *PropertyValue_ReferenceValue) GetNameSpace() string {
- if m != nil && m.NameSpace != nil {
- return *m.NameSpace
- }
- return ""
-}
-
-func (m *PropertyValue_ReferenceValue) GetPathelement() []*PropertyValue_ReferenceValue_PathElement {
- if m != nil {
- return m.Pathelement
- }
- return nil
-}
-
-type PropertyValue_ReferenceValue_PathElement struct {
- Type *string `protobuf:"bytes,15,req,name=type" json:"type,omitempty"`
- Id *int64 `protobuf:"varint,16,opt,name=id" json:"id,omitempty"`
- Name *string `protobuf:"bytes,17,opt,name=name" json:"name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PropertyValue_ReferenceValue_PathElement) Reset() {
- *m = PropertyValue_ReferenceValue_PathElement{}
-}
-func (m *PropertyValue_ReferenceValue_PathElement) String() string { return proto.CompactTextString(m) }
-func (*PropertyValue_ReferenceValue_PathElement) ProtoMessage() {}
-
-func (m *PropertyValue_ReferenceValue_PathElement) GetType() string {
- if m != nil && m.Type != nil {
- return *m.Type
- }
- return ""
-}
-
-func (m *PropertyValue_ReferenceValue_PathElement) GetId() int64 {
- if m != nil && m.Id != nil {
- return *m.Id
- }
- return 0
-}
-
-func (m *PropertyValue_ReferenceValue_PathElement) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-type Property struct {
- Meaning *Property_Meaning `protobuf:"varint,1,opt,name=meaning,enum=appengine.Property_Meaning,def=0" json:"meaning,omitempty"`
- MeaningUri *string `protobuf:"bytes,2,opt,name=meaning_uri" json:"meaning_uri,omitempty"`
- Name *string `protobuf:"bytes,3,req,name=name" json:"name,omitempty"`
- Value *PropertyValue `protobuf:"bytes,5,req,name=value" json:"value,omitempty"`
- Multiple *bool `protobuf:"varint,4,req,name=multiple" json:"multiple,omitempty"`
- Searchable *bool `protobuf:"varint,6,opt,name=searchable,def=0" json:"searchable,omitempty"`
- FtsTokenizationOption *Property_FtsTokenizationOption `protobuf:"varint,8,opt,name=fts_tokenization_option,enum=appengine.Property_FtsTokenizationOption" json:"fts_tokenization_option,omitempty"`
- Locale *string `protobuf:"bytes,9,opt,name=locale,def=en" json:"locale,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Property) Reset() { *m = Property{} }
-func (m *Property) String() string { return proto.CompactTextString(m) }
-func (*Property) ProtoMessage() {}
-
-const Default_Property_Meaning Property_Meaning = Property_NO_MEANING
-const Default_Property_Searchable bool = false
-const Default_Property_Locale string = "en"
-
-func (m *Property) GetMeaning() Property_Meaning {
- if m != nil && m.Meaning != nil {
- return *m.Meaning
- }
- return Default_Property_Meaning
-}
-
-func (m *Property) GetMeaningUri() string {
- if m != nil && m.MeaningUri != nil {
- return *m.MeaningUri
- }
- return ""
-}
-
-func (m *Property) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *Property) GetValue() *PropertyValue {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-func (m *Property) GetMultiple() bool {
- if m != nil && m.Multiple != nil {
- return *m.Multiple
- }
- return false
-}
-
-func (m *Property) GetSearchable() bool {
- if m != nil && m.Searchable != nil {
- return *m.Searchable
- }
- return Default_Property_Searchable
-}
-
-func (m *Property) GetFtsTokenizationOption() Property_FtsTokenizationOption {
- if m != nil && m.FtsTokenizationOption != nil {
- return *m.FtsTokenizationOption
- }
- return Property_HTML
-}
-
-func (m *Property) GetLocale() string {
- if m != nil && m.Locale != nil {
- return *m.Locale
- }
- return Default_Property_Locale
-}
-
-type Path struct {
- Element []*Path_Element `protobuf:"group,1,rep,name=Element" json:"element,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Path) Reset() { *m = Path{} }
-func (m *Path) String() string { return proto.CompactTextString(m) }
-func (*Path) ProtoMessage() {}
-
-func (m *Path) GetElement() []*Path_Element {
- if m != nil {
- return m.Element
- }
- return nil
-}
-
-type Path_Element struct {
- Type *string `protobuf:"bytes,2,req,name=type" json:"type,omitempty"`
- Id *int64 `protobuf:"varint,3,opt,name=id" json:"id,omitempty"`
- Name *string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Path_Element) Reset() { *m = Path_Element{} }
-func (m *Path_Element) String() string { return proto.CompactTextString(m) }
-func (*Path_Element) ProtoMessage() {}
-
-func (m *Path_Element) GetType() string {
- if m != nil && m.Type != nil {
- return *m.Type
- }
- return ""
-}
-
-func (m *Path_Element) GetId() int64 {
- if m != nil && m.Id != nil {
- return *m.Id
- }
- return 0
-}
-
-func (m *Path_Element) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-type Reference struct {
- App *string `protobuf:"bytes,13,req,name=app" json:"app,omitempty"`
- NameSpace *string `protobuf:"bytes,20,opt,name=name_space" json:"name_space,omitempty"`
- Path *Path `protobuf:"bytes,14,req,name=path" json:"path,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Reference) Reset() { *m = Reference{} }
-func (m *Reference) String() string { return proto.CompactTextString(m) }
-func (*Reference) ProtoMessage() {}
-
-func (m *Reference) GetApp() string {
- if m != nil && m.App != nil {
- return *m.App
- }
- return ""
-}
-
-func (m *Reference) GetNameSpace() string {
- if m != nil && m.NameSpace != nil {
- return *m.NameSpace
- }
- return ""
-}
-
-func (m *Reference) GetPath() *Path {
- if m != nil {
- return m.Path
- }
- return nil
-}
-
-type User struct {
- Email *string `protobuf:"bytes,1,req,name=email" json:"email,omitempty"`
- AuthDomain *string `protobuf:"bytes,2,req,name=auth_domain" json:"auth_domain,omitempty"`
- Nickname *string `protobuf:"bytes,3,opt,name=nickname" json:"nickname,omitempty"`
- FederatedIdentity *string `protobuf:"bytes,6,opt,name=federated_identity" json:"federated_identity,omitempty"`
- FederatedProvider *string `protobuf:"bytes,7,opt,name=federated_provider" json:"federated_provider,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *User) Reset() { *m = User{} }
-func (m *User) String() string { return proto.CompactTextString(m) }
-func (*User) ProtoMessage() {}
-
-func (m *User) GetEmail() string {
- if m != nil && m.Email != nil {
- return *m.Email
- }
- return ""
-}
-
-func (m *User) GetAuthDomain() string {
- if m != nil && m.AuthDomain != nil {
- return *m.AuthDomain
- }
- return ""
-}
-
-func (m *User) GetNickname() string {
- if m != nil && m.Nickname != nil {
- return *m.Nickname
- }
- return ""
-}
-
-func (m *User) GetFederatedIdentity() string {
- if m != nil && m.FederatedIdentity != nil {
- return *m.FederatedIdentity
- }
- return ""
-}
-
-func (m *User) GetFederatedProvider() string {
- if m != nil && m.FederatedProvider != nil {
- return *m.FederatedProvider
- }
- return ""
-}
-
-type EntityProto struct {
- Key *Reference `protobuf:"bytes,13,req,name=key" json:"key,omitempty"`
- EntityGroup *Path `protobuf:"bytes,16,req,name=entity_group" json:"entity_group,omitempty"`
- Owner *User `protobuf:"bytes,17,opt,name=owner" json:"owner,omitempty"`
- Kind *EntityProto_Kind `protobuf:"varint,4,opt,name=kind,enum=appengine.EntityProto_Kind" json:"kind,omitempty"`
- KindUri *string `protobuf:"bytes,5,opt,name=kind_uri" json:"kind_uri,omitempty"`
- Property []*Property `protobuf:"bytes,14,rep,name=property" json:"property,omitempty"`
- RawProperty []*Property `protobuf:"bytes,15,rep,name=raw_property" json:"raw_property,omitempty"`
- Rank *int32 `protobuf:"varint,18,opt,name=rank" json:"rank,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *EntityProto) Reset() { *m = EntityProto{} }
-func (m *EntityProto) String() string { return proto.CompactTextString(m) }
-func (*EntityProto) ProtoMessage() {}
-
-func (m *EntityProto) GetKey() *Reference {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *EntityProto) GetEntityGroup() *Path {
- if m != nil {
- return m.EntityGroup
- }
- return nil
-}
-
-func (m *EntityProto) GetOwner() *User {
- if m != nil {
- return m.Owner
- }
- return nil
-}
-
-func (m *EntityProto) GetKind() EntityProto_Kind {
- if m != nil && m.Kind != nil {
- return *m.Kind
- }
- return EntityProto_GD_CONTACT
-}
-
-func (m *EntityProto) GetKindUri() string {
- if m != nil && m.KindUri != nil {
- return *m.KindUri
- }
- return ""
-}
-
-func (m *EntityProto) GetProperty() []*Property {
- if m != nil {
- return m.Property
- }
- return nil
-}
-
-func (m *EntityProto) GetRawProperty() []*Property {
- if m != nil {
- return m.RawProperty
- }
- return nil
-}
-
-func (m *EntityProto) GetRank() int32 {
- if m != nil && m.Rank != nil {
- return *m.Rank
- }
- return 0
-}
-
-type CompositeProperty struct {
- IndexId *int64 `protobuf:"varint,1,req,name=index_id" json:"index_id,omitempty"`
- Value []string `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompositeProperty) Reset() { *m = CompositeProperty{} }
-func (m *CompositeProperty) String() string { return proto.CompactTextString(m) }
-func (*CompositeProperty) ProtoMessage() {}
-
-func (m *CompositeProperty) GetIndexId() int64 {
- if m != nil && m.IndexId != nil {
- return *m.IndexId
- }
- return 0
-}
-
-func (m *CompositeProperty) GetValue() []string {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type Index struct {
- EntityType *string `protobuf:"bytes,1,req,name=entity_type" json:"entity_type,omitempty"`
- Ancestor *bool `protobuf:"varint,5,req,name=ancestor" json:"ancestor,omitempty"`
- Property []*Index_Property `protobuf:"group,2,rep,name=Property" json:"property,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Index) Reset() { *m = Index{} }
-func (m *Index) String() string { return proto.CompactTextString(m) }
-func (*Index) ProtoMessage() {}
-
-func (m *Index) GetEntityType() string {
- if m != nil && m.EntityType != nil {
- return *m.EntityType
- }
- return ""
-}
-
-func (m *Index) GetAncestor() bool {
- if m != nil && m.Ancestor != nil {
- return *m.Ancestor
- }
- return false
-}
-
-func (m *Index) GetProperty() []*Index_Property {
- if m != nil {
- return m.Property
- }
- return nil
-}
-
-type Index_Property struct {
- Name *string `protobuf:"bytes,3,req,name=name" json:"name,omitempty"`
- Direction *Index_Property_Direction `protobuf:"varint,4,opt,name=direction,enum=appengine.Index_Property_Direction,def=1" json:"direction,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Index_Property) Reset() { *m = Index_Property{} }
-func (m *Index_Property) String() string { return proto.CompactTextString(m) }
-func (*Index_Property) ProtoMessage() {}
-
-const Default_Index_Property_Direction Index_Property_Direction = Index_Property_ASCENDING
-
-func (m *Index_Property) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *Index_Property) GetDirection() Index_Property_Direction {
- if m != nil && m.Direction != nil {
- return *m.Direction
- }
- return Default_Index_Property_Direction
-}
-
-type CompositeIndex struct {
- AppId *string `protobuf:"bytes,1,req,name=app_id" json:"app_id,omitempty"`
- Id *int64 `protobuf:"varint,2,req,name=id" json:"id,omitempty"`
- Definition *Index `protobuf:"bytes,3,req,name=definition" json:"definition,omitempty"`
- State *CompositeIndex_State `protobuf:"varint,4,req,name=state,enum=appengine.CompositeIndex_State" json:"state,omitempty"`
- OnlyUseIfRequired *bool `protobuf:"varint,6,opt,name=only_use_if_required,def=0" json:"only_use_if_required,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompositeIndex) Reset() { *m = CompositeIndex{} }
-func (m *CompositeIndex) String() string { return proto.CompactTextString(m) }
-func (*CompositeIndex) ProtoMessage() {}
-
-const Default_CompositeIndex_OnlyUseIfRequired bool = false
-
-func (m *CompositeIndex) GetAppId() string {
- if m != nil && m.AppId != nil {
- return *m.AppId
- }
- return ""
-}
-
-func (m *CompositeIndex) GetId() int64 {
- if m != nil && m.Id != nil {
- return *m.Id
- }
- return 0
-}
-
-func (m *CompositeIndex) GetDefinition() *Index {
- if m != nil {
- return m.Definition
- }
- return nil
-}
-
-func (m *CompositeIndex) GetState() CompositeIndex_State {
- if m != nil && m.State != nil {
- return *m.State
- }
- return CompositeIndex_WRITE_ONLY
-}
-
-func (m *CompositeIndex) GetOnlyUseIfRequired() bool {
- if m != nil && m.OnlyUseIfRequired != nil {
- return *m.OnlyUseIfRequired
- }
- return Default_CompositeIndex_OnlyUseIfRequired
-}
-
-type IndexPostfix struct {
- IndexValue []*IndexPostfix_IndexValue `protobuf:"bytes,1,rep,name=index_value" json:"index_value,omitempty"`
- Key *Reference `protobuf:"bytes,2,opt,name=key" json:"key,omitempty"`
- Before *bool `protobuf:"varint,3,opt,name=before,def=1" json:"before,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexPostfix) Reset() { *m = IndexPostfix{} }
-func (m *IndexPostfix) String() string { return proto.CompactTextString(m) }
-func (*IndexPostfix) ProtoMessage() {}
-
-const Default_IndexPostfix_Before bool = true
-
-func (m *IndexPostfix) GetIndexValue() []*IndexPostfix_IndexValue {
- if m != nil {
- return m.IndexValue
- }
- return nil
-}
-
-func (m *IndexPostfix) GetKey() *Reference {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *IndexPostfix) GetBefore() bool {
- if m != nil && m.Before != nil {
- return *m.Before
- }
- return Default_IndexPostfix_Before
-}
-
-type IndexPostfix_IndexValue struct {
- PropertyName *string `protobuf:"bytes,1,req,name=property_name" json:"property_name,omitempty"`
- Value *PropertyValue `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexPostfix_IndexValue) Reset() { *m = IndexPostfix_IndexValue{} }
-func (m *IndexPostfix_IndexValue) String() string { return proto.CompactTextString(m) }
-func (*IndexPostfix_IndexValue) ProtoMessage() {}
-
-func (m *IndexPostfix_IndexValue) GetPropertyName() string {
- if m != nil && m.PropertyName != nil {
- return *m.PropertyName
- }
- return ""
-}
-
-func (m *IndexPostfix_IndexValue) GetValue() *PropertyValue {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type IndexPosition struct {
- Key *string `protobuf:"bytes,1,opt,name=key" json:"key,omitempty"`
- Before *bool `protobuf:"varint,2,opt,name=before,def=1" json:"before,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexPosition) Reset() { *m = IndexPosition{} }
-func (m *IndexPosition) String() string { return proto.CompactTextString(m) }
-func (*IndexPosition) ProtoMessage() {}
-
-const Default_IndexPosition_Before bool = true
-
-func (m *IndexPosition) GetKey() string {
- if m != nil && m.Key != nil {
- return *m.Key
- }
- return ""
-}
-
-func (m *IndexPosition) GetBefore() bool {
- if m != nil && m.Before != nil {
- return *m.Before
- }
- return Default_IndexPosition_Before
-}
-
-type Snapshot struct {
- Ts *int64 `protobuf:"varint,1,req,name=ts" json:"ts,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Snapshot) Reset() { *m = Snapshot{} }
-func (m *Snapshot) String() string { return proto.CompactTextString(m) }
-func (*Snapshot) ProtoMessage() {}
-
-func (m *Snapshot) GetTs() int64 {
- if m != nil && m.Ts != nil {
- return *m.Ts
- }
- return 0
-}
-
-type InternalHeader struct {
- Qos *string `protobuf:"bytes,1,opt,name=qos" json:"qos,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *InternalHeader) Reset() { *m = InternalHeader{} }
-func (m *InternalHeader) String() string { return proto.CompactTextString(m) }
-func (*InternalHeader) ProtoMessage() {}
-
-func (m *InternalHeader) GetQos() string {
- if m != nil && m.Qos != nil {
- return *m.Qos
- }
- return ""
-}
-
-type Transaction struct {
- Header *InternalHeader `protobuf:"bytes,4,opt,name=header" json:"header,omitempty"`
- Handle *uint64 `protobuf:"fixed64,1,req,name=handle" json:"handle,omitempty"`
- App *string `protobuf:"bytes,2,req,name=app" json:"app,omitempty"`
- MarkChanges *bool `protobuf:"varint,3,opt,name=mark_changes,def=0" json:"mark_changes,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Transaction) Reset() { *m = Transaction{} }
-func (m *Transaction) String() string { return proto.CompactTextString(m) }
-func (*Transaction) ProtoMessage() {}
-
-const Default_Transaction_MarkChanges bool = false
-
-func (m *Transaction) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *Transaction) GetHandle() uint64 {
- if m != nil && m.Handle != nil {
- return *m.Handle
- }
- return 0
-}
-
-func (m *Transaction) GetApp() string {
- if m != nil && m.App != nil {
- return *m.App
- }
- return ""
-}
-
-func (m *Transaction) GetMarkChanges() bool {
- if m != nil && m.MarkChanges != nil {
- return *m.MarkChanges
- }
- return Default_Transaction_MarkChanges
-}
-
-type Query struct {
- Header *InternalHeader `protobuf:"bytes,39,opt,name=header" json:"header,omitempty"`
- App *string `protobuf:"bytes,1,req,name=app" json:"app,omitempty"`
- NameSpace *string `protobuf:"bytes,29,opt,name=name_space" json:"name_space,omitempty"`
- Kind *string `protobuf:"bytes,3,opt,name=kind" json:"kind,omitempty"`
- Ancestor *Reference `protobuf:"bytes,17,opt,name=ancestor" json:"ancestor,omitempty"`
- Filter []*Query_Filter `protobuf:"group,4,rep,name=Filter" json:"filter,omitempty"`
- SearchQuery *string `protobuf:"bytes,8,opt,name=search_query" json:"search_query,omitempty"`
- Order []*Query_Order `protobuf:"group,9,rep,name=Order" json:"order,omitempty"`
- Hint *Query_Hint `protobuf:"varint,18,opt,name=hint,enum=appengine.Query_Hint" json:"hint,omitempty"`
- Count *int32 `protobuf:"varint,23,opt,name=count" json:"count,omitempty"`
- Offset *int32 `protobuf:"varint,12,opt,name=offset,def=0" json:"offset,omitempty"`
- Limit *int32 `protobuf:"varint,16,opt,name=limit" json:"limit,omitempty"`
- CompiledCursor *CompiledCursor `protobuf:"bytes,30,opt,name=compiled_cursor" json:"compiled_cursor,omitempty"`
- EndCompiledCursor *CompiledCursor `protobuf:"bytes,31,opt,name=end_compiled_cursor" json:"end_compiled_cursor,omitempty"`
- CompositeIndex []*CompositeIndex `protobuf:"bytes,19,rep,name=composite_index" json:"composite_index,omitempty"`
- RequirePerfectPlan *bool `protobuf:"varint,20,opt,name=require_perfect_plan,def=0" json:"require_perfect_plan,omitempty"`
- KeysOnly *bool `protobuf:"varint,21,opt,name=keys_only,def=0" json:"keys_only,omitempty"`
- Transaction *Transaction `protobuf:"bytes,22,opt,name=transaction" json:"transaction,omitempty"`
- Compile *bool `protobuf:"varint,25,opt,name=compile,def=0" json:"compile,omitempty"`
- FailoverMs *int64 `protobuf:"varint,26,opt,name=failover_ms" json:"failover_ms,omitempty"`
- Strong *bool `protobuf:"varint,32,opt,name=strong" json:"strong,omitempty"`
- PropertyName []string `protobuf:"bytes,33,rep,name=property_name" json:"property_name,omitempty"`
- GroupByPropertyName []string `protobuf:"bytes,34,rep,name=group_by_property_name" json:"group_by_property_name,omitempty"`
- Distinct *bool `protobuf:"varint,24,opt,name=distinct" json:"distinct,omitempty"`
- MinSafeTimeSeconds *int64 `protobuf:"varint,35,opt,name=min_safe_time_seconds" json:"min_safe_time_seconds,omitempty"`
- SafeReplicaName []string `protobuf:"bytes,36,rep,name=safe_replica_name" json:"safe_replica_name,omitempty"`
- PersistOffset *bool `protobuf:"varint,37,opt,name=persist_offset,def=0" json:"persist_offset,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Query) Reset() { *m = Query{} }
-func (m *Query) String() string { return proto.CompactTextString(m) }
-func (*Query) ProtoMessage() {}
-
-const Default_Query_Offset int32 = 0
-const Default_Query_RequirePerfectPlan bool = false
-const Default_Query_KeysOnly bool = false
-const Default_Query_Compile bool = false
-const Default_Query_PersistOffset bool = false
-
-func (m *Query) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *Query) GetApp() string {
- if m != nil && m.App != nil {
- return *m.App
- }
- return ""
-}
-
-func (m *Query) GetNameSpace() string {
- if m != nil && m.NameSpace != nil {
- return *m.NameSpace
- }
- return ""
-}
-
-func (m *Query) GetKind() string {
- if m != nil && m.Kind != nil {
- return *m.Kind
- }
- return ""
-}
-
-func (m *Query) GetAncestor() *Reference {
- if m != nil {
- return m.Ancestor
- }
- return nil
-}
-
-func (m *Query) GetFilter() []*Query_Filter {
- if m != nil {
- return m.Filter
- }
- return nil
-}
-
-func (m *Query) GetSearchQuery() string {
- if m != nil && m.SearchQuery != nil {
- return *m.SearchQuery
- }
- return ""
-}
-
-func (m *Query) GetOrder() []*Query_Order {
- if m != nil {
- return m.Order
- }
- return nil
-}
-
-func (m *Query) GetHint() Query_Hint {
- if m != nil && m.Hint != nil {
- return *m.Hint
- }
- return Query_ORDER_FIRST
-}
-
-func (m *Query) GetCount() int32 {
- if m != nil && m.Count != nil {
- return *m.Count
- }
- return 0
-}
-
-func (m *Query) GetOffset() int32 {
- if m != nil && m.Offset != nil {
- return *m.Offset
- }
- return Default_Query_Offset
-}
-
-func (m *Query) GetLimit() int32 {
- if m != nil && m.Limit != nil {
- return *m.Limit
- }
- return 0
-}
-
-func (m *Query) GetCompiledCursor() *CompiledCursor {
- if m != nil {
- return m.CompiledCursor
- }
- return nil
-}
-
-func (m *Query) GetEndCompiledCursor() *CompiledCursor {
- if m != nil {
- return m.EndCompiledCursor
- }
- return nil
-}
-
-func (m *Query) GetCompositeIndex() []*CompositeIndex {
- if m != nil {
- return m.CompositeIndex
- }
- return nil
-}
-
-func (m *Query) GetRequirePerfectPlan() bool {
- if m != nil && m.RequirePerfectPlan != nil {
- return *m.RequirePerfectPlan
- }
- return Default_Query_RequirePerfectPlan
-}
-
-func (m *Query) GetKeysOnly() bool {
- if m != nil && m.KeysOnly != nil {
- return *m.KeysOnly
- }
- return Default_Query_KeysOnly
-}
-
-func (m *Query) GetTransaction() *Transaction {
- if m != nil {
- return m.Transaction
- }
- return nil
-}
-
-func (m *Query) GetCompile() bool {
- if m != nil && m.Compile != nil {
- return *m.Compile
- }
- return Default_Query_Compile
-}
-
-func (m *Query) GetFailoverMs() int64 {
- if m != nil && m.FailoverMs != nil {
- return *m.FailoverMs
- }
- return 0
-}
-
-func (m *Query) GetStrong() bool {
- if m != nil && m.Strong != nil {
- return *m.Strong
- }
- return false
-}
-
-func (m *Query) GetPropertyName() []string {
- if m != nil {
- return m.PropertyName
- }
- return nil
-}
-
-func (m *Query) GetGroupByPropertyName() []string {
- if m != nil {
- return m.GroupByPropertyName
- }
- return nil
-}
-
-func (m *Query) GetDistinct() bool {
- if m != nil && m.Distinct != nil {
- return *m.Distinct
- }
- return false
-}
-
-func (m *Query) GetMinSafeTimeSeconds() int64 {
- if m != nil && m.MinSafeTimeSeconds != nil {
- return *m.MinSafeTimeSeconds
- }
- return 0
-}
-
-func (m *Query) GetSafeReplicaName() []string {
- if m != nil {
- return m.SafeReplicaName
- }
- return nil
-}
-
-func (m *Query) GetPersistOffset() bool {
- if m != nil && m.PersistOffset != nil {
- return *m.PersistOffset
- }
- return Default_Query_PersistOffset
-}
-
-type Query_Filter struct {
- Op *Query_Filter_Operator `protobuf:"varint,6,req,name=op,enum=appengine.Query_Filter_Operator" json:"op,omitempty"`
- Property []*Property `protobuf:"bytes,14,rep,name=property" json:"property,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Query_Filter) Reset() { *m = Query_Filter{} }
-func (m *Query_Filter) String() string { return proto.CompactTextString(m) }
-func (*Query_Filter) ProtoMessage() {}
-
-func (m *Query_Filter) GetOp() Query_Filter_Operator {
- if m != nil && m.Op != nil {
- return *m.Op
- }
- return Query_Filter_LESS_THAN
-}
-
-func (m *Query_Filter) GetProperty() []*Property {
- if m != nil {
- return m.Property
- }
- return nil
-}
-
-type Query_Order struct {
- Property *string `protobuf:"bytes,10,req,name=property" json:"property,omitempty"`
- Direction *Query_Order_Direction `protobuf:"varint,11,opt,name=direction,enum=appengine.Query_Order_Direction,def=1" json:"direction,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Query_Order) Reset() { *m = Query_Order{} }
-func (m *Query_Order) String() string { return proto.CompactTextString(m) }
-func (*Query_Order) ProtoMessage() {}
-
-const Default_Query_Order_Direction Query_Order_Direction = Query_Order_ASCENDING
-
-func (m *Query_Order) GetProperty() string {
- if m != nil && m.Property != nil {
- return *m.Property
- }
- return ""
-}
-
-func (m *Query_Order) GetDirection() Query_Order_Direction {
- if m != nil && m.Direction != nil {
- return *m.Direction
- }
- return Default_Query_Order_Direction
-}
-
-type CompiledQuery struct {
- Primaryscan *CompiledQuery_PrimaryScan `protobuf:"group,1,req,name=PrimaryScan" json:"primaryscan,omitempty"`
- Mergejoinscan []*CompiledQuery_MergeJoinScan `protobuf:"group,7,rep,name=MergeJoinScan" json:"mergejoinscan,omitempty"`
- IndexDef *Index `protobuf:"bytes,21,opt,name=index_def" json:"index_def,omitempty"`
- Offset *int32 `protobuf:"varint,10,opt,name=offset,def=0" json:"offset,omitempty"`
- Limit *int32 `protobuf:"varint,11,opt,name=limit" json:"limit,omitempty"`
- KeysOnly *bool `protobuf:"varint,12,req,name=keys_only" json:"keys_only,omitempty"`
- PropertyName []string `protobuf:"bytes,24,rep,name=property_name" json:"property_name,omitempty"`
- DistinctInfixSize *int32 `protobuf:"varint,25,opt,name=distinct_infix_size" json:"distinct_infix_size,omitempty"`
- Entityfilter *CompiledQuery_EntityFilter `protobuf:"group,13,opt,name=EntityFilter" json:"entityfilter,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompiledQuery) Reset() { *m = CompiledQuery{} }
-func (m *CompiledQuery) String() string { return proto.CompactTextString(m) }
-func (*CompiledQuery) ProtoMessage() {}
-
-const Default_CompiledQuery_Offset int32 = 0
-
-func (m *CompiledQuery) GetPrimaryscan() *CompiledQuery_PrimaryScan {
- if m != nil {
- return m.Primaryscan
- }
- return nil
-}
-
-func (m *CompiledQuery) GetMergejoinscan() []*CompiledQuery_MergeJoinScan {
- if m != nil {
- return m.Mergejoinscan
- }
- return nil
-}
-
-func (m *CompiledQuery) GetIndexDef() *Index {
- if m != nil {
- return m.IndexDef
- }
- return nil
-}
-
-func (m *CompiledQuery) GetOffset() int32 {
- if m != nil && m.Offset != nil {
- return *m.Offset
- }
- return Default_CompiledQuery_Offset
-}
-
-func (m *CompiledQuery) GetLimit() int32 {
- if m != nil && m.Limit != nil {
- return *m.Limit
- }
- return 0
-}
-
-func (m *CompiledQuery) GetKeysOnly() bool {
- if m != nil && m.KeysOnly != nil {
- return *m.KeysOnly
- }
- return false
-}
-
-func (m *CompiledQuery) GetPropertyName() []string {
- if m != nil {
- return m.PropertyName
- }
- return nil
-}
-
-func (m *CompiledQuery) GetDistinctInfixSize() int32 {
- if m != nil && m.DistinctInfixSize != nil {
- return *m.DistinctInfixSize
- }
- return 0
-}
-
-func (m *CompiledQuery) GetEntityfilter() *CompiledQuery_EntityFilter {
- if m != nil {
- return m.Entityfilter
- }
- return nil
-}
-
-type CompiledQuery_PrimaryScan struct {
- IndexName *string `protobuf:"bytes,2,opt,name=index_name" json:"index_name,omitempty"`
- StartKey *string `protobuf:"bytes,3,opt,name=start_key" json:"start_key,omitempty"`
- StartInclusive *bool `protobuf:"varint,4,opt,name=start_inclusive" json:"start_inclusive,omitempty"`
- EndKey *string `protobuf:"bytes,5,opt,name=end_key" json:"end_key,omitempty"`
- EndInclusive *bool `protobuf:"varint,6,opt,name=end_inclusive" json:"end_inclusive,omitempty"`
- StartPostfixValue []string `protobuf:"bytes,22,rep,name=start_postfix_value" json:"start_postfix_value,omitempty"`
- EndPostfixValue []string `protobuf:"bytes,23,rep,name=end_postfix_value" json:"end_postfix_value,omitempty"`
- EndUnappliedLogTimestampUs *int64 `protobuf:"varint,19,opt,name=end_unapplied_log_timestamp_us" json:"end_unapplied_log_timestamp_us,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompiledQuery_PrimaryScan) Reset() { *m = CompiledQuery_PrimaryScan{} }
-func (m *CompiledQuery_PrimaryScan) String() string { return proto.CompactTextString(m) }
-func (*CompiledQuery_PrimaryScan) ProtoMessage() {}
-
-func (m *CompiledQuery_PrimaryScan) GetIndexName() string {
- if m != nil && m.IndexName != nil {
- return *m.IndexName
- }
- return ""
-}
-
-func (m *CompiledQuery_PrimaryScan) GetStartKey() string {
- if m != nil && m.StartKey != nil {
- return *m.StartKey
- }
- return ""
-}
-
-func (m *CompiledQuery_PrimaryScan) GetStartInclusive() bool {
- if m != nil && m.StartInclusive != nil {
- return *m.StartInclusive
- }
- return false
-}
-
-func (m *CompiledQuery_PrimaryScan) GetEndKey() string {
- if m != nil && m.EndKey != nil {
- return *m.EndKey
- }
- return ""
-}
-
-func (m *CompiledQuery_PrimaryScan) GetEndInclusive() bool {
- if m != nil && m.EndInclusive != nil {
- return *m.EndInclusive
- }
- return false
-}
-
-func (m *CompiledQuery_PrimaryScan) GetStartPostfixValue() []string {
- if m != nil {
- return m.StartPostfixValue
- }
- return nil
-}
-
-func (m *CompiledQuery_PrimaryScan) GetEndPostfixValue() []string {
- if m != nil {
- return m.EndPostfixValue
- }
- return nil
-}
-
-func (m *CompiledQuery_PrimaryScan) GetEndUnappliedLogTimestampUs() int64 {
- if m != nil && m.EndUnappliedLogTimestampUs != nil {
- return *m.EndUnappliedLogTimestampUs
- }
- return 0
-}
-
-type CompiledQuery_MergeJoinScan struct {
- IndexName *string `protobuf:"bytes,8,req,name=index_name" json:"index_name,omitempty"`
- PrefixValue []string `protobuf:"bytes,9,rep,name=prefix_value" json:"prefix_value,omitempty"`
- ValuePrefix *bool `protobuf:"varint,20,opt,name=value_prefix,def=0" json:"value_prefix,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompiledQuery_MergeJoinScan) Reset() { *m = CompiledQuery_MergeJoinScan{} }
-func (m *CompiledQuery_MergeJoinScan) String() string { return proto.CompactTextString(m) }
-func (*CompiledQuery_MergeJoinScan) ProtoMessage() {}
-
-const Default_CompiledQuery_MergeJoinScan_ValuePrefix bool = false
-
-func (m *CompiledQuery_MergeJoinScan) GetIndexName() string {
- if m != nil && m.IndexName != nil {
- return *m.IndexName
- }
- return ""
-}
-
-func (m *CompiledQuery_MergeJoinScan) GetPrefixValue() []string {
- if m != nil {
- return m.PrefixValue
- }
- return nil
-}
-
-func (m *CompiledQuery_MergeJoinScan) GetValuePrefix() bool {
- if m != nil && m.ValuePrefix != nil {
- return *m.ValuePrefix
- }
- return Default_CompiledQuery_MergeJoinScan_ValuePrefix
-}
-
-type CompiledQuery_EntityFilter struct {
- Distinct *bool `protobuf:"varint,14,opt,name=distinct,def=0" json:"distinct,omitempty"`
- Kind *string `protobuf:"bytes,17,opt,name=kind" json:"kind,omitempty"`
- Ancestor *Reference `protobuf:"bytes,18,opt,name=ancestor" json:"ancestor,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompiledQuery_EntityFilter) Reset() { *m = CompiledQuery_EntityFilter{} }
-func (m *CompiledQuery_EntityFilter) String() string { return proto.CompactTextString(m) }
-func (*CompiledQuery_EntityFilter) ProtoMessage() {}
-
-const Default_CompiledQuery_EntityFilter_Distinct bool = false
-
-func (m *CompiledQuery_EntityFilter) GetDistinct() bool {
- if m != nil && m.Distinct != nil {
- return *m.Distinct
- }
- return Default_CompiledQuery_EntityFilter_Distinct
-}
-
-func (m *CompiledQuery_EntityFilter) GetKind() string {
- if m != nil && m.Kind != nil {
- return *m.Kind
- }
- return ""
-}
-
-func (m *CompiledQuery_EntityFilter) GetAncestor() *Reference {
- if m != nil {
- return m.Ancestor
- }
- return nil
-}
-
-type CompiledCursor struct {
- Position *CompiledCursor_Position `protobuf:"group,2,opt,name=Position" json:"position,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompiledCursor) Reset() { *m = CompiledCursor{} }
-func (m *CompiledCursor) String() string { return proto.CompactTextString(m) }
-func (*CompiledCursor) ProtoMessage() {}
-
-func (m *CompiledCursor) GetPosition() *CompiledCursor_Position {
- if m != nil {
- return m.Position
- }
- return nil
-}
-
-type CompiledCursor_Position struct {
- StartKey *string `protobuf:"bytes,27,opt,name=start_key" json:"start_key,omitempty"`
- Indexvalue []*CompiledCursor_Position_IndexValue `protobuf:"group,29,rep,name=IndexValue" json:"indexvalue,omitempty"`
- Key *Reference `protobuf:"bytes,32,opt,name=key" json:"key,omitempty"`
- StartInclusive *bool `protobuf:"varint,28,opt,name=start_inclusive,def=1" json:"start_inclusive,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompiledCursor_Position) Reset() { *m = CompiledCursor_Position{} }
-func (m *CompiledCursor_Position) String() string { return proto.CompactTextString(m) }
-func (*CompiledCursor_Position) ProtoMessage() {}
-
-const Default_CompiledCursor_Position_StartInclusive bool = true
-
-func (m *CompiledCursor_Position) GetStartKey() string {
- if m != nil && m.StartKey != nil {
- return *m.StartKey
- }
- return ""
-}
-
-func (m *CompiledCursor_Position) GetIndexvalue() []*CompiledCursor_Position_IndexValue {
- if m != nil {
- return m.Indexvalue
- }
- return nil
-}
-
-func (m *CompiledCursor_Position) GetKey() *Reference {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *CompiledCursor_Position) GetStartInclusive() bool {
- if m != nil && m.StartInclusive != nil {
- return *m.StartInclusive
- }
- return Default_CompiledCursor_Position_StartInclusive
-}
-
-type CompiledCursor_Position_IndexValue struct {
- Property *string `protobuf:"bytes,30,opt,name=property" json:"property,omitempty"`
- Value *PropertyValue `protobuf:"bytes,31,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompiledCursor_Position_IndexValue) Reset() { *m = CompiledCursor_Position_IndexValue{} }
-func (m *CompiledCursor_Position_IndexValue) String() string { return proto.CompactTextString(m) }
-func (*CompiledCursor_Position_IndexValue) ProtoMessage() {}
-
-func (m *CompiledCursor_Position_IndexValue) GetProperty() string {
- if m != nil && m.Property != nil {
- return *m.Property
- }
- return ""
-}
-
-func (m *CompiledCursor_Position_IndexValue) GetValue() *PropertyValue {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type Cursor struct {
- Cursor *uint64 `protobuf:"fixed64,1,req,name=cursor" json:"cursor,omitempty"`
- App *string `protobuf:"bytes,2,opt,name=app" json:"app,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Cursor) Reset() { *m = Cursor{} }
-func (m *Cursor) String() string { return proto.CompactTextString(m) }
-func (*Cursor) ProtoMessage() {}
-
-func (m *Cursor) GetCursor() uint64 {
- if m != nil && m.Cursor != nil {
- return *m.Cursor
- }
- return 0
-}
-
-func (m *Cursor) GetApp() string {
- if m != nil && m.App != nil {
- return *m.App
- }
- return ""
-}
-
-type Error struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Error) Reset() { *m = Error{} }
-func (m *Error) String() string { return proto.CompactTextString(m) }
-func (*Error) ProtoMessage() {}
-
-type Cost struct {
- IndexWrites *int32 `protobuf:"varint,1,opt,name=index_writes" json:"index_writes,omitempty"`
- IndexWriteBytes *int32 `protobuf:"varint,2,opt,name=index_write_bytes" json:"index_write_bytes,omitempty"`
- EntityWrites *int32 `protobuf:"varint,3,opt,name=entity_writes" json:"entity_writes,omitempty"`
- EntityWriteBytes *int32 `protobuf:"varint,4,opt,name=entity_write_bytes" json:"entity_write_bytes,omitempty"`
- Commitcost *Cost_CommitCost `protobuf:"group,5,opt,name=CommitCost" json:"commitcost,omitempty"`
- ApproximateStorageDelta *int32 `protobuf:"varint,8,opt,name=approximate_storage_delta" json:"approximate_storage_delta,omitempty"`
- IdSequenceUpdates *int32 `protobuf:"varint,9,opt,name=id_sequence_updates" json:"id_sequence_updates,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Cost) Reset() { *m = Cost{} }
-func (m *Cost) String() string { return proto.CompactTextString(m) }
-func (*Cost) ProtoMessage() {}
-
-func (m *Cost) GetIndexWrites() int32 {
- if m != nil && m.IndexWrites != nil {
- return *m.IndexWrites
- }
- return 0
-}
-
-func (m *Cost) GetIndexWriteBytes() int32 {
- if m != nil && m.IndexWriteBytes != nil {
- return *m.IndexWriteBytes
- }
- return 0
-}
-
-func (m *Cost) GetEntityWrites() int32 {
- if m != nil && m.EntityWrites != nil {
- return *m.EntityWrites
- }
- return 0
-}
-
-func (m *Cost) GetEntityWriteBytes() int32 {
- if m != nil && m.EntityWriteBytes != nil {
- return *m.EntityWriteBytes
- }
- return 0
-}
-
-func (m *Cost) GetCommitcost() *Cost_CommitCost {
- if m != nil {
- return m.Commitcost
- }
- return nil
-}
-
-func (m *Cost) GetApproximateStorageDelta() int32 {
- if m != nil && m.ApproximateStorageDelta != nil {
- return *m.ApproximateStorageDelta
- }
- return 0
-}
-
-func (m *Cost) GetIdSequenceUpdates() int32 {
- if m != nil && m.IdSequenceUpdates != nil {
- return *m.IdSequenceUpdates
- }
- return 0
-}
-
-type Cost_CommitCost struct {
- RequestedEntityPuts *int32 `protobuf:"varint,6,opt,name=requested_entity_puts" json:"requested_entity_puts,omitempty"`
- RequestedEntityDeletes *int32 `protobuf:"varint,7,opt,name=requested_entity_deletes" json:"requested_entity_deletes,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Cost_CommitCost) Reset() { *m = Cost_CommitCost{} }
-func (m *Cost_CommitCost) String() string { return proto.CompactTextString(m) }
-func (*Cost_CommitCost) ProtoMessage() {}
-
-func (m *Cost_CommitCost) GetRequestedEntityPuts() int32 {
- if m != nil && m.RequestedEntityPuts != nil {
- return *m.RequestedEntityPuts
- }
- return 0
-}
-
-func (m *Cost_CommitCost) GetRequestedEntityDeletes() int32 {
- if m != nil && m.RequestedEntityDeletes != nil {
- return *m.RequestedEntityDeletes
- }
- return 0
-}
-
-type GetRequest struct {
- Header *InternalHeader `protobuf:"bytes,6,opt,name=header" json:"header,omitempty"`
- Key []*Reference `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
- Transaction *Transaction `protobuf:"bytes,2,opt,name=transaction" json:"transaction,omitempty"`
- FailoverMs *int64 `protobuf:"varint,3,opt,name=failover_ms" json:"failover_ms,omitempty"`
- Strong *bool `protobuf:"varint,4,opt,name=strong" json:"strong,omitempty"`
- AllowDeferred *bool `protobuf:"varint,5,opt,name=allow_deferred,def=0" json:"allow_deferred,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetRequest) Reset() { *m = GetRequest{} }
-func (m *GetRequest) String() string { return proto.CompactTextString(m) }
-func (*GetRequest) ProtoMessage() {}
-
-const Default_GetRequest_AllowDeferred bool = false
-
-func (m *GetRequest) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *GetRequest) GetKey() []*Reference {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *GetRequest) GetTransaction() *Transaction {
- if m != nil {
- return m.Transaction
- }
- return nil
-}
-
-func (m *GetRequest) GetFailoverMs() int64 {
- if m != nil && m.FailoverMs != nil {
- return *m.FailoverMs
- }
- return 0
-}
-
-func (m *GetRequest) GetStrong() bool {
- if m != nil && m.Strong != nil {
- return *m.Strong
- }
- return false
-}
-
-func (m *GetRequest) GetAllowDeferred() bool {
- if m != nil && m.AllowDeferred != nil {
- return *m.AllowDeferred
- }
- return Default_GetRequest_AllowDeferred
-}
-
-type GetResponse struct {
- Entity []*GetResponse_Entity `protobuf:"group,1,rep,name=Entity" json:"entity,omitempty"`
- Deferred []*Reference `protobuf:"bytes,5,rep,name=deferred" json:"deferred,omitempty"`
- InOrder *bool `protobuf:"varint,6,opt,name=in_order,def=1" json:"in_order,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetResponse) Reset() { *m = GetResponse{} }
-func (m *GetResponse) String() string { return proto.CompactTextString(m) }
-func (*GetResponse) ProtoMessage() {}
-
-const Default_GetResponse_InOrder bool = true
-
-func (m *GetResponse) GetEntity() []*GetResponse_Entity {
- if m != nil {
- return m.Entity
- }
- return nil
-}
-
-func (m *GetResponse) GetDeferred() []*Reference {
- if m != nil {
- return m.Deferred
- }
- return nil
-}
-
-func (m *GetResponse) GetInOrder() bool {
- if m != nil && m.InOrder != nil {
- return *m.InOrder
- }
- return Default_GetResponse_InOrder
-}
-
-type GetResponse_Entity struct {
- Entity *EntityProto `protobuf:"bytes,2,opt,name=entity" json:"entity,omitempty"`
- Key *Reference `protobuf:"bytes,4,opt,name=key" json:"key,omitempty"`
- Version *int64 `protobuf:"varint,3,opt,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetResponse_Entity) Reset() { *m = GetResponse_Entity{} }
-func (m *GetResponse_Entity) String() string { return proto.CompactTextString(m) }
-func (*GetResponse_Entity) ProtoMessage() {}
-
-func (m *GetResponse_Entity) GetEntity() *EntityProto {
- if m != nil {
- return m.Entity
- }
- return nil
-}
-
-func (m *GetResponse_Entity) GetKey() *Reference {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *GetResponse_Entity) GetVersion() int64 {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return 0
-}
-
-type PutRequest struct {
- Header *InternalHeader `protobuf:"bytes,11,opt,name=header" json:"header,omitempty"`
- Entity []*EntityProto `protobuf:"bytes,1,rep,name=entity" json:"entity,omitempty"`
- Transaction *Transaction `protobuf:"bytes,2,opt,name=transaction" json:"transaction,omitempty"`
- CompositeIndex []*CompositeIndex `protobuf:"bytes,3,rep,name=composite_index" json:"composite_index,omitempty"`
- Trusted *bool `protobuf:"varint,4,opt,name=trusted,def=0" json:"trusted,omitempty"`
- Force *bool `protobuf:"varint,7,opt,name=force,def=0" json:"force,omitempty"`
- MarkChanges *bool `protobuf:"varint,8,opt,name=mark_changes,def=0" json:"mark_changes,omitempty"`
- Snapshot []*Snapshot `protobuf:"bytes,9,rep,name=snapshot" json:"snapshot,omitempty"`
- AutoIdPolicy *PutRequest_AutoIdPolicy `protobuf:"varint,10,opt,name=auto_id_policy,enum=appengine.PutRequest_AutoIdPolicy,def=0" json:"auto_id_policy,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PutRequest) Reset() { *m = PutRequest{} }
-func (m *PutRequest) String() string { return proto.CompactTextString(m) }
-func (*PutRequest) ProtoMessage() {}
-
-const Default_PutRequest_Trusted bool = false
-const Default_PutRequest_Force bool = false
-const Default_PutRequest_MarkChanges bool = false
-const Default_PutRequest_AutoIdPolicy PutRequest_AutoIdPolicy = PutRequest_CURRENT
-
-func (m *PutRequest) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *PutRequest) GetEntity() []*EntityProto {
- if m != nil {
- return m.Entity
- }
- return nil
-}
-
-func (m *PutRequest) GetTransaction() *Transaction {
- if m != nil {
- return m.Transaction
- }
- return nil
-}
-
-func (m *PutRequest) GetCompositeIndex() []*CompositeIndex {
- if m != nil {
- return m.CompositeIndex
- }
- return nil
-}
-
-func (m *PutRequest) GetTrusted() bool {
- if m != nil && m.Trusted != nil {
- return *m.Trusted
- }
- return Default_PutRequest_Trusted
-}
-
-func (m *PutRequest) GetForce() bool {
- if m != nil && m.Force != nil {
- return *m.Force
- }
- return Default_PutRequest_Force
-}
-
-func (m *PutRequest) GetMarkChanges() bool {
- if m != nil && m.MarkChanges != nil {
- return *m.MarkChanges
- }
- return Default_PutRequest_MarkChanges
-}
-
-func (m *PutRequest) GetSnapshot() []*Snapshot {
- if m != nil {
- return m.Snapshot
- }
- return nil
-}
-
-func (m *PutRequest) GetAutoIdPolicy() PutRequest_AutoIdPolicy {
- if m != nil && m.AutoIdPolicy != nil {
- return *m.AutoIdPolicy
- }
- return Default_PutRequest_AutoIdPolicy
-}
-
-type PutResponse struct {
- Key []*Reference `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
- Cost *Cost `protobuf:"bytes,2,opt,name=cost" json:"cost,omitempty"`
- Version []int64 `protobuf:"varint,3,rep,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PutResponse) Reset() { *m = PutResponse{} }
-func (m *PutResponse) String() string { return proto.CompactTextString(m) }
-func (*PutResponse) ProtoMessage() {}
-
-func (m *PutResponse) GetKey() []*Reference {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *PutResponse) GetCost() *Cost {
- if m != nil {
- return m.Cost
- }
- return nil
-}
-
-func (m *PutResponse) GetVersion() []int64 {
- if m != nil {
- return m.Version
- }
- return nil
-}
-
-type TouchRequest struct {
- Header *InternalHeader `protobuf:"bytes,10,opt,name=header" json:"header,omitempty"`
- Key []*Reference `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
- CompositeIndex []*CompositeIndex `protobuf:"bytes,2,rep,name=composite_index" json:"composite_index,omitempty"`
- Force *bool `protobuf:"varint,3,opt,name=force,def=0" json:"force,omitempty"`
- Snapshot []*Snapshot `protobuf:"bytes,9,rep,name=snapshot" json:"snapshot,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TouchRequest) Reset() { *m = TouchRequest{} }
-func (m *TouchRequest) String() string { return proto.CompactTextString(m) }
-func (*TouchRequest) ProtoMessage() {}
-
-const Default_TouchRequest_Force bool = false
-
-func (m *TouchRequest) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *TouchRequest) GetKey() []*Reference {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *TouchRequest) GetCompositeIndex() []*CompositeIndex {
- if m != nil {
- return m.CompositeIndex
- }
- return nil
-}
-
-func (m *TouchRequest) GetForce() bool {
- if m != nil && m.Force != nil {
- return *m.Force
- }
- return Default_TouchRequest_Force
-}
-
-func (m *TouchRequest) GetSnapshot() []*Snapshot {
- if m != nil {
- return m.Snapshot
- }
- return nil
-}
-
-type TouchResponse struct {
- Cost *Cost `protobuf:"bytes,1,opt,name=cost" json:"cost,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TouchResponse) Reset() { *m = TouchResponse{} }
-func (m *TouchResponse) String() string { return proto.CompactTextString(m) }
-func (*TouchResponse) ProtoMessage() {}
-
-func (m *TouchResponse) GetCost() *Cost {
- if m != nil {
- return m.Cost
- }
- return nil
-}
-
-type DeleteRequest struct {
- Header *InternalHeader `protobuf:"bytes,10,opt,name=header" json:"header,omitempty"`
- Key []*Reference `protobuf:"bytes,6,rep,name=key" json:"key,omitempty"`
- Transaction *Transaction `protobuf:"bytes,5,opt,name=transaction" json:"transaction,omitempty"`
- Trusted *bool `protobuf:"varint,4,opt,name=trusted,def=0" json:"trusted,omitempty"`
- Force *bool `protobuf:"varint,7,opt,name=force,def=0" json:"force,omitempty"`
- MarkChanges *bool `protobuf:"varint,8,opt,name=mark_changes,def=0" json:"mark_changes,omitempty"`
- Snapshot []*Snapshot `protobuf:"bytes,9,rep,name=snapshot" json:"snapshot,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DeleteRequest) Reset() { *m = DeleteRequest{} }
-func (m *DeleteRequest) String() string { return proto.CompactTextString(m) }
-func (*DeleteRequest) ProtoMessage() {}
-
-const Default_DeleteRequest_Trusted bool = false
-const Default_DeleteRequest_Force bool = false
-const Default_DeleteRequest_MarkChanges bool = false
-
-func (m *DeleteRequest) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *DeleteRequest) GetKey() []*Reference {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *DeleteRequest) GetTransaction() *Transaction {
- if m != nil {
- return m.Transaction
- }
- return nil
-}
-
-func (m *DeleteRequest) GetTrusted() bool {
- if m != nil && m.Trusted != nil {
- return *m.Trusted
- }
- return Default_DeleteRequest_Trusted
-}
-
-func (m *DeleteRequest) GetForce() bool {
- if m != nil && m.Force != nil {
- return *m.Force
- }
- return Default_DeleteRequest_Force
-}
-
-func (m *DeleteRequest) GetMarkChanges() bool {
- if m != nil && m.MarkChanges != nil {
- return *m.MarkChanges
- }
- return Default_DeleteRequest_MarkChanges
-}
-
-func (m *DeleteRequest) GetSnapshot() []*Snapshot {
- if m != nil {
- return m.Snapshot
- }
- return nil
-}
-
-type DeleteResponse struct {
- Cost *Cost `protobuf:"bytes,1,opt,name=cost" json:"cost,omitempty"`
- Version []int64 `protobuf:"varint,3,rep,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DeleteResponse) Reset() { *m = DeleteResponse{} }
-func (m *DeleteResponse) String() string { return proto.CompactTextString(m) }
-func (*DeleteResponse) ProtoMessage() {}
-
-func (m *DeleteResponse) GetCost() *Cost {
- if m != nil {
- return m.Cost
- }
- return nil
-}
-
-func (m *DeleteResponse) GetVersion() []int64 {
- if m != nil {
- return m.Version
- }
- return nil
-}
-
-type NextRequest struct {
- Header *InternalHeader `protobuf:"bytes,5,opt,name=header" json:"header,omitempty"`
- Cursor *Cursor `protobuf:"bytes,1,req,name=cursor" json:"cursor,omitempty"`
- Count *int32 `protobuf:"varint,2,opt,name=count" json:"count,omitempty"`
- Offset *int32 `protobuf:"varint,4,opt,name=offset,def=0" json:"offset,omitempty"`
- Compile *bool `protobuf:"varint,3,opt,name=compile,def=0" json:"compile,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *NextRequest) Reset() { *m = NextRequest{} }
-func (m *NextRequest) String() string { return proto.CompactTextString(m) }
-func (*NextRequest) ProtoMessage() {}
-
-const Default_NextRequest_Offset int32 = 0
-const Default_NextRequest_Compile bool = false
-
-func (m *NextRequest) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *NextRequest) GetCursor() *Cursor {
- if m != nil {
- return m.Cursor
- }
- return nil
-}
-
-func (m *NextRequest) GetCount() int32 {
- if m != nil && m.Count != nil {
- return *m.Count
- }
- return 0
-}
-
-func (m *NextRequest) GetOffset() int32 {
- if m != nil && m.Offset != nil {
- return *m.Offset
- }
- return Default_NextRequest_Offset
-}
-
-func (m *NextRequest) GetCompile() bool {
- if m != nil && m.Compile != nil {
- return *m.Compile
- }
- return Default_NextRequest_Compile
-}
-
-type QueryResult struct {
- Cursor *Cursor `protobuf:"bytes,1,opt,name=cursor" json:"cursor,omitempty"`
- Result []*EntityProto `protobuf:"bytes,2,rep,name=result" json:"result,omitempty"`
- SkippedResults *int32 `protobuf:"varint,7,opt,name=skipped_results" json:"skipped_results,omitempty"`
- MoreResults *bool `protobuf:"varint,3,req,name=more_results" json:"more_results,omitempty"`
- KeysOnly *bool `protobuf:"varint,4,opt,name=keys_only" json:"keys_only,omitempty"`
- IndexOnly *bool `protobuf:"varint,9,opt,name=index_only" json:"index_only,omitempty"`
- SmallOps *bool `protobuf:"varint,10,opt,name=small_ops" json:"small_ops,omitempty"`
- CompiledQuery *CompiledQuery `protobuf:"bytes,5,opt,name=compiled_query" json:"compiled_query,omitempty"`
- CompiledCursor *CompiledCursor `protobuf:"bytes,6,opt,name=compiled_cursor" json:"compiled_cursor,omitempty"`
- Index []*CompositeIndex `protobuf:"bytes,8,rep,name=index" json:"index,omitempty"`
- Version []int64 `protobuf:"varint,11,rep,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *QueryResult) Reset() { *m = QueryResult{} }
-func (m *QueryResult) String() string { return proto.CompactTextString(m) }
-func (*QueryResult) ProtoMessage() {}
-
-func (m *QueryResult) GetCursor() *Cursor {
- if m != nil {
- return m.Cursor
- }
- return nil
-}
-
-func (m *QueryResult) GetResult() []*EntityProto {
- if m != nil {
- return m.Result
- }
- return nil
-}
-
-func (m *QueryResult) GetSkippedResults() int32 {
- if m != nil && m.SkippedResults != nil {
- return *m.SkippedResults
- }
- return 0
-}
-
-func (m *QueryResult) GetMoreResults() bool {
- if m != nil && m.MoreResults != nil {
- return *m.MoreResults
- }
- return false
-}
-
-func (m *QueryResult) GetKeysOnly() bool {
- if m != nil && m.KeysOnly != nil {
- return *m.KeysOnly
- }
- return false
-}
-
-func (m *QueryResult) GetIndexOnly() bool {
- if m != nil && m.IndexOnly != nil {
- return *m.IndexOnly
- }
- return false
-}
-
-func (m *QueryResult) GetSmallOps() bool {
- if m != nil && m.SmallOps != nil {
- return *m.SmallOps
- }
- return false
-}
-
-func (m *QueryResult) GetCompiledQuery() *CompiledQuery {
- if m != nil {
- return m.CompiledQuery
- }
- return nil
-}
-
-func (m *QueryResult) GetCompiledCursor() *CompiledCursor {
- if m != nil {
- return m.CompiledCursor
- }
- return nil
-}
-
-func (m *QueryResult) GetIndex() []*CompositeIndex {
- if m != nil {
- return m.Index
- }
- return nil
-}
-
-func (m *QueryResult) GetVersion() []int64 {
- if m != nil {
- return m.Version
- }
- return nil
-}
-
-type AllocateIdsRequest struct {
- Header *InternalHeader `protobuf:"bytes,4,opt,name=header" json:"header,omitempty"`
- ModelKey *Reference `protobuf:"bytes,1,opt,name=model_key" json:"model_key,omitempty"`
- Size *int64 `protobuf:"varint,2,opt,name=size" json:"size,omitempty"`
- Max *int64 `protobuf:"varint,3,opt,name=max" json:"max,omitempty"`
- Reserve []*Reference `protobuf:"bytes,5,rep,name=reserve" json:"reserve,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AllocateIdsRequest) Reset() { *m = AllocateIdsRequest{} }
-func (m *AllocateIdsRequest) String() string { return proto.CompactTextString(m) }
-func (*AllocateIdsRequest) ProtoMessage() {}
-
-func (m *AllocateIdsRequest) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *AllocateIdsRequest) GetModelKey() *Reference {
- if m != nil {
- return m.ModelKey
- }
- return nil
-}
-
-func (m *AllocateIdsRequest) GetSize() int64 {
- if m != nil && m.Size != nil {
- return *m.Size
- }
- return 0
-}
-
-func (m *AllocateIdsRequest) GetMax() int64 {
- if m != nil && m.Max != nil {
- return *m.Max
- }
- return 0
-}
-
-func (m *AllocateIdsRequest) GetReserve() []*Reference {
- if m != nil {
- return m.Reserve
- }
- return nil
-}
-
-type AllocateIdsResponse struct {
- Start *int64 `protobuf:"varint,1,req,name=start" json:"start,omitempty"`
- End *int64 `protobuf:"varint,2,req,name=end" json:"end,omitempty"`
- Cost *Cost `protobuf:"bytes,3,opt,name=cost" json:"cost,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AllocateIdsResponse) Reset() { *m = AllocateIdsResponse{} }
-func (m *AllocateIdsResponse) String() string { return proto.CompactTextString(m) }
-func (*AllocateIdsResponse) ProtoMessage() {}
-
-func (m *AllocateIdsResponse) GetStart() int64 {
- if m != nil && m.Start != nil {
- return *m.Start
- }
- return 0
-}
-
-func (m *AllocateIdsResponse) GetEnd() int64 {
- if m != nil && m.End != nil {
- return *m.End
- }
- return 0
-}
-
-func (m *AllocateIdsResponse) GetCost() *Cost {
- if m != nil {
- return m.Cost
- }
- return nil
-}
-
-type CompositeIndices struct {
- Index []*CompositeIndex `protobuf:"bytes,1,rep,name=index" json:"index,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompositeIndices) Reset() { *m = CompositeIndices{} }
-func (m *CompositeIndices) String() string { return proto.CompactTextString(m) }
-func (*CompositeIndices) ProtoMessage() {}
-
-func (m *CompositeIndices) GetIndex() []*CompositeIndex {
- if m != nil {
- return m.Index
- }
- return nil
-}
-
-type AddActionsRequest struct {
- Header *InternalHeader `protobuf:"bytes,3,opt,name=header" json:"header,omitempty"`
- Transaction *Transaction `protobuf:"bytes,1,req,name=transaction" json:"transaction,omitempty"`
- Action []*Action `protobuf:"bytes,2,rep,name=action" json:"action,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AddActionsRequest) Reset() { *m = AddActionsRequest{} }
-func (m *AddActionsRequest) String() string { return proto.CompactTextString(m) }
-func (*AddActionsRequest) ProtoMessage() {}
-
-func (m *AddActionsRequest) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *AddActionsRequest) GetTransaction() *Transaction {
- if m != nil {
- return m.Transaction
- }
- return nil
-}
-
-func (m *AddActionsRequest) GetAction() []*Action {
- if m != nil {
- return m.Action
- }
- return nil
-}
-
-type AddActionsResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AddActionsResponse) Reset() { *m = AddActionsResponse{} }
-func (m *AddActionsResponse) String() string { return proto.CompactTextString(m) }
-func (*AddActionsResponse) ProtoMessage() {}
-
-type BeginTransactionRequest struct {
- Header *InternalHeader `protobuf:"bytes,3,opt,name=header" json:"header,omitempty"`
- App *string `protobuf:"bytes,1,req,name=app" json:"app,omitempty"`
- AllowMultipleEg *bool `protobuf:"varint,2,opt,name=allow_multiple_eg,def=0" json:"allow_multiple_eg,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *BeginTransactionRequest) Reset() { *m = BeginTransactionRequest{} }
-func (m *BeginTransactionRequest) String() string { return proto.CompactTextString(m) }
-func (*BeginTransactionRequest) ProtoMessage() {}
-
-const Default_BeginTransactionRequest_AllowMultipleEg bool = false
-
-func (m *BeginTransactionRequest) GetHeader() *InternalHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *BeginTransactionRequest) GetApp() string {
- if m != nil && m.App != nil {
- return *m.App
- }
- return ""
-}
-
-func (m *BeginTransactionRequest) GetAllowMultipleEg() bool {
- if m != nil && m.AllowMultipleEg != nil {
- return *m.AllowMultipleEg
- }
- return Default_BeginTransactionRequest_AllowMultipleEg
-}
-
-type CommitResponse struct {
- Cost *Cost `protobuf:"bytes,1,opt,name=cost" json:"cost,omitempty"`
- Version []*CommitResponse_Version `protobuf:"group,3,rep,name=Version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CommitResponse) Reset() { *m = CommitResponse{} }
-func (m *CommitResponse) String() string { return proto.CompactTextString(m) }
-func (*CommitResponse) ProtoMessage() {}
-
-func (m *CommitResponse) GetCost() *Cost {
- if m != nil {
- return m.Cost
- }
- return nil
-}
-
-func (m *CommitResponse) GetVersion() []*CommitResponse_Version {
- if m != nil {
- return m.Version
- }
- return nil
-}
-
-type CommitResponse_Version struct {
- RootEntityKey *Reference `protobuf:"bytes,4,req,name=root_entity_key" json:"root_entity_key,omitempty"`
- Version *int64 `protobuf:"varint,5,req,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CommitResponse_Version) Reset() { *m = CommitResponse_Version{} }
-func (m *CommitResponse_Version) String() string { return proto.CompactTextString(m) }
-func (*CommitResponse_Version) ProtoMessage() {}
-
-func (m *CommitResponse_Version) GetRootEntityKey() *Reference {
- if m != nil {
- return m.RootEntityKey
- }
- return nil
-}
-
-func (m *CommitResponse_Version) GetVersion() int64 {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return 0
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto b/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto
deleted file mode 100644
index e76f126..0000000
--- a/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto
+++ /dev/null
@@ -1,541 +0,0 @@
-syntax = "proto2";
-option go_package = "datastore";
-
-package appengine;
-
-message Action{}
-
-message PropertyValue {
- optional int64 int64Value = 1;
- optional bool booleanValue = 2;
- optional string stringValue = 3;
- optional double doubleValue = 4;
-
- optional group PointValue = 5 {
- required double x = 6;
- required double y = 7;
- }
-
- optional group UserValue = 8 {
- required string email = 9;
- required string auth_domain = 10;
- optional string nickname = 11;
- optional string federated_identity = 21;
- optional string federated_provider = 22;
- }
-
- optional group ReferenceValue = 12 {
- required string app = 13;
- optional string name_space = 20;
- repeated group PathElement = 14 {
- required string type = 15;
- optional int64 id = 16;
- optional string name = 17;
- }
- }
-}
-
-message Property {
- enum Meaning {
- NO_MEANING = 0;
- BLOB = 14;
- TEXT = 15;
- BYTESTRING = 16;
-
- ATOM_CATEGORY = 1;
- ATOM_LINK = 2;
- ATOM_TITLE = 3;
- ATOM_CONTENT = 4;
- ATOM_SUMMARY = 5;
- ATOM_AUTHOR = 6;
-
- GD_WHEN = 7;
- GD_EMAIL = 8;
- GEORSS_POINT = 9;
- GD_IM = 10;
-
- GD_PHONENUMBER = 11;
- GD_POSTALADDRESS = 12;
-
- GD_RATING = 13;
-
- BLOBKEY = 17;
- ENTITY_PROTO = 19;
-
- INDEX_VALUE = 18;
- };
-
- optional Meaning meaning = 1 [default = NO_MEANING];
- optional string meaning_uri = 2;
-
- required string name = 3;
-
- required PropertyValue value = 5;
-
- required bool multiple = 4;
-
- optional bool searchable = 6 [default=false];
-
- enum FtsTokenizationOption {
- HTML = 1;
- ATOM = 2;
- }
-
- optional FtsTokenizationOption fts_tokenization_option = 8;
-
- optional string locale = 9 [default = "en"];
-}
-
-message Path {
- repeated group Element = 1 {
- required string type = 2;
- optional int64 id = 3;
- optional string name = 4;
- }
-}
-
-message Reference {
- required string app = 13;
- optional string name_space = 20;
- required Path path = 14;
-}
-
-message User {
- required string email = 1;
- required string auth_domain = 2;
- optional string nickname = 3;
- optional string federated_identity = 6;
- optional string federated_provider = 7;
-}
-
-message EntityProto {
- required Reference key = 13;
- required Path entity_group = 16;
- optional User owner = 17;
-
- enum Kind {
- GD_CONTACT = 1;
- GD_EVENT = 2;
- GD_MESSAGE = 3;
- }
- optional Kind kind = 4;
- optional string kind_uri = 5;
-
- repeated Property property = 14;
- repeated Property raw_property = 15;
-
- optional int32 rank = 18;
-}
-
-message CompositeProperty {
- required int64 index_id = 1;
- repeated string value = 2;
-}
-
-message Index {
- required string entity_type = 1;
- required bool ancestor = 5;
- repeated group Property = 2 {
- required string name = 3;
- enum Direction {
- ASCENDING = 1;
- DESCENDING = 2;
- }
- optional Direction direction = 4 [default = ASCENDING];
- }
-}
-
-message CompositeIndex {
- required string app_id = 1;
- required int64 id = 2;
- required Index definition = 3;
-
- enum State {
- WRITE_ONLY = 1;
- READ_WRITE = 2;
- DELETED = 3;
- ERROR = 4;
- }
- required State state = 4;
-
- optional bool only_use_if_required = 6 [default = false];
-}
-
-message IndexPostfix {
- message IndexValue {
- required string property_name = 1;
- required PropertyValue value = 2;
- }
-
- repeated IndexValue index_value = 1;
-
- optional Reference key = 2;
-
- optional bool before = 3 [default=true];
-}
-
-message IndexPosition {
- optional string key = 1;
-
- optional bool before = 2 [default=true];
-}
-
-message Snapshot {
- enum Status {
- INACTIVE = 0;
- ACTIVE = 1;
- }
-
- required int64 ts = 1;
-}
-
-message InternalHeader {
- optional string qos = 1;
-}
-
-message Transaction {
- optional InternalHeader header = 4;
- required fixed64 handle = 1;
- required string app = 2;
- optional bool mark_changes = 3 [default = false];
-}
-
-message Query {
- optional InternalHeader header = 39;
-
- required string app = 1;
- optional string name_space = 29;
-
- optional string kind = 3;
- optional Reference ancestor = 17;
-
- repeated group Filter = 4 {
- enum Operator {
- LESS_THAN = 1;
- LESS_THAN_OR_EQUAL = 2;
- GREATER_THAN = 3;
- GREATER_THAN_OR_EQUAL = 4;
- EQUAL = 5;
- IN = 6;
- EXISTS = 7;
- }
-
- required Operator op = 6;
- repeated Property property = 14;
- }
-
- optional string search_query = 8;
-
- repeated group Order = 9 {
- enum Direction {
- ASCENDING = 1;
- DESCENDING = 2;
- }
-
- required string property = 10;
- optional Direction direction = 11 [default = ASCENDING];
- }
-
- enum Hint {
- ORDER_FIRST = 1;
- ANCESTOR_FIRST = 2;
- FILTER_FIRST = 3;
- }
- optional Hint hint = 18;
-
- optional int32 count = 23;
-
- optional int32 offset = 12 [default = 0];
-
- optional int32 limit = 16;
-
- optional CompiledCursor compiled_cursor = 30;
- optional CompiledCursor end_compiled_cursor = 31;
-
- repeated CompositeIndex composite_index = 19;
-
- optional bool require_perfect_plan = 20 [default = false];
-
- optional bool keys_only = 21 [default = false];
-
- optional Transaction transaction = 22;
-
- optional bool compile = 25 [default = false];
-
- optional int64 failover_ms = 26;
-
- optional bool strong = 32;
-
- repeated string property_name = 33;
-
- repeated string group_by_property_name = 34;
-
- optional bool distinct = 24;
-
- optional int64 min_safe_time_seconds = 35;
-
- repeated string safe_replica_name = 36;
-
- optional bool persist_offset = 37 [default=false];
-}
-
-message CompiledQuery {
- required group PrimaryScan = 1 {
- optional string index_name = 2;
-
- optional string start_key = 3;
- optional bool start_inclusive = 4;
- optional string end_key = 5;
- optional bool end_inclusive = 6;
-
- repeated string start_postfix_value = 22;
- repeated string end_postfix_value = 23;
-
- optional int64 end_unapplied_log_timestamp_us = 19;
- }
-
- repeated group MergeJoinScan = 7 {
- required string index_name = 8;
-
- repeated string prefix_value = 9;
-
- optional bool value_prefix = 20 [default=false];
- }
-
- optional Index index_def = 21;
-
- optional int32 offset = 10 [default = 0];
-
- optional int32 limit = 11;
-
- required bool keys_only = 12;
-
- repeated string property_name = 24;
-
- optional int32 distinct_infix_size = 25;
-
- optional group EntityFilter = 13 {
- optional bool distinct = 14 [default=false];
-
- optional string kind = 17;
- optional Reference ancestor = 18;
- }
-}
-
-message CompiledCursor {
- optional group Position = 2 {
- optional string start_key = 27;
-
- repeated group IndexValue = 29 {
- optional string property = 30;
- required PropertyValue value = 31;
- }
-
- optional Reference key = 32;
-
- optional bool start_inclusive = 28 [default=true];
- }
-}
-
-message Cursor {
- required fixed64 cursor = 1;
-
- optional string app = 2;
-}
-
-message Error {
- enum ErrorCode {
- BAD_REQUEST = 1;
- CONCURRENT_TRANSACTION = 2;
- INTERNAL_ERROR = 3;
- NEED_INDEX = 4;
- TIMEOUT = 5;
- PERMISSION_DENIED = 6;
- BIGTABLE_ERROR = 7;
- COMMITTED_BUT_STILL_APPLYING = 8;
- CAPABILITY_DISABLED = 9;
- TRY_ALTERNATE_BACKEND = 10;
- SAFE_TIME_TOO_OLD = 11;
- }
-}
-
-message Cost {
- optional int32 index_writes = 1;
- optional int32 index_write_bytes = 2;
- optional int32 entity_writes = 3;
- optional int32 entity_write_bytes = 4;
- optional group CommitCost = 5 {
- optional int32 requested_entity_puts = 6;
- optional int32 requested_entity_deletes = 7;
- };
- optional int32 approximate_storage_delta = 8;
- optional int32 id_sequence_updates = 9;
-}
-
-message GetRequest {
- optional InternalHeader header = 6;
-
- repeated Reference key = 1;
- optional Transaction transaction = 2;
-
- optional int64 failover_ms = 3;
-
- optional bool strong = 4;
-
- optional bool allow_deferred = 5 [default=false];
-}
-
-message GetResponse {
- repeated group Entity = 1 {
- optional EntityProto entity = 2;
- optional Reference key = 4;
-
- optional int64 version = 3;
- }
-
- repeated Reference deferred = 5;
-
- optional bool in_order = 6 [default=true];
-}
-
-message PutRequest {
- optional InternalHeader header = 11;
-
- repeated EntityProto entity = 1;
- optional Transaction transaction = 2;
- repeated CompositeIndex composite_index = 3;
-
- optional bool trusted = 4 [default = false];
-
- optional bool force = 7 [default = false];
-
- optional bool mark_changes = 8 [default = false];
- repeated Snapshot snapshot = 9;
-
- enum AutoIdPolicy {
- CURRENT = 0;
- SEQUENTIAL = 1;
- }
- optional AutoIdPolicy auto_id_policy = 10 [default = CURRENT];
-}
-
-message PutResponse {
- repeated Reference key = 1;
- optional Cost cost = 2;
- repeated int64 version = 3;
-}
-
-message TouchRequest {
- optional InternalHeader header = 10;
-
- repeated Reference key = 1;
- repeated CompositeIndex composite_index = 2;
- optional bool force = 3 [default = false];
- repeated Snapshot snapshot = 9;
-}
-
-message TouchResponse {
- optional Cost cost = 1;
-}
-
-message DeleteRequest {
- optional InternalHeader header = 10;
-
- repeated Reference key = 6;
- optional Transaction transaction = 5;
-
- optional bool trusted = 4 [default = false];
-
- optional bool force = 7 [default = false];
-
- optional bool mark_changes = 8 [default = false];
- repeated Snapshot snapshot = 9;
-}
-
-message DeleteResponse {
- optional Cost cost = 1;
- repeated int64 version = 3;
-}
-
-message NextRequest {
- optional InternalHeader header = 5;
-
- required Cursor cursor = 1;
- optional int32 count = 2;
-
- optional int32 offset = 4 [default = 0];
-
- optional bool compile = 3 [default = false];
-}
-
-message QueryResult {
- optional Cursor cursor = 1;
-
- repeated EntityProto result = 2;
-
- optional int32 skipped_results = 7;
-
- required bool more_results = 3;
-
- optional bool keys_only = 4;
-
- optional bool index_only = 9;
-
- optional bool small_ops = 10;
-
- optional CompiledQuery compiled_query = 5;
-
- optional CompiledCursor compiled_cursor = 6;
-
- repeated CompositeIndex index = 8;
-
- repeated int64 version = 11;
-}
-
-message AllocateIdsRequest {
- optional InternalHeader header = 4;
-
- optional Reference model_key = 1;
-
- optional int64 size = 2;
-
- optional int64 max = 3;
-
- repeated Reference reserve = 5;
-}
-
-message AllocateIdsResponse {
- required int64 start = 1;
- required int64 end = 2;
- optional Cost cost = 3;
-}
-
-message CompositeIndices {
- repeated CompositeIndex index = 1;
-}
-
-message AddActionsRequest {
- optional InternalHeader header = 3;
-
- required Transaction transaction = 1;
- repeated Action action = 2;
-}
-
-message AddActionsResponse {
-}
-
-message BeginTransactionRequest {
- optional InternalHeader header = 3;
-
- required string app = 1;
- optional bool allow_multiple_eg = 2 [default = false];
-}
-
-message CommitResponse {
- optional Cost cost = 1;
-
- repeated group Version = 3 {
- required Reference root_entity_key = 4;
- required int64 version = 5;
- }
-}
diff --git a/vendor/google.golang.org/appengine/internal/identity.go b/vendor/google.golang.org/appengine/internal/identity.go
deleted file mode 100644
index d538701..0000000
--- a/vendor/google.golang.org/appengine/internal/identity.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package internal
-
-import netcontext "golang.org/x/net/context"
-
-// These functions are implementations of the wrapper functions
-// in ../appengine/identity.go. See that file for commentary.
-
-func AppID(c netcontext.Context) string {
- return appID(FullyQualifiedAppID(c))
-}
diff --git a/vendor/google.golang.org/appengine/internal/identity_classic.go b/vendor/google.golang.org/appengine/internal/identity_classic.go
deleted file mode 100644
index e6b9227..0000000
--- a/vendor/google.golang.org/appengine/internal/identity_classic.go
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build appengine
-
-package internal
-
-import (
- "appengine"
-
- netcontext "golang.org/x/net/context"
-)
-
-func DefaultVersionHostname(ctx netcontext.Context) string {
- return appengine.DefaultVersionHostname(fromContext(ctx))
-}
-
-func RequestID(ctx netcontext.Context) string { return appengine.RequestID(fromContext(ctx)) }
-func Datacenter(_ netcontext.Context) string { return appengine.Datacenter() }
-func ServerSoftware() string { return appengine.ServerSoftware() }
-func ModuleName(ctx netcontext.Context) string { return appengine.ModuleName(fromContext(ctx)) }
-func VersionID(ctx netcontext.Context) string { return appengine.VersionID(fromContext(ctx)) }
-func InstanceID() string { return appengine.InstanceID() }
-func IsDevAppServer() bool { return appengine.IsDevAppServer() }
-
-func fullyQualifiedAppID(ctx netcontext.Context) string { return fromContext(ctx).FullyQualifiedAppID() }
diff --git a/vendor/google.golang.org/appengine/internal/identity_vm.go b/vendor/google.golang.org/appengine/internal/identity_vm.go
deleted file mode 100644
index ebe68b7..0000000
--- a/vendor/google.golang.org/appengine/internal/identity_vm.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build !appengine
-
-package internal
-
-import (
- "net/http"
- "os"
-
- netcontext "golang.org/x/net/context"
-)
-
-// These functions are implementations of the wrapper functions
-// in ../appengine/identity.go. See that file for commentary.
-
-const (
- hDefaultVersionHostname = "X-AppEngine-Default-Version-Hostname"
- hRequestLogId = "X-AppEngine-Request-Log-Id"
- hDatacenter = "X-AppEngine-Datacenter"
-)
-
-func ctxHeaders(ctx netcontext.Context) http.Header {
- return fromContext(ctx).Request().Header
-}
-
-func DefaultVersionHostname(ctx netcontext.Context) string {
- return ctxHeaders(ctx).Get(hDefaultVersionHostname)
-}
-
-func RequestID(ctx netcontext.Context) string {
- return ctxHeaders(ctx).Get(hRequestLogId)
-}
-
-func Datacenter(ctx netcontext.Context) string {
- return ctxHeaders(ctx).Get(hDatacenter)
-}
-
-func ServerSoftware() string {
- // TODO(dsymonds): Remove fallback when we've verified this.
- if s := os.Getenv("SERVER_SOFTWARE"); s != "" {
- return s
- }
- return "Google App Engine/1.x.x"
-}
-
-// TODO(dsymonds): Remove the metadata fetches.
-
-func ModuleName(_ netcontext.Context) string {
- if s := os.Getenv("GAE_MODULE_NAME"); s != "" {
- return s
- }
- return string(mustGetMetadata("instance/attributes/gae_backend_name"))
-}
-
-func VersionID(_ netcontext.Context) string {
- if s1, s2 := os.Getenv("GAE_MODULE_VERSION"), os.Getenv("GAE_MINOR_VERSION"); s1 != "" && s2 != "" {
- return s1 + "." + s2
- }
- return string(mustGetMetadata("instance/attributes/gae_backend_version")) + "." + string(mustGetMetadata("instance/attributes/gae_backend_minor_version"))
-}
-
-func InstanceID() string {
- if s := os.Getenv("GAE_MODULE_INSTANCE"); s != "" {
- return s
- }
- return string(mustGetMetadata("instance/attributes/gae_backend_instance"))
-}
-
-func partitionlessAppID() string {
- // gae_project has everything except the partition prefix.
- appID := os.Getenv("GAE_LONG_APP_ID")
- if appID == "" {
- appID = string(mustGetMetadata("instance/attributes/gae_project"))
- }
- return appID
-}
-
-func fullyQualifiedAppID(_ netcontext.Context) string {
- appID := partitionlessAppID()
-
- part := os.Getenv("GAE_PARTITION")
- if part == "" {
- part = string(mustGetMetadata("instance/attributes/gae_partition"))
- }
-
- if part != "" {
- appID = part + "~" + appID
- }
- return appID
-}
-
-func IsDevAppServer() bool {
- return os.Getenv("RUN_WITH_DEVAPPSERVER") != ""
-}
diff --git a/vendor/google.golang.org/appengine/internal/image/images_service.pb.go b/vendor/google.golang.org/appengine/internal/image/images_service.pb.go
deleted file mode 100644
index ba7c722..0000000
--- a/vendor/google.golang.org/appengine/internal/image/images_service.pb.go
+++ /dev/null
@@ -1,845 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/image/images_service.proto
-// DO NOT EDIT!
-
-/*
-Package image is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/image/images_service.proto
-
-It has these top-level messages:
- ImagesServiceError
- ImagesServiceTransform
- Transform
- ImageData
- InputSettings
- OutputSettings
- ImagesTransformRequest
- ImagesTransformResponse
- CompositeImageOptions
- ImagesCanvas
- ImagesCompositeRequest
- ImagesCompositeResponse
- ImagesHistogramRequest
- ImagesHistogram
- ImagesHistogramResponse
- ImagesGetUrlBaseRequest
- ImagesGetUrlBaseResponse
- ImagesDeleteUrlBaseRequest
- ImagesDeleteUrlBaseResponse
-*/
-package image
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type ImagesServiceError_ErrorCode int32
-
-const (
- ImagesServiceError_UNSPECIFIED_ERROR ImagesServiceError_ErrorCode = 1
- ImagesServiceError_BAD_TRANSFORM_DATA ImagesServiceError_ErrorCode = 2
- ImagesServiceError_NOT_IMAGE ImagesServiceError_ErrorCode = 3
- ImagesServiceError_BAD_IMAGE_DATA ImagesServiceError_ErrorCode = 4
- ImagesServiceError_IMAGE_TOO_LARGE ImagesServiceError_ErrorCode = 5
- ImagesServiceError_INVALID_BLOB_KEY ImagesServiceError_ErrorCode = 6
- ImagesServiceError_ACCESS_DENIED ImagesServiceError_ErrorCode = 7
- ImagesServiceError_OBJECT_NOT_FOUND ImagesServiceError_ErrorCode = 8
-)
-
-var ImagesServiceError_ErrorCode_name = map[int32]string{
- 1: "UNSPECIFIED_ERROR",
- 2: "BAD_TRANSFORM_DATA",
- 3: "NOT_IMAGE",
- 4: "BAD_IMAGE_DATA",
- 5: "IMAGE_TOO_LARGE",
- 6: "INVALID_BLOB_KEY",
- 7: "ACCESS_DENIED",
- 8: "OBJECT_NOT_FOUND",
-}
-var ImagesServiceError_ErrorCode_value = map[string]int32{
- "UNSPECIFIED_ERROR": 1,
- "BAD_TRANSFORM_DATA": 2,
- "NOT_IMAGE": 3,
- "BAD_IMAGE_DATA": 4,
- "IMAGE_TOO_LARGE": 5,
- "INVALID_BLOB_KEY": 6,
- "ACCESS_DENIED": 7,
- "OBJECT_NOT_FOUND": 8,
-}
-
-func (x ImagesServiceError_ErrorCode) Enum() *ImagesServiceError_ErrorCode {
- p := new(ImagesServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x ImagesServiceError_ErrorCode) String() string {
- return proto.EnumName(ImagesServiceError_ErrorCode_name, int32(x))
-}
-func (x *ImagesServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(ImagesServiceError_ErrorCode_value, data, "ImagesServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = ImagesServiceError_ErrorCode(value)
- return nil
-}
-
-type ImagesServiceTransform_Type int32
-
-const (
- ImagesServiceTransform_RESIZE ImagesServiceTransform_Type = 1
- ImagesServiceTransform_ROTATE ImagesServiceTransform_Type = 2
- ImagesServiceTransform_HORIZONTAL_FLIP ImagesServiceTransform_Type = 3
- ImagesServiceTransform_VERTICAL_FLIP ImagesServiceTransform_Type = 4
- ImagesServiceTransform_CROP ImagesServiceTransform_Type = 5
- ImagesServiceTransform_IM_FEELING_LUCKY ImagesServiceTransform_Type = 6
-)
-
-var ImagesServiceTransform_Type_name = map[int32]string{
- 1: "RESIZE",
- 2: "ROTATE",
- 3: "HORIZONTAL_FLIP",
- 4: "VERTICAL_FLIP",
- 5: "CROP",
- 6: "IM_FEELING_LUCKY",
-}
-var ImagesServiceTransform_Type_value = map[string]int32{
- "RESIZE": 1,
- "ROTATE": 2,
- "HORIZONTAL_FLIP": 3,
- "VERTICAL_FLIP": 4,
- "CROP": 5,
- "IM_FEELING_LUCKY": 6,
-}
-
-func (x ImagesServiceTransform_Type) Enum() *ImagesServiceTransform_Type {
- p := new(ImagesServiceTransform_Type)
- *p = x
- return p
-}
-func (x ImagesServiceTransform_Type) String() string {
- return proto.EnumName(ImagesServiceTransform_Type_name, int32(x))
-}
-func (x *ImagesServiceTransform_Type) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(ImagesServiceTransform_Type_value, data, "ImagesServiceTransform_Type")
- if err != nil {
- return err
- }
- *x = ImagesServiceTransform_Type(value)
- return nil
-}
-
-type InputSettings_ORIENTATION_CORRECTION_TYPE int32
-
-const (
- InputSettings_UNCHANGED_ORIENTATION InputSettings_ORIENTATION_CORRECTION_TYPE = 0
- InputSettings_CORRECT_ORIENTATION InputSettings_ORIENTATION_CORRECTION_TYPE = 1
-)
-
-var InputSettings_ORIENTATION_CORRECTION_TYPE_name = map[int32]string{
- 0: "UNCHANGED_ORIENTATION",
- 1: "CORRECT_ORIENTATION",
-}
-var InputSettings_ORIENTATION_CORRECTION_TYPE_value = map[string]int32{
- "UNCHANGED_ORIENTATION": 0,
- "CORRECT_ORIENTATION": 1,
-}
-
-func (x InputSettings_ORIENTATION_CORRECTION_TYPE) Enum() *InputSettings_ORIENTATION_CORRECTION_TYPE {
- p := new(InputSettings_ORIENTATION_CORRECTION_TYPE)
- *p = x
- return p
-}
-func (x InputSettings_ORIENTATION_CORRECTION_TYPE) String() string {
- return proto.EnumName(InputSettings_ORIENTATION_CORRECTION_TYPE_name, int32(x))
-}
-func (x *InputSettings_ORIENTATION_CORRECTION_TYPE) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(InputSettings_ORIENTATION_CORRECTION_TYPE_value, data, "InputSettings_ORIENTATION_CORRECTION_TYPE")
- if err != nil {
- return err
- }
- *x = InputSettings_ORIENTATION_CORRECTION_TYPE(value)
- return nil
-}
-
-type OutputSettings_MIME_TYPE int32
-
-const (
- OutputSettings_PNG OutputSettings_MIME_TYPE = 0
- OutputSettings_JPEG OutputSettings_MIME_TYPE = 1
- OutputSettings_WEBP OutputSettings_MIME_TYPE = 2
-)
-
-var OutputSettings_MIME_TYPE_name = map[int32]string{
- 0: "PNG",
- 1: "JPEG",
- 2: "WEBP",
-}
-var OutputSettings_MIME_TYPE_value = map[string]int32{
- "PNG": 0,
- "JPEG": 1,
- "WEBP": 2,
-}
-
-func (x OutputSettings_MIME_TYPE) Enum() *OutputSettings_MIME_TYPE {
- p := new(OutputSettings_MIME_TYPE)
- *p = x
- return p
-}
-func (x OutputSettings_MIME_TYPE) String() string {
- return proto.EnumName(OutputSettings_MIME_TYPE_name, int32(x))
-}
-func (x *OutputSettings_MIME_TYPE) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(OutputSettings_MIME_TYPE_value, data, "OutputSettings_MIME_TYPE")
- if err != nil {
- return err
- }
- *x = OutputSettings_MIME_TYPE(value)
- return nil
-}
-
-type CompositeImageOptions_ANCHOR int32
-
-const (
- CompositeImageOptions_TOP_LEFT CompositeImageOptions_ANCHOR = 0
- CompositeImageOptions_TOP CompositeImageOptions_ANCHOR = 1
- CompositeImageOptions_TOP_RIGHT CompositeImageOptions_ANCHOR = 2
- CompositeImageOptions_LEFT CompositeImageOptions_ANCHOR = 3
- CompositeImageOptions_CENTER CompositeImageOptions_ANCHOR = 4
- CompositeImageOptions_RIGHT CompositeImageOptions_ANCHOR = 5
- CompositeImageOptions_BOTTOM_LEFT CompositeImageOptions_ANCHOR = 6
- CompositeImageOptions_BOTTOM CompositeImageOptions_ANCHOR = 7
- CompositeImageOptions_BOTTOM_RIGHT CompositeImageOptions_ANCHOR = 8
-)
-
-var CompositeImageOptions_ANCHOR_name = map[int32]string{
- 0: "TOP_LEFT",
- 1: "TOP",
- 2: "TOP_RIGHT",
- 3: "LEFT",
- 4: "CENTER",
- 5: "RIGHT",
- 6: "BOTTOM_LEFT",
- 7: "BOTTOM",
- 8: "BOTTOM_RIGHT",
-}
-var CompositeImageOptions_ANCHOR_value = map[string]int32{
- "TOP_LEFT": 0,
- "TOP": 1,
- "TOP_RIGHT": 2,
- "LEFT": 3,
- "CENTER": 4,
- "RIGHT": 5,
- "BOTTOM_LEFT": 6,
- "BOTTOM": 7,
- "BOTTOM_RIGHT": 8,
-}
-
-func (x CompositeImageOptions_ANCHOR) Enum() *CompositeImageOptions_ANCHOR {
- p := new(CompositeImageOptions_ANCHOR)
- *p = x
- return p
-}
-func (x CompositeImageOptions_ANCHOR) String() string {
- return proto.EnumName(CompositeImageOptions_ANCHOR_name, int32(x))
-}
-func (x *CompositeImageOptions_ANCHOR) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(CompositeImageOptions_ANCHOR_value, data, "CompositeImageOptions_ANCHOR")
- if err != nil {
- return err
- }
- *x = CompositeImageOptions_ANCHOR(value)
- return nil
-}
-
-type ImagesServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesServiceError) Reset() { *m = ImagesServiceError{} }
-func (m *ImagesServiceError) String() string { return proto.CompactTextString(m) }
-func (*ImagesServiceError) ProtoMessage() {}
-
-type ImagesServiceTransform struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesServiceTransform) Reset() { *m = ImagesServiceTransform{} }
-func (m *ImagesServiceTransform) String() string { return proto.CompactTextString(m) }
-func (*ImagesServiceTransform) ProtoMessage() {}
-
-type Transform struct {
- Width *int32 `protobuf:"varint,1,opt,name=width" json:"width,omitempty"`
- Height *int32 `protobuf:"varint,2,opt,name=height" json:"height,omitempty"`
- CropToFit *bool `protobuf:"varint,11,opt,name=crop_to_fit,def=0" json:"crop_to_fit,omitempty"`
- CropOffsetX *float32 `protobuf:"fixed32,12,opt,name=crop_offset_x,def=0.5" json:"crop_offset_x,omitempty"`
- CropOffsetY *float32 `protobuf:"fixed32,13,opt,name=crop_offset_y,def=0.5" json:"crop_offset_y,omitempty"`
- Rotate *int32 `protobuf:"varint,3,opt,name=rotate,def=0" json:"rotate,omitempty"`
- HorizontalFlip *bool `protobuf:"varint,4,opt,name=horizontal_flip,def=0" json:"horizontal_flip,omitempty"`
- VerticalFlip *bool `protobuf:"varint,5,opt,name=vertical_flip,def=0" json:"vertical_flip,omitempty"`
- CropLeftX *float32 `protobuf:"fixed32,6,opt,name=crop_left_x,def=0" json:"crop_left_x,omitempty"`
- CropTopY *float32 `protobuf:"fixed32,7,opt,name=crop_top_y,def=0" json:"crop_top_y,omitempty"`
- CropRightX *float32 `protobuf:"fixed32,8,opt,name=crop_right_x,def=1" json:"crop_right_x,omitempty"`
- CropBottomY *float32 `protobuf:"fixed32,9,opt,name=crop_bottom_y,def=1" json:"crop_bottom_y,omitempty"`
- Autolevels *bool `protobuf:"varint,10,opt,name=autolevels,def=0" json:"autolevels,omitempty"`
- AllowStretch *bool `protobuf:"varint,14,opt,name=allow_stretch,def=0" json:"allow_stretch,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Transform) Reset() { *m = Transform{} }
-func (m *Transform) String() string { return proto.CompactTextString(m) }
-func (*Transform) ProtoMessage() {}
-
-const Default_Transform_CropToFit bool = false
-const Default_Transform_CropOffsetX float32 = 0.5
-const Default_Transform_CropOffsetY float32 = 0.5
-const Default_Transform_Rotate int32 = 0
-const Default_Transform_HorizontalFlip bool = false
-const Default_Transform_VerticalFlip bool = false
-const Default_Transform_CropLeftX float32 = 0
-const Default_Transform_CropTopY float32 = 0
-const Default_Transform_CropRightX float32 = 1
-const Default_Transform_CropBottomY float32 = 1
-const Default_Transform_Autolevels bool = false
-const Default_Transform_AllowStretch bool = false
-
-func (m *Transform) GetWidth() int32 {
- if m != nil && m.Width != nil {
- return *m.Width
- }
- return 0
-}
-
-func (m *Transform) GetHeight() int32 {
- if m != nil && m.Height != nil {
- return *m.Height
- }
- return 0
-}
-
-func (m *Transform) GetCropToFit() bool {
- if m != nil && m.CropToFit != nil {
- return *m.CropToFit
- }
- return Default_Transform_CropToFit
-}
-
-func (m *Transform) GetCropOffsetX() float32 {
- if m != nil && m.CropOffsetX != nil {
- return *m.CropOffsetX
- }
- return Default_Transform_CropOffsetX
-}
-
-func (m *Transform) GetCropOffsetY() float32 {
- if m != nil && m.CropOffsetY != nil {
- return *m.CropOffsetY
- }
- return Default_Transform_CropOffsetY
-}
-
-func (m *Transform) GetRotate() int32 {
- if m != nil && m.Rotate != nil {
- return *m.Rotate
- }
- return Default_Transform_Rotate
-}
-
-func (m *Transform) GetHorizontalFlip() bool {
- if m != nil && m.HorizontalFlip != nil {
- return *m.HorizontalFlip
- }
- return Default_Transform_HorizontalFlip
-}
-
-func (m *Transform) GetVerticalFlip() bool {
- if m != nil && m.VerticalFlip != nil {
- return *m.VerticalFlip
- }
- return Default_Transform_VerticalFlip
-}
-
-func (m *Transform) GetCropLeftX() float32 {
- if m != nil && m.CropLeftX != nil {
- return *m.CropLeftX
- }
- return Default_Transform_CropLeftX
-}
-
-func (m *Transform) GetCropTopY() float32 {
- if m != nil && m.CropTopY != nil {
- return *m.CropTopY
- }
- return Default_Transform_CropTopY
-}
-
-func (m *Transform) GetCropRightX() float32 {
- if m != nil && m.CropRightX != nil {
- return *m.CropRightX
- }
- return Default_Transform_CropRightX
-}
-
-func (m *Transform) GetCropBottomY() float32 {
- if m != nil && m.CropBottomY != nil {
- return *m.CropBottomY
- }
- return Default_Transform_CropBottomY
-}
-
-func (m *Transform) GetAutolevels() bool {
- if m != nil && m.Autolevels != nil {
- return *m.Autolevels
- }
- return Default_Transform_Autolevels
-}
-
-func (m *Transform) GetAllowStretch() bool {
- if m != nil && m.AllowStretch != nil {
- return *m.AllowStretch
- }
- return Default_Transform_AllowStretch
-}
-
-type ImageData struct {
- Content []byte `protobuf:"bytes,1,req,name=content" json:"content,omitempty"`
- BlobKey *string `protobuf:"bytes,2,opt,name=blob_key" json:"blob_key,omitempty"`
- Width *int32 `protobuf:"varint,3,opt,name=width" json:"width,omitempty"`
- Height *int32 `protobuf:"varint,4,opt,name=height" json:"height,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImageData) Reset() { *m = ImageData{} }
-func (m *ImageData) String() string { return proto.CompactTextString(m) }
-func (*ImageData) ProtoMessage() {}
-
-func (m *ImageData) GetContent() []byte {
- if m != nil {
- return m.Content
- }
- return nil
-}
-
-func (m *ImageData) GetBlobKey() string {
- if m != nil && m.BlobKey != nil {
- return *m.BlobKey
- }
- return ""
-}
-
-func (m *ImageData) GetWidth() int32 {
- if m != nil && m.Width != nil {
- return *m.Width
- }
- return 0
-}
-
-func (m *ImageData) GetHeight() int32 {
- if m != nil && m.Height != nil {
- return *m.Height
- }
- return 0
-}
-
-type InputSettings struct {
- CorrectExifOrientation *InputSettings_ORIENTATION_CORRECTION_TYPE `protobuf:"varint,1,opt,name=correct_exif_orientation,enum=appengine.InputSettings_ORIENTATION_CORRECTION_TYPE,def=0" json:"correct_exif_orientation,omitempty"`
- ParseMetadata *bool `protobuf:"varint,2,opt,name=parse_metadata,def=0" json:"parse_metadata,omitempty"`
- TransparentSubstitutionRgb *int32 `protobuf:"varint,3,opt,name=transparent_substitution_rgb" json:"transparent_substitution_rgb,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *InputSettings) Reset() { *m = InputSettings{} }
-func (m *InputSettings) String() string { return proto.CompactTextString(m) }
-func (*InputSettings) ProtoMessage() {}
-
-const Default_InputSettings_CorrectExifOrientation InputSettings_ORIENTATION_CORRECTION_TYPE = InputSettings_UNCHANGED_ORIENTATION
-const Default_InputSettings_ParseMetadata bool = false
-
-func (m *InputSettings) GetCorrectExifOrientation() InputSettings_ORIENTATION_CORRECTION_TYPE {
- if m != nil && m.CorrectExifOrientation != nil {
- return *m.CorrectExifOrientation
- }
- return Default_InputSettings_CorrectExifOrientation
-}
-
-func (m *InputSettings) GetParseMetadata() bool {
- if m != nil && m.ParseMetadata != nil {
- return *m.ParseMetadata
- }
- return Default_InputSettings_ParseMetadata
-}
-
-func (m *InputSettings) GetTransparentSubstitutionRgb() int32 {
- if m != nil && m.TransparentSubstitutionRgb != nil {
- return *m.TransparentSubstitutionRgb
- }
- return 0
-}
-
-type OutputSettings struct {
- MimeType *OutputSettings_MIME_TYPE `protobuf:"varint,1,opt,name=mime_type,enum=appengine.OutputSettings_MIME_TYPE,def=0" json:"mime_type,omitempty"`
- Quality *int32 `protobuf:"varint,2,opt,name=quality" json:"quality,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *OutputSettings) Reset() { *m = OutputSettings{} }
-func (m *OutputSettings) String() string { return proto.CompactTextString(m) }
-func (*OutputSettings) ProtoMessage() {}
-
-const Default_OutputSettings_MimeType OutputSettings_MIME_TYPE = OutputSettings_PNG
-
-func (m *OutputSettings) GetMimeType() OutputSettings_MIME_TYPE {
- if m != nil && m.MimeType != nil {
- return *m.MimeType
- }
- return Default_OutputSettings_MimeType
-}
-
-func (m *OutputSettings) GetQuality() int32 {
- if m != nil && m.Quality != nil {
- return *m.Quality
- }
- return 0
-}
-
-type ImagesTransformRequest struct {
- Image *ImageData `protobuf:"bytes,1,req,name=image" json:"image,omitempty"`
- Transform []*Transform `protobuf:"bytes,2,rep,name=transform" json:"transform,omitempty"`
- Output *OutputSettings `protobuf:"bytes,3,req,name=output" json:"output,omitempty"`
- Input *InputSettings `protobuf:"bytes,4,opt,name=input" json:"input,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesTransformRequest) Reset() { *m = ImagesTransformRequest{} }
-func (m *ImagesTransformRequest) String() string { return proto.CompactTextString(m) }
-func (*ImagesTransformRequest) ProtoMessage() {}
-
-func (m *ImagesTransformRequest) GetImage() *ImageData {
- if m != nil {
- return m.Image
- }
- return nil
-}
-
-func (m *ImagesTransformRequest) GetTransform() []*Transform {
- if m != nil {
- return m.Transform
- }
- return nil
-}
-
-func (m *ImagesTransformRequest) GetOutput() *OutputSettings {
- if m != nil {
- return m.Output
- }
- return nil
-}
-
-func (m *ImagesTransformRequest) GetInput() *InputSettings {
- if m != nil {
- return m.Input
- }
- return nil
-}
-
-type ImagesTransformResponse struct {
- Image *ImageData `protobuf:"bytes,1,req,name=image" json:"image,omitempty"`
- SourceMetadata *string `protobuf:"bytes,2,opt,name=source_metadata" json:"source_metadata,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesTransformResponse) Reset() { *m = ImagesTransformResponse{} }
-func (m *ImagesTransformResponse) String() string { return proto.CompactTextString(m) }
-func (*ImagesTransformResponse) ProtoMessage() {}
-
-func (m *ImagesTransformResponse) GetImage() *ImageData {
- if m != nil {
- return m.Image
- }
- return nil
-}
-
-func (m *ImagesTransformResponse) GetSourceMetadata() string {
- if m != nil && m.SourceMetadata != nil {
- return *m.SourceMetadata
- }
- return ""
-}
-
-type CompositeImageOptions struct {
- SourceIndex *int32 `protobuf:"varint,1,req,name=source_index" json:"source_index,omitempty"`
- XOffset *int32 `protobuf:"varint,2,req,name=x_offset" json:"x_offset,omitempty"`
- YOffset *int32 `protobuf:"varint,3,req,name=y_offset" json:"y_offset,omitempty"`
- Opacity *float32 `protobuf:"fixed32,4,req,name=opacity" json:"opacity,omitempty"`
- Anchor *CompositeImageOptions_ANCHOR `protobuf:"varint,5,req,name=anchor,enum=appengine.CompositeImageOptions_ANCHOR" json:"anchor,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CompositeImageOptions) Reset() { *m = CompositeImageOptions{} }
-func (m *CompositeImageOptions) String() string { return proto.CompactTextString(m) }
-func (*CompositeImageOptions) ProtoMessage() {}
-
-func (m *CompositeImageOptions) GetSourceIndex() int32 {
- if m != nil && m.SourceIndex != nil {
- return *m.SourceIndex
- }
- return 0
-}
-
-func (m *CompositeImageOptions) GetXOffset() int32 {
- if m != nil && m.XOffset != nil {
- return *m.XOffset
- }
- return 0
-}
-
-func (m *CompositeImageOptions) GetYOffset() int32 {
- if m != nil && m.YOffset != nil {
- return *m.YOffset
- }
- return 0
-}
-
-func (m *CompositeImageOptions) GetOpacity() float32 {
- if m != nil && m.Opacity != nil {
- return *m.Opacity
- }
- return 0
-}
-
-func (m *CompositeImageOptions) GetAnchor() CompositeImageOptions_ANCHOR {
- if m != nil && m.Anchor != nil {
- return *m.Anchor
- }
- return CompositeImageOptions_TOP_LEFT
-}
-
-type ImagesCanvas struct {
- Width *int32 `protobuf:"varint,1,req,name=width" json:"width,omitempty"`
- Height *int32 `protobuf:"varint,2,req,name=height" json:"height,omitempty"`
- Output *OutputSettings `protobuf:"bytes,3,req,name=output" json:"output,omitempty"`
- Color *int32 `protobuf:"varint,4,opt,name=color,def=-1" json:"color,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesCanvas) Reset() { *m = ImagesCanvas{} }
-func (m *ImagesCanvas) String() string { return proto.CompactTextString(m) }
-func (*ImagesCanvas) ProtoMessage() {}
-
-const Default_ImagesCanvas_Color int32 = -1
-
-func (m *ImagesCanvas) GetWidth() int32 {
- if m != nil && m.Width != nil {
- return *m.Width
- }
- return 0
-}
-
-func (m *ImagesCanvas) GetHeight() int32 {
- if m != nil && m.Height != nil {
- return *m.Height
- }
- return 0
-}
-
-func (m *ImagesCanvas) GetOutput() *OutputSettings {
- if m != nil {
- return m.Output
- }
- return nil
-}
-
-func (m *ImagesCanvas) GetColor() int32 {
- if m != nil && m.Color != nil {
- return *m.Color
- }
- return Default_ImagesCanvas_Color
-}
-
-type ImagesCompositeRequest struct {
- Image []*ImageData `protobuf:"bytes,1,rep,name=image" json:"image,omitempty"`
- Options []*CompositeImageOptions `protobuf:"bytes,2,rep,name=options" json:"options,omitempty"`
- Canvas *ImagesCanvas `protobuf:"bytes,3,req,name=canvas" json:"canvas,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesCompositeRequest) Reset() { *m = ImagesCompositeRequest{} }
-func (m *ImagesCompositeRequest) String() string { return proto.CompactTextString(m) }
-func (*ImagesCompositeRequest) ProtoMessage() {}
-
-func (m *ImagesCompositeRequest) GetImage() []*ImageData {
- if m != nil {
- return m.Image
- }
- return nil
-}
-
-func (m *ImagesCompositeRequest) GetOptions() []*CompositeImageOptions {
- if m != nil {
- return m.Options
- }
- return nil
-}
-
-func (m *ImagesCompositeRequest) GetCanvas() *ImagesCanvas {
- if m != nil {
- return m.Canvas
- }
- return nil
-}
-
-type ImagesCompositeResponse struct {
- Image *ImageData `protobuf:"bytes,1,req,name=image" json:"image,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesCompositeResponse) Reset() { *m = ImagesCompositeResponse{} }
-func (m *ImagesCompositeResponse) String() string { return proto.CompactTextString(m) }
-func (*ImagesCompositeResponse) ProtoMessage() {}
-
-func (m *ImagesCompositeResponse) GetImage() *ImageData {
- if m != nil {
- return m.Image
- }
- return nil
-}
-
-type ImagesHistogramRequest struct {
- Image *ImageData `protobuf:"bytes,1,req,name=image" json:"image,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesHistogramRequest) Reset() { *m = ImagesHistogramRequest{} }
-func (m *ImagesHistogramRequest) String() string { return proto.CompactTextString(m) }
-func (*ImagesHistogramRequest) ProtoMessage() {}
-
-func (m *ImagesHistogramRequest) GetImage() *ImageData {
- if m != nil {
- return m.Image
- }
- return nil
-}
-
-type ImagesHistogram struct {
- Red []int32 `protobuf:"varint,1,rep,name=red" json:"red,omitempty"`
- Green []int32 `protobuf:"varint,2,rep,name=green" json:"green,omitempty"`
- Blue []int32 `protobuf:"varint,3,rep,name=blue" json:"blue,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesHistogram) Reset() { *m = ImagesHistogram{} }
-func (m *ImagesHistogram) String() string { return proto.CompactTextString(m) }
-func (*ImagesHistogram) ProtoMessage() {}
-
-func (m *ImagesHistogram) GetRed() []int32 {
- if m != nil {
- return m.Red
- }
- return nil
-}
-
-func (m *ImagesHistogram) GetGreen() []int32 {
- if m != nil {
- return m.Green
- }
- return nil
-}
-
-func (m *ImagesHistogram) GetBlue() []int32 {
- if m != nil {
- return m.Blue
- }
- return nil
-}
-
-type ImagesHistogramResponse struct {
- Histogram *ImagesHistogram `protobuf:"bytes,1,req,name=histogram" json:"histogram,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesHistogramResponse) Reset() { *m = ImagesHistogramResponse{} }
-func (m *ImagesHistogramResponse) String() string { return proto.CompactTextString(m) }
-func (*ImagesHistogramResponse) ProtoMessage() {}
-
-func (m *ImagesHistogramResponse) GetHistogram() *ImagesHistogram {
- if m != nil {
- return m.Histogram
- }
- return nil
-}
-
-type ImagesGetUrlBaseRequest struct {
- BlobKey *string `protobuf:"bytes,1,req,name=blob_key" json:"blob_key,omitempty"`
- CreateSecureUrl *bool `protobuf:"varint,2,opt,name=create_secure_url,def=0" json:"create_secure_url,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesGetUrlBaseRequest) Reset() { *m = ImagesGetUrlBaseRequest{} }
-func (m *ImagesGetUrlBaseRequest) String() string { return proto.CompactTextString(m) }
-func (*ImagesGetUrlBaseRequest) ProtoMessage() {}
-
-const Default_ImagesGetUrlBaseRequest_CreateSecureUrl bool = false
-
-func (m *ImagesGetUrlBaseRequest) GetBlobKey() string {
- if m != nil && m.BlobKey != nil {
- return *m.BlobKey
- }
- return ""
-}
-
-func (m *ImagesGetUrlBaseRequest) GetCreateSecureUrl() bool {
- if m != nil && m.CreateSecureUrl != nil {
- return *m.CreateSecureUrl
- }
- return Default_ImagesGetUrlBaseRequest_CreateSecureUrl
-}
-
-type ImagesGetUrlBaseResponse struct {
- Url *string `protobuf:"bytes,1,req,name=url" json:"url,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesGetUrlBaseResponse) Reset() { *m = ImagesGetUrlBaseResponse{} }
-func (m *ImagesGetUrlBaseResponse) String() string { return proto.CompactTextString(m) }
-func (*ImagesGetUrlBaseResponse) ProtoMessage() {}
-
-func (m *ImagesGetUrlBaseResponse) GetUrl() string {
- if m != nil && m.Url != nil {
- return *m.Url
- }
- return ""
-}
-
-type ImagesDeleteUrlBaseRequest struct {
- BlobKey *string `protobuf:"bytes,1,req,name=blob_key" json:"blob_key,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesDeleteUrlBaseRequest) Reset() { *m = ImagesDeleteUrlBaseRequest{} }
-func (m *ImagesDeleteUrlBaseRequest) String() string { return proto.CompactTextString(m) }
-func (*ImagesDeleteUrlBaseRequest) ProtoMessage() {}
-
-func (m *ImagesDeleteUrlBaseRequest) GetBlobKey() string {
- if m != nil && m.BlobKey != nil {
- return *m.BlobKey
- }
- return ""
-}
-
-type ImagesDeleteUrlBaseResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ImagesDeleteUrlBaseResponse) Reset() { *m = ImagesDeleteUrlBaseResponse{} }
-func (m *ImagesDeleteUrlBaseResponse) String() string { return proto.CompactTextString(m) }
-func (*ImagesDeleteUrlBaseResponse) ProtoMessage() {}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/image/images_service.proto b/vendor/google.golang.org/appengine/internal/image/images_service.proto
deleted file mode 100644
index f0d2ed5..0000000
--- a/vendor/google.golang.org/appengine/internal/image/images_service.proto
+++ /dev/null
@@ -1,162 +0,0 @@
-syntax = "proto2";
-option go_package = "image";
-
-package appengine;
-
-message ImagesServiceError {
- enum ErrorCode {
- UNSPECIFIED_ERROR = 1;
- BAD_TRANSFORM_DATA = 2;
- NOT_IMAGE = 3;
- BAD_IMAGE_DATA = 4;
- IMAGE_TOO_LARGE = 5;
- INVALID_BLOB_KEY = 6;
- ACCESS_DENIED = 7;
- OBJECT_NOT_FOUND = 8;
- }
-}
-
-message ImagesServiceTransform {
- enum Type {
- RESIZE = 1;
- ROTATE = 2;
- HORIZONTAL_FLIP = 3;
- VERTICAL_FLIP = 4;
- CROP = 5;
- IM_FEELING_LUCKY = 6;
- }
-}
-
-message Transform {
- optional int32 width = 1;
- optional int32 height = 2;
- optional bool crop_to_fit = 11 [default = false];
- optional float crop_offset_x = 12 [default = 0.5];
- optional float crop_offset_y = 13 [default = 0.5];
-
- optional int32 rotate = 3 [default = 0];
-
- optional bool horizontal_flip = 4 [default = false];
-
- optional bool vertical_flip = 5 [default = false];
-
- optional float crop_left_x = 6 [default = 0.0];
- optional float crop_top_y = 7 [default = 0.0];
- optional float crop_right_x = 8 [default = 1.0];
- optional float crop_bottom_y = 9 [default = 1.0];
-
- optional bool autolevels = 10 [default = false];
-
- optional bool allow_stretch = 14 [default = false];
-}
-
-message ImageData {
- required bytes content = 1 [ctype=CORD];
- optional string blob_key = 2;
-
- optional int32 width = 3;
- optional int32 height = 4;
-}
-
-message InputSettings {
- enum ORIENTATION_CORRECTION_TYPE {
- UNCHANGED_ORIENTATION = 0;
- CORRECT_ORIENTATION = 1;
- }
- optional ORIENTATION_CORRECTION_TYPE correct_exif_orientation = 1
- [default=UNCHANGED_ORIENTATION];
- optional bool parse_metadata = 2 [default=false];
- optional int32 transparent_substitution_rgb = 3;
-}
-
-message OutputSettings {
- enum MIME_TYPE {
- PNG = 0;
- JPEG = 1;
- WEBP = 2;
- }
-
- optional MIME_TYPE mime_type = 1 [default=PNG];
- optional int32 quality = 2;
-}
-
-message ImagesTransformRequest {
- required ImageData image = 1;
- repeated Transform transform = 2;
- required OutputSettings output = 3;
- optional InputSettings input = 4;
-}
-
-message ImagesTransformResponse {
- required ImageData image = 1;
- optional string source_metadata = 2;
-}
-
-message CompositeImageOptions {
- required int32 source_index = 1;
- required int32 x_offset = 2;
- required int32 y_offset = 3;
- required float opacity = 4;
-
- enum ANCHOR {
- TOP_LEFT = 0;
- TOP = 1;
- TOP_RIGHT = 2;
- LEFT = 3;
- CENTER = 4;
- RIGHT = 5;
- BOTTOM_LEFT = 6;
- BOTTOM = 7;
- BOTTOM_RIGHT = 8;
- }
-
- required ANCHOR anchor = 5;
-}
-
-message ImagesCanvas {
- required int32 width = 1;
- required int32 height = 2;
- required OutputSettings output = 3;
- optional int32 color = 4 [default=-1];
-}
-
-message ImagesCompositeRequest {
- repeated ImageData image = 1;
- repeated CompositeImageOptions options = 2;
- required ImagesCanvas canvas = 3;
-}
-
-message ImagesCompositeResponse {
- required ImageData image = 1;
-}
-
-message ImagesHistogramRequest {
- required ImageData image = 1;
-}
-
-message ImagesHistogram {
- repeated int32 red = 1;
- repeated int32 green = 2;
- repeated int32 blue = 3;
-}
-
-message ImagesHistogramResponse {
- required ImagesHistogram histogram = 1;
-}
-
-message ImagesGetUrlBaseRequest {
- required string blob_key = 1;
-
- optional bool create_secure_url = 2 [default = false];
-}
-
-message ImagesGetUrlBaseResponse {
- required string url = 1;
-}
-
-message ImagesDeleteUrlBaseRequest {
- required string blob_key = 1;
-}
-
-message ImagesDeleteUrlBaseResponse {
-}
diff --git a/vendor/google.golang.org/appengine/internal/internal.go b/vendor/google.golang.org/appengine/internal/internal.go
deleted file mode 100644
index 66e8d76..0000000
--- a/vendor/google.golang.org/appengine/internal/internal.go
+++ /dev/null
@@ -1,144 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package internal provides support for package appengine.
-//
-// Programs should not use this package directly. Its API is not stable.
-// Use packages appengine and appengine/* instead.
-package internal
-
-import (
- "fmt"
- "io"
- "log"
- "net/http"
- "net/url"
- "os"
-
- "github.com/golang/protobuf/proto"
-
- remotepb "google.golang.org/appengine/internal/remote_api"
-)
-
-// errorCodeMaps is a map of service name to the error code map for the service.
-var errorCodeMaps = make(map[string]map[int32]string)
-
-// RegisterErrorCodeMap is called from API implementations to register their
-// error code map. This should only be called from init functions.
-func RegisterErrorCodeMap(service string, m map[int32]string) {
- errorCodeMaps[service] = m
-}
-
-type timeoutCodeKey struct {
- service string
- code int32
-}
-
-// timeoutCodes is the set of service+code pairs that represent timeouts.
-var timeoutCodes = make(map[timeoutCodeKey]bool)
-
-func RegisterTimeoutErrorCode(service string, code int32) {
- timeoutCodes[timeoutCodeKey{service, code}] = true
-}
-
-// APIError is the type returned by appengine.Context's Call method
-// when an API call fails in an API-specific way. This may be, for instance,
-// a taskqueue API call failing with TaskQueueServiceError::UNKNOWN_QUEUE.
-type APIError struct {
- Service string
- Detail string
- Code int32 // API-specific error code
-}
-
-func (e *APIError) Error() string {
- if e.Code == 0 {
- if e.Detail == "" {
- return "APIError "
- }
- return e.Detail
- }
- s := fmt.Sprintf("API error %d", e.Code)
- if m, ok := errorCodeMaps[e.Service]; ok {
- s += " (" + e.Service + ": " + m[e.Code] + ")"
- } else {
- // Shouldn't happen, but provide a bit more detail if it does.
- s = e.Service + " " + s
- }
- if e.Detail != "" {
- s += ": " + e.Detail
- }
- return s
-}
-
-func (e *APIError) IsTimeout() bool {
- return timeoutCodes[timeoutCodeKey{e.Service, e.Code}]
-}
-
-// CallError is the type returned by appengine.Context's Call method when an
-// API call fails in a generic way, such as RpcError::CAPABILITY_DISABLED.
-type CallError struct {
- Detail string
- Code int32
- // TODO: Remove this if we get a distinguishable error code.
- Timeout bool
-}
-
-func (e *CallError) Error() string {
- var msg string
- switch remotepb.RpcError_ErrorCode(e.Code) {
- case remotepb.RpcError_UNKNOWN:
- return e.Detail
- case remotepb.RpcError_OVER_QUOTA:
- msg = "Over quota"
- case remotepb.RpcError_CAPABILITY_DISABLED:
- msg = "Capability disabled"
- case remotepb.RpcError_CANCELLED:
- msg = "Canceled"
- default:
- msg = fmt.Sprintf("Call error %d", e.Code)
- }
- s := msg + ": " + e.Detail
- if e.Timeout {
- s += " (timeout)"
- }
- return s
-}
-
-func (e *CallError) IsTimeout() bool {
- return e.Timeout
-}
-
-func Main() {
- installHealthChecker(http.DefaultServeMux)
-
- port := "8080"
- if s := os.Getenv("PORT"); s != "" {
- port = s
- }
-
- if err := http.ListenAndServe(":"+port, http.HandlerFunc(handleHTTP)); err != nil {
- log.Fatalf("http.ListenAndServe: %v", err)
- }
-}
-
-func installHealthChecker(mux *http.ServeMux) {
- // If no health check handler has been installed by this point, add a trivial one.
- const healthPath = "/_ah/health"
- hreq := &http.Request{
- Method: "GET",
- URL: &url.URL{
- Path: healthPath,
- },
- }
- if _, pat := mux.Handler(hreq); pat != healthPath {
- mux.HandleFunc(healthPath, func(w http.ResponseWriter, r *http.Request) {
- io.WriteString(w, "ok")
- })
- }
-}
-
-// NamespaceMods is a map from API service to a function that will mutate an RPC request to attach a namespace.
-// The function should be prepared to be called on the same message more than once; it should only modify the
-// RPC request the first time.
-var NamespaceMods = make(map[string]func(m proto.Message, namespace string))
diff --git a/vendor/google.golang.org/appengine/internal/log/log_service.pb.go b/vendor/google.golang.org/appengine/internal/log/log_service.pb.go
deleted file mode 100644
index 20c595b..0000000
--- a/vendor/google.golang.org/appengine/internal/log/log_service.pb.go
+++ /dev/null
@@ -1,899 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/log/log_service.proto
-// DO NOT EDIT!
-
-/*
-Package log is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/log/log_service.proto
-
-It has these top-level messages:
- LogServiceError
- UserAppLogLine
- UserAppLogGroup
- FlushRequest
- SetStatusRequest
- LogOffset
- LogLine
- RequestLog
- LogModuleVersion
- LogReadRequest
- LogReadResponse
- LogUsageRecord
- LogUsageRequest
- LogUsageResponse
-*/
-package log
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type LogServiceError_ErrorCode int32
-
-const (
- LogServiceError_OK LogServiceError_ErrorCode = 0
- LogServiceError_INVALID_REQUEST LogServiceError_ErrorCode = 1
- LogServiceError_STORAGE_ERROR LogServiceError_ErrorCode = 2
-)
-
-var LogServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "INVALID_REQUEST",
- 2: "STORAGE_ERROR",
-}
-var LogServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "INVALID_REQUEST": 1,
- "STORAGE_ERROR": 2,
-}
-
-func (x LogServiceError_ErrorCode) Enum() *LogServiceError_ErrorCode {
- p := new(LogServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x LogServiceError_ErrorCode) String() string {
- return proto.EnumName(LogServiceError_ErrorCode_name, int32(x))
-}
-func (x *LogServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(LogServiceError_ErrorCode_value, data, "LogServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = LogServiceError_ErrorCode(value)
- return nil
-}
-
-type LogServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *LogServiceError) Reset() { *m = LogServiceError{} }
-func (m *LogServiceError) String() string { return proto.CompactTextString(m) }
-func (*LogServiceError) ProtoMessage() {}
-
-type UserAppLogLine struct {
- TimestampUsec *int64 `protobuf:"varint,1,req,name=timestamp_usec" json:"timestamp_usec,omitempty"`
- Level *int64 `protobuf:"varint,2,req,name=level" json:"level,omitempty"`
- Message *string `protobuf:"bytes,3,req,name=message" json:"message,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *UserAppLogLine) Reset() { *m = UserAppLogLine{} }
-func (m *UserAppLogLine) String() string { return proto.CompactTextString(m) }
-func (*UserAppLogLine) ProtoMessage() {}
-
-func (m *UserAppLogLine) GetTimestampUsec() int64 {
- if m != nil && m.TimestampUsec != nil {
- return *m.TimestampUsec
- }
- return 0
-}
-
-func (m *UserAppLogLine) GetLevel() int64 {
- if m != nil && m.Level != nil {
- return *m.Level
- }
- return 0
-}
-
-func (m *UserAppLogLine) GetMessage() string {
- if m != nil && m.Message != nil {
- return *m.Message
- }
- return ""
-}
-
-type UserAppLogGroup struct {
- LogLine []*UserAppLogLine `protobuf:"bytes,2,rep,name=log_line" json:"log_line,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *UserAppLogGroup) Reset() { *m = UserAppLogGroup{} }
-func (m *UserAppLogGroup) String() string { return proto.CompactTextString(m) }
-func (*UserAppLogGroup) ProtoMessage() {}
-
-func (m *UserAppLogGroup) GetLogLine() []*UserAppLogLine {
- if m != nil {
- return m.LogLine
- }
- return nil
-}
-
-type FlushRequest struct {
- Logs []byte `protobuf:"bytes,1,opt,name=logs" json:"logs,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FlushRequest) Reset() { *m = FlushRequest{} }
-func (m *FlushRequest) String() string { return proto.CompactTextString(m) }
-func (*FlushRequest) ProtoMessage() {}
-
-func (m *FlushRequest) GetLogs() []byte {
- if m != nil {
- return m.Logs
- }
- return nil
-}
-
-type SetStatusRequest struct {
- Status *string `protobuf:"bytes,1,req,name=status" json:"status,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SetStatusRequest) Reset() { *m = SetStatusRequest{} }
-func (m *SetStatusRequest) String() string { return proto.CompactTextString(m) }
-func (*SetStatusRequest) ProtoMessage() {}
-
-func (m *SetStatusRequest) GetStatus() string {
- if m != nil && m.Status != nil {
- return *m.Status
- }
- return ""
-}
-
-type LogOffset struct {
- RequestId []byte `protobuf:"bytes,1,opt,name=request_id" json:"request_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *LogOffset) Reset() { *m = LogOffset{} }
-func (m *LogOffset) String() string { return proto.CompactTextString(m) }
-func (*LogOffset) ProtoMessage() {}
-
-func (m *LogOffset) GetRequestId() []byte {
- if m != nil {
- return m.RequestId
- }
- return nil
-}
-
-type LogLine struct {
- Time *int64 `protobuf:"varint,1,req,name=time" json:"time,omitempty"`
- Level *int32 `protobuf:"varint,2,req,name=level" json:"level,omitempty"`
- LogMessage *string `protobuf:"bytes,3,req,name=log_message" json:"log_message,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *LogLine) Reset() { *m = LogLine{} }
-func (m *LogLine) String() string { return proto.CompactTextString(m) }
-func (*LogLine) ProtoMessage() {}
-
-func (m *LogLine) GetTime() int64 {
- if m != nil && m.Time != nil {
- return *m.Time
- }
- return 0
-}
-
-func (m *LogLine) GetLevel() int32 {
- if m != nil && m.Level != nil {
- return *m.Level
- }
- return 0
-}
-
-func (m *LogLine) GetLogMessage() string {
- if m != nil && m.LogMessage != nil {
- return *m.LogMessage
- }
- return ""
-}
-
-type RequestLog struct {
- AppId *string `protobuf:"bytes,1,req,name=app_id" json:"app_id,omitempty"`
- ModuleId *string `protobuf:"bytes,37,opt,name=module_id,def=default" json:"module_id,omitempty"`
- VersionId *string `protobuf:"bytes,2,req,name=version_id" json:"version_id,omitempty"`
- RequestId []byte `protobuf:"bytes,3,req,name=request_id" json:"request_id,omitempty"`
- Offset *LogOffset `protobuf:"bytes,35,opt,name=offset" json:"offset,omitempty"`
- Ip *string `protobuf:"bytes,4,req,name=ip" json:"ip,omitempty"`
- Nickname *string `protobuf:"bytes,5,opt,name=nickname" json:"nickname,omitempty"`
- StartTime *int64 `protobuf:"varint,6,req,name=start_time" json:"start_time,omitempty"`
- EndTime *int64 `protobuf:"varint,7,req,name=end_time" json:"end_time,omitempty"`
- Latency *int64 `protobuf:"varint,8,req,name=latency" json:"latency,omitempty"`
- Mcycles *int64 `protobuf:"varint,9,req,name=mcycles" json:"mcycles,omitempty"`
- Method *string `protobuf:"bytes,10,req,name=method" json:"method,omitempty"`
- Resource *string `protobuf:"bytes,11,req,name=resource" json:"resource,omitempty"`
- HttpVersion *string `protobuf:"bytes,12,req,name=http_version" json:"http_version,omitempty"`
- Status *int32 `protobuf:"varint,13,req,name=status" json:"status,omitempty"`
- ResponseSize *int64 `protobuf:"varint,14,req,name=response_size" json:"response_size,omitempty"`
- Referrer *string `protobuf:"bytes,15,opt,name=referrer" json:"referrer,omitempty"`
- UserAgent *string `protobuf:"bytes,16,opt,name=user_agent" json:"user_agent,omitempty"`
- UrlMapEntry *string `protobuf:"bytes,17,req,name=url_map_entry" json:"url_map_entry,omitempty"`
- Combined *string `protobuf:"bytes,18,req,name=combined" json:"combined,omitempty"`
- ApiMcycles *int64 `protobuf:"varint,19,opt,name=api_mcycles" json:"api_mcycles,omitempty"`
- Host *string `protobuf:"bytes,20,opt,name=host" json:"host,omitempty"`
- Cost *float64 `protobuf:"fixed64,21,opt,name=cost" json:"cost,omitempty"`
- TaskQueueName *string `protobuf:"bytes,22,opt,name=task_queue_name" json:"task_queue_name,omitempty"`
- TaskName *string `protobuf:"bytes,23,opt,name=task_name" json:"task_name,omitempty"`
- WasLoadingRequest *bool `protobuf:"varint,24,opt,name=was_loading_request" json:"was_loading_request,omitempty"`
- PendingTime *int64 `protobuf:"varint,25,opt,name=pending_time" json:"pending_time,omitempty"`
- ReplicaIndex *int32 `protobuf:"varint,26,opt,name=replica_index,def=-1" json:"replica_index,omitempty"`
- Finished *bool `protobuf:"varint,27,opt,name=finished,def=1" json:"finished,omitempty"`
- CloneKey []byte `protobuf:"bytes,28,opt,name=clone_key" json:"clone_key,omitempty"`
- Line []*LogLine `protobuf:"bytes,29,rep,name=line" json:"line,omitempty"`
- LinesIncomplete *bool `protobuf:"varint,36,opt,name=lines_incomplete" json:"lines_incomplete,omitempty"`
- AppEngineRelease []byte `protobuf:"bytes,38,opt,name=app_engine_release" json:"app_engine_release,omitempty"`
- ExitReason *int32 `protobuf:"varint,30,opt,name=exit_reason" json:"exit_reason,omitempty"`
- WasThrottledForTime *bool `protobuf:"varint,31,opt,name=was_throttled_for_time" json:"was_throttled_for_time,omitempty"`
- WasThrottledForRequests *bool `protobuf:"varint,32,opt,name=was_throttled_for_requests" json:"was_throttled_for_requests,omitempty"`
- ThrottledTime *int64 `protobuf:"varint,33,opt,name=throttled_time" json:"throttled_time,omitempty"`
- ServerName []byte `protobuf:"bytes,34,opt,name=server_name" json:"server_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *RequestLog) Reset() { *m = RequestLog{} }
-func (m *RequestLog) String() string { return proto.CompactTextString(m) }
-func (*RequestLog) ProtoMessage() {}
-
-const Default_RequestLog_ModuleId string = "default"
-const Default_RequestLog_ReplicaIndex int32 = -1
-const Default_RequestLog_Finished bool = true
-
-func (m *RequestLog) GetAppId() string {
- if m != nil && m.AppId != nil {
- return *m.AppId
- }
- return ""
-}
-
-func (m *RequestLog) GetModuleId() string {
- if m != nil && m.ModuleId != nil {
- return *m.ModuleId
- }
- return Default_RequestLog_ModuleId
-}
-
-func (m *RequestLog) GetVersionId() string {
- if m != nil && m.VersionId != nil {
- return *m.VersionId
- }
- return ""
-}
-
-func (m *RequestLog) GetRequestId() []byte {
- if m != nil {
- return m.RequestId
- }
- return nil
-}
-
-func (m *RequestLog) GetOffset() *LogOffset {
- if m != nil {
- return m.Offset
- }
- return nil
-}
-
-func (m *RequestLog) GetIp() string {
- if m != nil && m.Ip != nil {
- return *m.Ip
- }
- return ""
-}
-
-func (m *RequestLog) GetNickname() string {
- if m != nil && m.Nickname != nil {
- return *m.Nickname
- }
- return ""
-}
-
-func (m *RequestLog) GetStartTime() int64 {
- if m != nil && m.StartTime != nil {
- return *m.StartTime
- }
- return 0
-}
-
-func (m *RequestLog) GetEndTime() int64 {
- if m != nil && m.EndTime != nil {
- return *m.EndTime
- }
- return 0
-}
-
-func (m *RequestLog) GetLatency() int64 {
- if m != nil && m.Latency != nil {
- return *m.Latency
- }
- return 0
-}
-
-func (m *RequestLog) GetMcycles() int64 {
- if m != nil && m.Mcycles != nil {
- return *m.Mcycles
- }
- return 0
-}
-
-func (m *RequestLog) GetMethod() string {
- if m != nil && m.Method != nil {
- return *m.Method
- }
- return ""
-}
-
-func (m *RequestLog) GetResource() string {
- if m != nil && m.Resource != nil {
- return *m.Resource
- }
- return ""
-}
-
-func (m *RequestLog) GetHttpVersion() string {
- if m != nil && m.HttpVersion != nil {
- return *m.HttpVersion
- }
- return ""
-}
-
-func (m *RequestLog) GetStatus() int32 {
- if m != nil && m.Status != nil {
- return *m.Status
- }
- return 0
-}
-
-func (m *RequestLog) GetResponseSize() int64 {
- if m != nil && m.ResponseSize != nil {
- return *m.ResponseSize
- }
- return 0
-}
-
-func (m *RequestLog) GetReferrer() string {
- if m != nil && m.Referrer != nil {
- return *m.Referrer
- }
- return ""
-}
-
-func (m *RequestLog) GetUserAgent() string {
- if m != nil && m.UserAgent != nil {
- return *m.UserAgent
- }
- return ""
-}
-
-func (m *RequestLog) GetUrlMapEntry() string {
- if m != nil && m.UrlMapEntry != nil {
- return *m.UrlMapEntry
- }
- return ""
-}
-
-func (m *RequestLog) GetCombined() string {
- if m != nil && m.Combined != nil {
- return *m.Combined
- }
- return ""
-}
-
-func (m *RequestLog) GetApiMcycles() int64 {
- if m != nil && m.ApiMcycles != nil {
- return *m.ApiMcycles
- }
- return 0
-}
-
-func (m *RequestLog) GetHost() string {
- if m != nil && m.Host != nil {
- return *m.Host
- }
- return ""
-}
-
-func (m *RequestLog) GetCost() float64 {
- if m != nil && m.Cost != nil {
- return *m.Cost
- }
- return 0
-}
-
-func (m *RequestLog) GetTaskQueueName() string {
- if m != nil && m.TaskQueueName != nil {
- return *m.TaskQueueName
- }
- return ""
-}
-
-func (m *RequestLog) GetTaskName() string {
- if m != nil && m.TaskName != nil {
- return *m.TaskName
- }
- return ""
-}
-
-func (m *RequestLog) GetWasLoadingRequest() bool {
- if m != nil && m.WasLoadingRequest != nil {
- return *m.WasLoadingRequest
- }
- return false
-}
-
-func (m *RequestLog) GetPendingTime() int64 {
- if m != nil && m.PendingTime != nil {
- return *m.PendingTime
- }
- return 0
-}
-
-func (m *RequestLog) GetReplicaIndex() int32 {
- if m != nil && m.ReplicaIndex != nil {
- return *m.ReplicaIndex
- }
- return Default_RequestLog_ReplicaIndex
-}
-
-func (m *RequestLog) GetFinished() bool {
- if m != nil && m.Finished != nil {
- return *m.Finished
- }
- return Default_RequestLog_Finished
-}
-
-func (m *RequestLog) GetCloneKey() []byte {
- if m != nil {
- return m.CloneKey
- }
- return nil
-}
-
-func (m *RequestLog) GetLine() []*LogLine {
- if m != nil {
- return m.Line
- }
- return nil
-}
-
-func (m *RequestLog) GetLinesIncomplete() bool {
- if m != nil && m.LinesIncomplete != nil {
- return *m.LinesIncomplete
- }
- return false
-}
-
-func (m *RequestLog) GetAppEngineRelease() []byte {
- if m != nil {
- return m.AppEngineRelease
- }
- return nil
-}
-
-func (m *RequestLog) GetExitReason() int32 {
- if m != nil && m.ExitReason != nil {
- return *m.ExitReason
- }
- return 0
-}
-
-func (m *RequestLog) GetWasThrottledForTime() bool {
- if m != nil && m.WasThrottledForTime != nil {
- return *m.WasThrottledForTime
- }
- return false
-}
-
-func (m *RequestLog) GetWasThrottledForRequests() bool {
- if m != nil && m.WasThrottledForRequests != nil {
- return *m.WasThrottledForRequests
- }
- return false
-}
-
-func (m *RequestLog) GetThrottledTime() int64 {
- if m != nil && m.ThrottledTime != nil {
- return *m.ThrottledTime
- }
- return 0
-}
-
-func (m *RequestLog) GetServerName() []byte {
- if m != nil {
- return m.ServerName
- }
- return nil
-}
-
-type LogModuleVersion struct {
- ModuleId *string `protobuf:"bytes,1,opt,name=module_id,def=default" json:"module_id,omitempty"`
- VersionId *string `protobuf:"bytes,2,opt,name=version_id" json:"version_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *LogModuleVersion) Reset() { *m = LogModuleVersion{} }
-func (m *LogModuleVersion) String() string { return proto.CompactTextString(m) }
-func (*LogModuleVersion) ProtoMessage() {}
-
-const Default_LogModuleVersion_ModuleId string = "default"
-
-func (m *LogModuleVersion) GetModuleId() string {
- if m != nil && m.ModuleId != nil {
- return *m.ModuleId
- }
- return Default_LogModuleVersion_ModuleId
-}
-
-func (m *LogModuleVersion) GetVersionId() string {
- if m != nil && m.VersionId != nil {
- return *m.VersionId
- }
- return ""
-}
-
-type LogReadRequest struct {
- AppId *string `protobuf:"bytes,1,req,name=app_id" json:"app_id,omitempty"`
- VersionId []string `protobuf:"bytes,2,rep,name=version_id" json:"version_id,omitempty"`
- ModuleVersion []*LogModuleVersion `protobuf:"bytes,19,rep,name=module_version" json:"module_version,omitempty"`
- StartTime *int64 `protobuf:"varint,3,opt,name=start_time" json:"start_time,omitempty"`
- EndTime *int64 `protobuf:"varint,4,opt,name=end_time" json:"end_time,omitempty"`
- Offset *LogOffset `protobuf:"bytes,5,opt,name=offset" json:"offset,omitempty"`
- RequestId [][]byte `protobuf:"bytes,6,rep,name=request_id" json:"request_id,omitempty"`
- MinimumLogLevel *int32 `protobuf:"varint,7,opt,name=minimum_log_level" json:"minimum_log_level,omitempty"`
- IncludeIncomplete *bool `protobuf:"varint,8,opt,name=include_incomplete" json:"include_incomplete,omitempty"`
- Count *int64 `protobuf:"varint,9,opt,name=count" json:"count,omitempty"`
- CombinedLogRegex *string `protobuf:"bytes,14,opt,name=combined_log_regex" json:"combined_log_regex,omitempty"`
- HostRegex *string `protobuf:"bytes,15,opt,name=host_regex" json:"host_regex,omitempty"`
- ReplicaIndex *int32 `protobuf:"varint,16,opt,name=replica_index" json:"replica_index,omitempty"`
- IncludeAppLogs *bool `protobuf:"varint,10,opt,name=include_app_logs" json:"include_app_logs,omitempty"`
- AppLogsPerRequest *int32 `protobuf:"varint,17,opt,name=app_logs_per_request" json:"app_logs_per_request,omitempty"`
- IncludeHost *bool `protobuf:"varint,11,opt,name=include_host" json:"include_host,omitempty"`
- IncludeAll *bool `protobuf:"varint,12,opt,name=include_all" json:"include_all,omitempty"`
- CacheIterator *bool `protobuf:"varint,13,opt,name=cache_iterator" json:"cache_iterator,omitempty"`
- NumShards *int32 `protobuf:"varint,18,opt,name=num_shards" json:"num_shards,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *LogReadRequest) Reset() { *m = LogReadRequest{} }
-func (m *LogReadRequest) String() string { return proto.CompactTextString(m) }
-func (*LogReadRequest) ProtoMessage() {}
-
-func (m *LogReadRequest) GetAppId() string {
- if m != nil && m.AppId != nil {
- return *m.AppId
- }
- return ""
-}
-
-func (m *LogReadRequest) GetVersionId() []string {
- if m != nil {
- return m.VersionId
- }
- return nil
-}
-
-func (m *LogReadRequest) GetModuleVersion() []*LogModuleVersion {
- if m != nil {
- return m.ModuleVersion
- }
- return nil
-}
-
-func (m *LogReadRequest) GetStartTime() int64 {
- if m != nil && m.StartTime != nil {
- return *m.StartTime
- }
- return 0
-}
-
-func (m *LogReadRequest) GetEndTime() int64 {
- if m != nil && m.EndTime != nil {
- return *m.EndTime
- }
- return 0
-}
-
-func (m *LogReadRequest) GetOffset() *LogOffset {
- if m != nil {
- return m.Offset
- }
- return nil
-}
-
-func (m *LogReadRequest) GetRequestId() [][]byte {
- if m != nil {
- return m.RequestId
- }
- return nil
-}
-
-func (m *LogReadRequest) GetMinimumLogLevel() int32 {
- if m != nil && m.MinimumLogLevel != nil {
- return *m.MinimumLogLevel
- }
- return 0
-}
-
-func (m *LogReadRequest) GetIncludeIncomplete() bool {
- if m != nil && m.IncludeIncomplete != nil {
- return *m.IncludeIncomplete
- }
- return false
-}
-
-func (m *LogReadRequest) GetCount() int64 {
- if m != nil && m.Count != nil {
- return *m.Count
- }
- return 0
-}
-
-func (m *LogReadRequest) GetCombinedLogRegex() string {
- if m != nil && m.CombinedLogRegex != nil {
- return *m.CombinedLogRegex
- }
- return ""
-}
-
-func (m *LogReadRequest) GetHostRegex() string {
- if m != nil && m.HostRegex != nil {
- return *m.HostRegex
- }
- return ""
-}
-
-func (m *LogReadRequest) GetReplicaIndex() int32 {
- if m != nil && m.ReplicaIndex != nil {
- return *m.ReplicaIndex
- }
- return 0
-}
-
-func (m *LogReadRequest) GetIncludeAppLogs() bool {
- if m != nil && m.IncludeAppLogs != nil {
- return *m.IncludeAppLogs
- }
- return false
-}
-
-func (m *LogReadRequest) GetAppLogsPerRequest() int32 {
- if m != nil && m.AppLogsPerRequest != nil {
- return *m.AppLogsPerRequest
- }
- return 0
-}
-
-func (m *LogReadRequest) GetIncludeHost() bool {
- if m != nil && m.IncludeHost != nil {
- return *m.IncludeHost
- }
- return false
-}
-
-func (m *LogReadRequest) GetIncludeAll() bool {
- if m != nil && m.IncludeAll != nil {
- return *m.IncludeAll
- }
- return false
-}
-
-func (m *LogReadRequest) GetCacheIterator() bool {
- if m != nil && m.CacheIterator != nil {
- return *m.CacheIterator
- }
- return false
-}
-
-func (m *LogReadRequest) GetNumShards() int32 {
- if m != nil && m.NumShards != nil {
- return *m.NumShards
- }
- return 0
-}
-
-type LogReadResponse struct {
- Log []*RequestLog `protobuf:"bytes,1,rep,name=log" json:"log,omitempty"`
- Offset *LogOffset `protobuf:"bytes,2,opt,name=offset" json:"offset,omitempty"`
- LastEndTime *int64 `protobuf:"varint,3,opt,name=last_end_time" json:"last_end_time,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *LogReadResponse) Reset() { *m = LogReadResponse{} }
-func (m *LogReadResponse) String() string { return proto.CompactTextString(m) }
-func (*LogReadResponse) ProtoMessage() {}
-
-func (m *LogReadResponse) GetLog() []*RequestLog {
- if m != nil {
- return m.Log
- }
- return nil
-}
-
-func (m *LogReadResponse) GetOffset() *LogOffset {
- if m != nil {
- return m.Offset
- }
- return nil
-}
-
-func (m *LogReadResponse) GetLastEndTime() int64 {
- if m != nil && m.LastEndTime != nil {
- return *m.LastEndTime
- }
- return 0
-}
-
-type LogUsageRecord struct {
- VersionId *string `protobuf:"bytes,1,opt,name=version_id" json:"version_id,omitempty"`
- StartTime *int32 `protobuf:"varint,2,opt,name=start_time" json:"start_time,omitempty"`
- EndTime *int32 `protobuf:"varint,3,opt,name=end_time" json:"end_time,omitempty"`
- Count *int64 `protobuf:"varint,4,opt,name=count" json:"count,omitempty"`
- TotalSize *int64 `protobuf:"varint,5,opt,name=total_size" json:"total_size,omitempty"`
- Records *int32 `protobuf:"varint,6,opt,name=records" json:"records,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *LogUsageRecord) Reset() { *m = LogUsageRecord{} }
-func (m *LogUsageRecord) String() string { return proto.CompactTextString(m) }
-func (*LogUsageRecord) ProtoMessage() {}
-
-func (m *LogUsageRecord) GetVersionId() string {
- if m != nil && m.VersionId != nil {
- return *m.VersionId
- }
- return ""
-}
-
-func (m *LogUsageRecord) GetStartTime() int32 {
- if m != nil && m.StartTime != nil {
- return *m.StartTime
- }
- return 0
-}
-
-func (m *LogUsageRecord) GetEndTime() int32 {
- if m != nil && m.EndTime != nil {
- return *m.EndTime
- }
- return 0
-}
-
-func (m *LogUsageRecord) GetCount() int64 {
- if m != nil && m.Count != nil {
- return *m.Count
- }
- return 0
-}
-
-func (m *LogUsageRecord) GetTotalSize() int64 {
- if m != nil && m.TotalSize != nil {
- return *m.TotalSize
- }
- return 0
-}
-
-func (m *LogUsageRecord) GetRecords() int32 {
- if m != nil && m.Records != nil {
- return *m.Records
- }
- return 0
-}
-
-type LogUsageRequest struct {
- AppId *string `protobuf:"bytes,1,req,name=app_id" json:"app_id,omitempty"`
- VersionId []string `protobuf:"bytes,2,rep,name=version_id" json:"version_id,omitempty"`
- StartTime *int32 `protobuf:"varint,3,opt,name=start_time" json:"start_time,omitempty"`
- EndTime *int32 `protobuf:"varint,4,opt,name=end_time" json:"end_time,omitempty"`
- ResolutionHours *uint32 `protobuf:"varint,5,opt,name=resolution_hours,def=1" json:"resolution_hours,omitempty"`
- CombineVersions *bool `protobuf:"varint,6,opt,name=combine_versions" json:"combine_versions,omitempty"`
- UsageVersion *int32 `protobuf:"varint,7,opt,name=usage_version" json:"usage_version,omitempty"`
- VersionsOnly *bool `protobuf:"varint,8,opt,name=versions_only" json:"versions_only,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *LogUsageRequest) Reset() { *m = LogUsageRequest{} }
-func (m *LogUsageRequest) String() string { return proto.CompactTextString(m) }
-func (*LogUsageRequest) ProtoMessage() {}
-
-const Default_LogUsageRequest_ResolutionHours uint32 = 1
-
-func (m *LogUsageRequest) GetAppId() string {
- if m != nil && m.AppId != nil {
- return *m.AppId
- }
- return ""
-}
-
-func (m *LogUsageRequest) GetVersionId() []string {
- if m != nil {
- return m.VersionId
- }
- return nil
-}
-
-func (m *LogUsageRequest) GetStartTime() int32 {
- if m != nil && m.StartTime != nil {
- return *m.StartTime
- }
- return 0
-}
-
-func (m *LogUsageRequest) GetEndTime() int32 {
- if m != nil && m.EndTime != nil {
- return *m.EndTime
- }
- return 0
-}
-
-func (m *LogUsageRequest) GetResolutionHours() uint32 {
- if m != nil && m.ResolutionHours != nil {
- return *m.ResolutionHours
- }
- return Default_LogUsageRequest_ResolutionHours
-}
-
-func (m *LogUsageRequest) GetCombineVersions() bool {
- if m != nil && m.CombineVersions != nil {
- return *m.CombineVersions
- }
- return false
-}
-
-func (m *LogUsageRequest) GetUsageVersion() int32 {
- if m != nil && m.UsageVersion != nil {
- return *m.UsageVersion
- }
- return 0
-}
-
-func (m *LogUsageRequest) GetVersionsOnly() bool {
- if m != nil && m.VersionsOnly != nil {
- return *m.VersionsOnly
- }
- return false
-}
-
-type LogUsageResponse struct {
- Usage []*LogUsageRecord `protobuf:"bytes,1,rep,name=usage" json:"usage,omitempty"`
- Summary *LogUsageRecord `protobuf:"bytes,2,opt,name=summary" json:"summary,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *LogUsageResponse) Reset() { *m = LogUsageResponse{} }
-func (m *LogUsageResponse) String() string { return proto.CompactTextString(m) }
-func (*LogUsageResponse) ProtoMessage() {}
-
-func (m *LogUsageResponse) GetUsage() []*LogUsageRecord {
- if m != nil {
- return m.Usage
- }
- return nil
-}
-
-func (m *LogUsageResponse) GetSummary() *LogUsageRecord {
- if m != nil {
- return m.Summary
- }
- return nil
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/log/log_service.proto b/vendor/google.golang.org/appengine/internal/log/log_service.proto
deleted file mode 100644
index 8981dc4..0000000
--- a/vendor/google.golang.org/appengine/internal/log/log_service.proto
+++ /dev/null
@@ -1,150 +0,0 @@
-syntax = "proto2";
-option go_package = "log";
-
-package appengine;
-
-message LogServiceError {
- enum ErrorCode {
- OK = 0;
- INVALID_REQUEST = 1;
- STORAGE_ERROR = 2;
- }
-}
-
-message UserAppLogLine {
- required int64 timestamp_usec = 1;
- required int64 level = 2;
- required string message = 3;
-}
-
-message UserAppLogGroup {
- repeated UserAppLogLine log_line = 2;
-}
-
-message FlushRequest {
- optional bytes logs = 1;
-}
-
-message SetStatusRequest {
- required string status = 1;
-}
-
-
-message LogOffset {
- optional bytes request_id = 1;
-}
-
-message LogLine {
- required int64 time = 1;
- required int32 level = 2;
- required string log_message = 3;
-}
-
-message RequestLog {
- required string app_id = 1;
- optional string module_id = 37 [default="default"];
- required string version_id = 2;
- required bytes request_id = 3;
- optional LogOffset offset = 35;
- required string ip = 4;
- optional string nickname = 5;
- required int64 start_time = 6;
- required int64 end_time = 7;
- required int64 latency = 8;
- required int64 mcycles = 9;
- required string method = 10;
- required string resource = 11;
- required string http_version = 12;
- required int32 status = 13;
- required int64 response_size = 14;
- optional string referrer = 15;
- optional string user_agent = 16;
- required string url_map_entry = 17;
- required string combined = 18;
- optional int64 api_mcycles = 19;
- optional string host = 20;
- optional double cost = 21;
-
- optional string task_queue_name = 22;
- optional string task_name = 23;
-
- optional bool was_loading_request = 24;
- optional int64 pending_time = 25;
- optional int32 replica_index = 26 [default = -1];
- optional bool finished = 27 [default = true];
- optional bytes clone_key = 28;
-
- repeated LogLine line = 29;
-
- optional bool lines_incomplete = 36;
- optional bytes app_engine_release = 38;
-
- optional int32 exit_reason = 30;
- optional bool was_throttled_for_time = 31;
- optional bool was_throttled_for_requests = 32;
- optional int64 throttled_time = 33;
-
- optional bytes server_name = 34;
-}
-
-message LogModuleVersion {
- optional string module_id = 1 [default="default"];
- optional string version_id = 2;
-}
-
-message LogReadRequest {
- required string app_id = 1;
- repeated string version_id = 2;
- repeated LogModuleVersion module_version = 19;
-
- optional int64 start_time = 3;
- optional int64 end_time = 4;
- optional LogOffset offset = 5;
- repeated bytes request_id = 6;
-
- optional int32 minimum_log_level = 7;
- optional bool include_incomplete = 8;
- optional int64 count = 9;
-
- optional string combined_log_regex = 14;
- optional string host_regex = 15;
- optional int32 replica_index = 16;
-
- optional bool include_app_logs = 10;
- optional int32 app_logs_per_request = 17;
- optional bool include_host = 11;
- optional bool include_all = 12;
- optional bool cache_iterator = 13;
- optional int32 num_shards = 18;
-}
-
-message LogReadResponse {
- repeated RequestLog log = 1;
- optional LogOffset offset = 2;
- optional int64 last_end_time = 3;
-}
-
-message LogUsageRecord {
- optional string version_id = 1;
- optional int32 start_time = 2;
- optional int32 end_time = 3;
- optional int64 count = 4;
- optional int64 total_size = 5;
- optional int32 records = 6;
-}
-
-message LogUsageRequest {
- required string app_id = 1;
- repeated string version_id = 2;
- optional int32 start_time = 3;
- optional int32 end_time = 4;
- optional uint32 resolution_hours = 5 [default = 1];
- optional bool combine_versions = 6;
- optional int32 usage_version = 7;
- optional bool versions_only = 8;
-}
-
-message LogUsageResponse {
- repeated LogUsageRecord usage = 1;
- optional LogUsageRecord summary = 2;
-}
diff --git a/vendor/google.golang.org/appengine/internal/mail/mail_service.pb.go b/vendor/google.golang.org/appengine/internal/mail/mail_service.pb.go
deleted file mode 100644
index b8d5f03..0000000
--- a/vendor/google.golang.org/appengine/internal/mail/mail_service.pb.go
+++ /dev/null
@@ -1,229 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/mail/mail_service.proto
-// DO NOT EDIT!
-
-/*
-Package mail is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/mail/mail_service.proto
-
-It has these top-level messages:
- MailServiceError
- MailAttachment
- MailHeader
- MailMessage
-*/
-package mail
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type MailServiceError_ErrorCode int32
-
-const (
- MailServiceError_OK MailServiceError_ErrorCode = 0
- MailServiceError_INTERNAL_ERROR MailServiceError_ErrorCode = 1
- MailServiceError_BAD_REQUEST MailServiceError_ErrorCode = 2
- MailServiceError_UNAUTHORIZED_SENDER MailServiceError_ErrorCode = 3
- MailServiceError_INVALID_ATTACHMENT_TYPE MailServiceError_ErrorCode = 4
- MailServiceError_INVALID_HEADER_NAME MailServiceError_ErrorCode = 5
- MailServiceError_INVALID_CONTENT_ID MailServiceError_ErrorCode = 6
-)
-
-var MailServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "INTERNAL_ERROR",
- 2: "BAD_REQUEST",
- 3: "UNAUTHORIZED_SENDER",
- 4: "INVALID_ATTACHMENT_TYPE",
- 5: "INVALID_HEADER_NAME",
- 6: "INVALID_CONTENT_ID",
-}
-var MailServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "INTERNAL_ERROR": 1,
- "BAD_REQUEST": 2,
- "UNAUTHORIZED_SENDER": 3,
- "INVALID_ATTACHMENT_TYPE": 4,
- "INVALID_HEADER_NAME": 5,
- "INVALID_CONTENT_ID": 6,
-}
-
-func (x MailServiceError_ErrorCode) Enum() *MailServiceError_ErrorCode {
- p := new(MailServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x MailServiceError_ErrorCode) String() string {
- return proto.EnumName(MailServiceError_ErrorCode_name, int32(x))
-}
-func (x *MailServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(MailServiceError_ErrorCode_value, data, "MailServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = MailServiceError_ErrorCode(value)
- return nil
-}
-
-type MailServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MailServiceError) Reset() { *m = MailServiceError{} }
-func (m *MailServiceError) String() string { return proto.CompactTextString(m) }
-func (*MailServiceError) ProtoMessage() {}
-
-type MailAttachment struct {
- FileName *string `protobuf:"bytes,1,req,name=FileName" json:"FileName,omitempty"`
- Data []byte `protobuf:"bytes,2,req,name=Data" json:"Data,omitempty"`
- ContentID *string `protobuf:"bytes,3,opt,name=ContentID" json:"ContentID,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MailAttachment) Reset() { *m = MailAttachment{} }
-func (m *MailAttachment) String() string { return proto.CompactTextString(m) }
-func (*MailAttachment) ProtoMessage() {}
-
-func (m *MailAttachment) GetFileName() string {
- if m != nil && m.FileName != nil {
- return *m.FileName
- }
- return ""
-}
-
-func (m *MailAttachment) GetData() []byte {
- if m != nil {
- return m.Data
- }
- return nil
-}
-
-func (m *MailAttachment) GetContentID() string {
- if m != nil && m.ContentID != nil {
- return *m.ContentID
- }
- return ""
-}
-
-type MailHeader struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- Value *string `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MailHeader) Reset() { *m = MailHeader{} }
-func (m *MailHeader) String() string { return proto.CompactTextString(m) }
-func (*MailHeader) ProtoMessage() {}
-
-func (m *MailHeader) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *MailHeader) GetValue() string {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return ""
-}
-
-type MailMessage struct {
- Sender *string `protobuf:"bytes,1,req,name=Sender" json:"Sender,omitempty"`
- ReplyTo *string `protobuf:"bytes,2,opt,name=ReplyTo" json:"ReplyTo,omitempty"`
- To []string `protobuf:"bytes,3,rep,name=To" json:"To,omitempty"`
- Cc []string `protobuf:"bytes,4,rep,name=Cc" json:"Cc,omitempty"`
- Bcc []string `protobuf:"bytes,5,rep,name=Bcc" json:"Bcc,omitempty"`
- Subject *string `protobuf:"bytes,6,req,name=Subject" json:"Subject,omitempty"`
- TextBody *string `protobuf:"bytes,7,opt,name=TextBody" json:"TextBody,omitempty"`
- HtmlBody *string `protobuf:"bytes,8,opt,name=HtmlBody" json:"HtmlBody,omitempty"`
- Attachment []*MailAttachment `protobuf:"bytes,9,rep,name=Attachment" json:"Attachment,omitempty"`
- Header []*MailHeader `protobuf:"bytes,10,rep,name=Header" json:"Header,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MailMessage) Reset() { *m = MailMessage{} }
-func (m *MailMessage) String() string { return proto.CompactTextString(m) }
-func (*MailMessage) ProtoMessage() {}
-
-func (m *MailMessage) GetSender() string {
- if m != nil && m.Sender != nil {
- return *m.Sender
- }
- return ""
-}
-
-func (m *MailMessage) GetReplyTo() string {
- if m != nil && m.ReplyTo != nil {
- return *m.ReplyTo
- }
- return ""
-}
-
-func (m *MailMessage) GetTo() []string {
- if m != nil {
- return m.To
- }
- return nil
-}
-
-func (m *MailMessage) GetCc() []string {
- if m != nil {
- return m.Cc
- }
- return nil
-}
-
-func (m *MailMessage) GetBcc() []string {
- if m != nil {
- return m.Bcc
- }
- return nil
-}
-
-func (m *MailMessage) GetSubject() string {
- if m != nil && m.Subject != nil {
- return *m.Subject
- }
- return ""
-}
-
-func (m *MailMessage) GetTextBody() string {
- if m != nil && m.TextBody != nil {
- return *m.TextBody
- }
- return ""
-}
-
-func (m *MailMessage) GetHtmlBody() string {
- if m != nil && m.HtmlBody != nil {
- return *m.HtmlBody
- }
- return ""
-}
-
-func (m *MailMessage) GetAttachment() []*MailAttachment {
- if m != nil {
- return m.Attachment
- }
- return nil
-}
-
-func (m *MailMessage) GetHeader() []*MailHeader {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/mail/mail_service.proto b/vendor/google.golang.org/appengine/internal/mail/mail_service.proto
deleted file mode 100644
index 4e57b7a..0000000
--- a/vendor/google.golang.org/appengine/internal/mail/mail_service.proto
+++ /dev/null
@@ -1,45 +0,0 @@
-syntax = "proto2";
-option go_package = "mail";
-
-package appengine;
-
-message MailServiceError {
- enum ErrorCode {
- OK = 0;
- INTERNAL_ERROR = 1;
- BAD_REQUEST = 2;
- UNAUTHORIZED_SENDER = 3;
- INVALID_ATTACHMENT_TYPE = 4;
- INVALID_HEADER_NAME = 5;
- INVALID_CONTENT_ID = 6;
- }
-}
-
-message MailAttachment {
- required string FileName = 1;
- required bytes Data = 2;
- optional string ContentID = 3;
-}
-
-message MailHeader {
- required string name = 1;
- required string value = 2;
-}
-
-message MailMessage {
- required string Sender = 1;
- optional string ReplyTo = 2;
-
- repeated string To = 3;
- repeated string Cc = 4;
- repeated string Bcc = 5;
-
- required string Subject = 6;
-
- optional string TextBody = 7;
- optional string HtmlBody = 8;
-
- repeated MailAttachment Attachment = 9;
-
- repeated MailHeader Header = 10;
-}
diff --git a/vendor/google.golang.org/appengine/internal/memcache/memcache_service.pb.go b/vendor/google.golang.org/appengine/internal/memcache/memcache_service.pb.go
deleted file mode 100644
index 252fef8..0000000
--- a/vendor/google.golang.org/appengine/internal/memcache/memcache_service.pb.go
+++ /dev/null
@@ -1,938 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/memcache/memcache_service.proto
-// DO NOT EDIT!
-
-/*
-Package memcache is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/memcache/memcache_service.proto
-
-It has these top-level messages:
- MemcacheServiceError
- AppOverride
- MemcacheGetRequest
- MemcacheGetResponse
- MemcacheSetRequest
- MemcacheSetResponse
- MemcacheDeleteRequest
- MemcacheDeleteResponse
- MemcacheIncrementRequest
- MemcacheIncrementResponse
- MemcacheBatchIncrementRequest
- MemcacheBatchIncrementResponse
- MemcacheFlushRequest
- MemcacheFlushResponse
- MemcacheStatsRequest
- MergedNamespaceStats
- MemcacheStatsResponse
- MemcacheGrabTailRequest
- MemcacheGrabTailResponse
-*/
-package memcache
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type MemcacheServiceError_ErrorCode int32
-
-const (
- MemcacheServiceError_OK MemcacheServiceError_ErrorCode = 0
- MemcacheServiceError_UNSPECIFIED_ERROR MemcacheServiceError_ErrorCode = 1
- MemcacheServiceError_NAMESPACE_NOT_SET MemcacheServiceError_ErrorCode = 2
- MemcacheServiceError_PERMISSION_DENIED MemcacheServiceError_ErrorCode = 3
- MemcacheServiceError_INVALID_VALUE MemcacheServiceError_ErrorCode = 6
-)
-
-var MemcacheServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "UNSPECIFIED_ERROR",
- 2: "NAMESPACE_NOT_SET",
- 3: "PERMISSION_DENIED",
- 6: "INVALID_VALUE",
-}
-var MemcacheServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "UNSPECIFIED_ERROR": 1,
- "NAMESPACE_NOT_SET": 2,
- "PERMISSION_DENIED": 3,
- "INVALID_VALUE": 6,
-}
-
-func (x MemcacheServiceError_ErrorCode) Enum() *MemcacheServiceError_ErrorCode {
- p := new(MemcacheServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x MemcacheServiceError_ErrorCode) String() string {
- return proto.EnumName(MemcacheServiceError_ErrorCode_name, int32(x))
-}
-func (x *MemcacheServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(MemcacheServiceError_ErrorCode_value, data, "MemcacheServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = MemcacheServiceError_ErrorCode(value)
- return nil
-}
-
-type MemcacheSetRequest_SetPolicy int32
-
-const (
- MemcacheSetRequest_SET MemcacheSetRequest_SetPolicy = 1
- MemcacheSetRequest_ADD MemcacheSetRequest_SetPolicy = 2
- MemcacheSetRequest_REPLACE MemcacheSetRequest_SetPolicy = 3
- MemcacheSetRequest_CAS MemcacheSetRequest_SetPolicy = 4
-)
-
-var MemcacheSetRequest_SetPolicy_name = map[int32]string{
- 1: "SET",
- 2: "ADD",
- 3: "REPLACE",
- 4: "CAS",
-}
-var MemcacheSetRequest_SetPolicy_value = map[string]int32{
- "SET": 1,
- "ADD": 2,
- "REPLACE": 3,
- "CAS": 4,
-}
-
-func (x MemcacheSetRequest_SetPolicy) Enum() *MemcacheSetRequest_SetPolicy {
- p := new(MemcacheSetRequest_SetPolicy)
- *p = x
- return p
-}
-func (x MemcacheSetRequest_SetPolicy) String() string {
- return proto.EnumName(MemcacheSetRequest_SetPolicy_name, int32(x))
-}
-func (x *MemcacheSetRequest_SetPolicy) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(MemcacheSetRequest_SetPolicy_value, data, "MemcacheSetRequest_SetPolicy")
- if err != nil {
- return err
- }
- *x = MemcacheSetRequest_SetPolicy(value)
- return nil
-}
-
-type MemcacheSetResponse_SetStatusCode int32
-
-const (
- MemcacheSetResponse_STORED MemcacheSetResponse_SetStatusCode = 1
- MemcacheSetResponse_NOT_STORED MemcacheSetResponse_SetStatusCode = 2
- MemcacheSetResponse_ERROR MemcacheSetResponse_SetStatusCode = 3
- MemcacheSetResponse_EXISTS MemcacheSetResponse_SetStatusCode = 4
-)
-
-var MemcacheSetResponse_SetStatusCode_name = map[int32]string{
- 1: "STORED",
- 2: "NOT_STORED",
- 3: "ERROR",
- 4: "EXISTS",
-}
-var MemcacheSetResponse_SetStatusCode_value = map[string]int32{
- "STORED": 1,
- "NOT_STORED": 2,
- "ERROR": 3,
- "EXISTS": 4,
-}
-
-func (x MemcacheSetResponse_SetStatusCode) Enum() *MemcacheSetResponse_SetStatusCode {
- p := new(MemcacheSetResponse_SetStatusCode)
- *p = x
- return p
-}
-func (x MemcacheSetResponse_SetStatusCode) String() string {
- return proto.EnumName(MemcacheSetResponse_SetStatusCode_name, int32(x))
-}
-func (x *MemcacheSetResponse_SetStatusCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(MemcacheSetResponse_SetStatusCode_value, data, "MemcacheSetResponse_SetStatusCode")
- if err != nil {
- return err
- }
- *x = MemcacheSetResponse_SetStatusCode(value)
- return nil
-}
-
-type MemcacheDeleteResponse_DeleteStatusCode int32
-
-const (
- MemcacheDeleteResponse_DELETED MemcacheDeleteResponse_DeleteStatusCode = 1
- MemcacheDeleteResponse_NOT_FOUND MemcacheDeleteResponse_DeleteStatusCode = 2
-)
-
-var MemcacheDeleteResponse_DeleteStatusCode_name = map[int32]string{
- 1: "DELETED",
- 2: "NOT_FOUND",
-}
-var MemcacheDeleteResponse_DeleteStatusCode_value = map[string]int32{
- "DELETED": 1,
- "NOT_FOUND": 2,
-}
-
-func (x MemcacheDeleteResponse_DeleteStatusCode) Enum() *MemcacheDeleteResponse_DeleteStatusCode {
- p := new(MemcacheDeleteResponse_DeleteStatusCode)
- *p = x
- return p
-}
-func (x MemcacheDeleteResponse_DeleteStatusCode) String() string {
- return proto.EnumName(MemcacheDeleteResponse_DeleteStatusCode_name, int32(x))
-}
-func (x *MemcacheDeleteResponse_DeleteStatusCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(MemcacheDeleteResponse_DeleteStatusCode_value, data, "MemcacheDeleteResponse_DeleteStatusCode")
- if err != nil {
- return err
- }
- *x = MemcacheDeleteResponse_DeleteStatusCode(value)
- return nil
-}
-
-type MemcacheIncrementRequest_Direction int32
-
-const (
- MemcacheIncrementRequest_INCREMENT MemcacheIncrementRequest_Direction = 1
- MemcacheIncrementRequest_DECREMENT MemcacheIncrementRequest_Direction = 2
-)
-
-var MemcacheIncrementRequest_Direction_name = map[int32]string{
- 1: "INCREMENT",
- 2: "DECREMENT",
-}
-var MemcacheIncrementRequest_Direction_value = map[string]int32{
- "INCREMENT": 1,
- "DECREMENT": 2,
-}
-
-func (x MemcacheIncrementRequest_Direction) Enum() *MemcacheIncrementRequest_Direction {
- p := new(MemcacheIncrementRequest_Direction)
- *p = x
- return p
-}
-func (x MemcacheIncrementRequest_Direction) String() string {
- return proto.EnumName(MemcacheIncrementRequest_Direction_name, int32(x))
-}
-func (x *MemcacheIncrementRequest_Direction) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(MemcacheIncrementRequest_Direction_value, data, "MemcacheIncrementRequest_Direction")
- if err != nil {
- return err
- }
- *x = MemcacheIncrementRequest_Direction(value)
- return nil
-}
-
-type MemcacheIncrementResponse_IncrementStatusCode int32
-
-const (
- MemcacheIncrementResponse_OK MemcacheIncrementResponse_IncrementStatusCode = 1
- MemcacheIncrementResponse_NOT_CHANGED MemcacheIncrementResponse_IncrementStatusCode = 2
- MemcacheIncrementResponse_ERROR MemcacheIncrementResponse_IncrementStatusCode = 3
-)
-
-var MemcacheIncrementResponse_IncrementStatusCode_name = map[int32]string{
- 1: "OK",
- 2: "NOT_CHANGED",
- 3: "ERROR",
-}
-var MemcacheIncrementResponse_IncrementStatusCode_value = map[string]int32{
- "OK": 1,
- "NOT_CHANGED": 2,
- "ERROR": 3,
-}
-
-func (x MemcacheIncrementResponse_IncrementStatusCode) Enum() *MemcacheIncrementResponse_IncrementStatusCode {
- p := new(MemcacheIncrementResponse_IncrementStatusCode)
- *p = x
- return p
-}
-func (x MemcacheIncrementResponse_IncrementStatusCode) String() string {
- return proto.EnumName(MemcacheIncrementResponse_IncrementStatusCode_name, int32(x))
-}
-func (x *MemcacheIncrementResponse_IncrementStatusCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(MemcacheIncrementResponse_IncrementStatusCode_value, data, "MemcacheIncrementResponse_IncrementStatusCode")
- if err != nil {
- return err
- }
- *x = MemcacheIncrementResponse_IncrementStatusCode(value)
- return nil
-}
-
-type MemcacheServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheServiceError) Reset() { *m = MemcacheServiceError{} }
-func (m *MemcacheServiceError) String() string { return proto.CompactTextString(m) }
-func (*MemcacheServiceError) ProtoMessage() {}
-
-type AppOverride struct {
- AppId *string `protobuf:"bytes,1,req,name=app_id" json:"app_id,omitempty"`
- NumMemcachegBackends *int32 `protobuf:"varint,2,opt,name=num_memcacheg_backends" json:"num_memcacheg_backends,omitempty"`
- IgnoreShardlock *bool `protobuf:"varint,3,opt,name=ignore_shardlock" json:"ignore_shardlock,omitempty"`
- MemcachePoolHint *string `protobuf:"bytes,4,opt,name=memcache_pool_hint" json:"memcache_pool_hint,omitempty"`
- MemcacheShardingStrategy []byte `protobuf:"bytes,5,opt,name=memcache_sharding_strategy" json:"memcache_sharding_strategy,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AppOverride) Reset() { *m = AppOverride{} }
-func (m *AppOverride) String() string { return proto.CompactTextString(m) }
-func (*AppOverride) ProtoMessage() {}
-
-func (m *AppOverride) GetAppId() string {
- if m != nil && m.AppId != nil {
- return *m.AppId
- }
- return ""
-}
-
-func (m *AppOverride) GetNumMemcachegBackends() int32 {
- if m != nil && m.NumMemcachegBackends != nil {
- return *m.NumMemcachegBackends
- }
- return 0
-}
-
-func (m *AppOverride) GetIgnoreShardlock() bool {
- if m != nil && m.IgnoreShardlock != nil {
- return *m.IgnoreShardlock
- }
- return false
-}
-
-func (m *AppOverride) GetMemcachePoolHint() string {
- if m != nil && m.MemcachePoolHint != nil {
- return *m.MemcachePoolHint
- }
- return ""
-}
-
-func (m *AppOverride) GetMemcacheShardingStrategy() []byte {
- if m != nil {
- return m.MemcacheShardingStrategy
- }
- return nil
-}
-
-type MemcacheGetRequest struct {
- Key [][]byte `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
- NameSpace *string `protobuf:"bytes,2,opt,name=name_space,def=" json:"name_space,omitempty"`
- ForCas *bool `protobuf:"varint,4,opt,name=for_cas" json:"for_cas,omitempty"`
- Override *AppOverride `protobuf:"bytes,5,opt,name=override" json:"override,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheGetRequest) Reset() { *m = MemcacheGetRequest{} }
-func (m *MemcacheGetRequest) String() string { return proto.CompactTextString(m) }
-func (*MemcacheGetRequest) ProtoMessage() {}
-
-func (m *MemcacheGetRequest) GetKey() [][]byte {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *MemcacheGetRequest) GetNameSpace() string {
- if m != nil && m.NameSpace != nil {
- return *m.NameSpace
- }
- return ""
-}
-
-func (m *MemcacheGetRequest) GetForCas() bool {
- if m != nil && m.ForCas != nil {
- return *m.ForCas
- }
- return false
-}
-
-func (m *MemcacheGetRequest) GetOverride() *AppOverride {
- if m != nil {
- return m.Override
- }
- return nil
-}
-
-type MemcacheGetResponse struct {
- Item []*MemcacheGetResponse_Item `protobuf:"group,1,rep,name=Item" json:"item,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheGetResponse) Reset() { *m = MemcacheGetResponse{} }
-func (m *MemcacheGetResponse) String() string { return proto.CompactTextString(m) }
-func (*MemcacheGetResponse) ProtoMessage() {}
-
-func (m *MemcacheGetResponse) GetItem() []*MemcacheGetResponse_Item {
- if m != nil {
- return m.Item
- }
- return nil
-}
-
-type MemcacheGetResponse_Item struct {
- Key []byte `protobuf:"bytes,2,req,name=key" json:"key,omitempty"`
- Value []byte `protobuf:"bytes,3,req,name=value" json:"value,omitempty"`
- Flags *uint32 `protobuf:"fixed32,4,opt,name=flags" json:"flags,omitempty"`
- CasId *uint64 `protobuf:"fixed64,5,opt,name=cas_id" json:"cas_id,omitempty"`
- ExpiresInSeconds *int32 `protobuf:"varint,6,opt,name=expires_in_seconds" json:"expires_in_seconds,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheGetResponse_Item) Reset() { *m = MemcacheGetResponse_Item{} }
-func (m *MemcacheGetResponse_Item) String() string { return proto.CompactTextString(m) }
-func (*MemcacheGetResponse_Item) ProtoMessage() {}
-
-func (m *MemcacheGetResponse_Item) GetKey() []byte {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *MemcacheGetResponse_Item) GetValue() []byte {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-func (m *MemcacheGetResponse_Item) GetFlags() uint32 {
- if m != nil && m.Flags != nil {
- return *m.Flags
- }
- return 0
-}
-
-func (m *MemcacheGetResponse_Item) GetCasId() uint64 {
- if m != nil && m.CasId != nil {
- return *m.CasId
- }
- return 0
-}
-
-func (m *MemcacheGetResponse_Item) GetExpiresInSeconds() int32 {
- if m != nil && m.ExpiresInSeconds != nil {
- return *m.ExpiresInSeconds
- }
- return 0
-}
-
-type MemcacheSetRequest struct {
- Item []*MemcacheSetRequest_Item `protobuf:"group,1,rep,name=Item" json:"item,omitempty"`
- NameSpace *string `protobuf:"bytes,7,opt,name=name_space,def=" json:"name_space,omitempty"`
- Override *AppOverride `protobuf:"bytes,10,opt,name=override" json:"override,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheSetRequest) Reset() { *m = MemcacheSetRequest{} }
-func (m *MemcacheSetRequest) String() string { return proto.CompactTextString(m) }
-func (*MemcacheSetRequest) ProtoMessage() {}
-
-func (m *MemcacheSetRequest) GetItem() []*MemcacheSetRequest_Item {
- if m != nil {
- return m.Item
- }
- return nil
-}
-
-func (m *MemcacheSetRequest) GetNameSpace() string {
- if m != nil && m.NameSpace != nil {
- return *m.NameSpace
- }
- return ""
-}
-
-func (m *MemcacheSetRequest) GetOverride() *AppOverride {
- if m != nil {
- return m.Override
- }
- return nil
-}
-
-type MemcacheSetRequest_Item struct {
- Key []byte `protobuf:"bytes,2,req,name=key" json:"key,omitempty"`
- Value []byte `protobuf:"bytes,3,req,name=value" json:"value,omitempty"`
- Flags *uint32 `protobuf:"fixed32,4,opt,name=flags" json:"flags,omitempty"`
- SetPolicy *MemcacheSetRequest_SetPolicy `protobuf:"varint,5,opt,name=set_policy,enum=appengine.MemcacheSetRequest_SetPolicy,def=1" json:"set_policy,omitempty"`
- ExpirationTime *uint32 `protobuf:"fixed32,6,opt,name=expiration_time,def=0" json:"expiration_time,omitempty"`
- CasId *uint64 `protobuf:"fixed64,8,opt,name=cas_id" json:"cas_id,omitempty"`
- ForCas *bool `protobuf:"varint,9,opt,name=for_cas" json:"for_cas,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheSetRequest_Item) Reset() { *m = MemcacheSetRequest_Item{} }
-func (m *MemcacheSetRequest_Item) String() string { return proto.CompactTextString(m) }
-func (*MemcacheSetRequest_Item) ProtoMessage() {}
-
-const Default_MemcacheSetRequest_Item_SetPolicy MemcacheSetRequest_SetPolicy = MemcacheSetRequest_SET
-const Default_MemcacheSetRequest_Item_ExpirationTime uint32 = 0
-
-func (m *MemcacheSetRequest_Item) GetKey() []byte {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *MemcacheSetRequest_Item) GetValue() []byte {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-func (m *MemcacheSetRequest_Item) GetFlags() uint32 {
- if m != nil && m.Flags != nil {
- return *m.Flags
- }
- return 0
-}
-
-func (m *MemcacheSetRequest_Item) GetSetPolicy() MemcacheSetRequest_SetPolicy {
- if m != nil && m.SetPolicy != nil {
- return *m.SetPolicy
- }
- return Default_MemcacheSetRequest_Item_SetPolicy
-}
-
-func (m *MemcacheSetRequest_Item) GetExpirationTime() uint32 {
- if m != nil && m.ExpirationTime != nil {
- return *m.ExpirationTime
- }
- return Default_MemcacheSetRequest_Item_ExpirationTime
-}
-
-func (m *MemcacheSetRequest_Item) GetCasId() uint64 {
- if m != nil && m.CasId != nil {
- return *m.CasId
- }
- return 0
-}
-
-func (m *MemcacheSetRequest_Item) GetForCas() bool {
- if m != nil && m.ForCas != nil {
- return *m.ForCas
- }
- return false
-}
-
-type MemcacheSetResponse struct {
- SetStatus []MemcacheSetResponse_SetStatusCode `protobuf:"varint,1,rep,name=set_status,enum=appengine.MemcacheSetResponse_SetStatusCode" json:"set_status,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheSetResponse) Reset() { *m = MemcacheSetResponse{} }
-func (m *MemcacheSetResponse) String() string { return proto.CompactTextString(m) }
-func (*MemcacheSetResponse) ProtoMessage() {}
-
-func (m *MemcacheSetResponse) GetSetStatus() []MemcacheSetResponse_SetStatusCode {
- if m != nil {
- return m.SetStatus
- }
- return nil
-}
-
-type MemcacheDeleteRequest struct {
- Item []*MemcacheDeleteRequest_Item `protobuf:"group,1,rep,name=Item" json:"item,omitempty"`
- NameSpace *string `protobuf:"bytes,4,opt,name=name_space,def=" json:"name_space,omitempty"`
- Override *AppOverride `protobuf:"bytes,5,opt,name=override" json:"override,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheDeleteRequest) Reset() { *m = MemcacheDeleteRequest{} }
-func (m *MemcacheDeleteRequest) String() string { return proto.CompactTextString(m) }
-func (*MemcacheDeleteRequest) ProtoMessage() {}
-
-func (m *MemcacheDeleteRequest) GetItem() []*MemcacheDeleteRequest_Item {
- if m != nil {
- return m.Item
- }
- return nil
-}
-
-func (m *MemcacheDeleteRequest) GetNameSpace() string {
- if m != nil && m.NameSpace != nil {
- return *m.NameSpace
- }
- return ""
-}
-
-func (m *MemcacheDeleteRequest) GetOverride() *AppOverride {
- if m != nil {
- return m.Override
- }
- return nil
-}
-
-type MemcacheDeleteRequest_Item struct {
- Key []byte `protobuf:"bytes,2,req,name=key" json:"key,omitempty"`
- DeleteTime *uint32 `protobuf:"fixed32,3,opt,name=delete_time,def=0" json:"delete_time,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheDeleteRequest_Item) Reset() { *m = MemcacheDeleteRequest_Item{} }
-func (m *MemcacheDeleteRequest_Item) String() string { return proto.CompactTextString(m) }
-func (*MemcacheDeleteRequest_Item) ProtoMessage() {}
-
-const Default_MemcacheDeleteRequest_Item_DeleteTime uint32 = 0
-
-func (m *MemcacheDeleteRequest_Item) GetKey() []byte {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *MemcacheDeleteRequest_Item) GetDeleteTime() uint32 {
- if m != nil && m.DeleteTime != nil {
- return *m.DeleteTime
- }
- return Default_MemcacheDeleteRequest_Item_DeleteTime
-}
-
-type MemcacheDeleteResponse struct {
- DeleteStatus []MemcacheDeleteResponse_DeleteStatusCode `protobuf:"varint,1,rep,name=delete_status,enum=appengine.MemcacheDeleteResponse_DeleteStatusCode" json:"delete_status,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheDeleteResponse) Reset() { *m = MemcacheDeleteResponse{} }
-func (m *MemcacheDeleteResponse) String() string { return proto.CompactTextString(m) }
-func (*MemcacheDeleteResponse) ProtoMessage() {}
-
-func (m *MemcacheDeleteResponse) GetDeleteStatus() []MemcacheDeleteResponse_DeleteStatusCode {
- if m != nil {
- return m.DeleteStatus
- }
- return nil
-}
-
-type MemcacheIncrementRequest struct {
- Key []byte `protobuf:"bytes,1,req,name=key" json:"key,omitempty"`
- NameSpace *string `protobuf:"bytes,4,opt,name=name_space,def=" json:"name_space,omitempty"`
- Delta *uint64 `protobuf:"varint,2,opt,name=delta,def=1" json:"delta,omitempty"`
- Direction *MemcacheIncrementRequest_Direction `protobuf:"varint,3,opt,name=direction,enum=appengine.MemcacheIncrementRequest_Direction,def=1" json:"direction,omitempty"`
- InitialValue *uint64 `protobuf:"varint,5,opt,name=initial_value" json:"initial_value,omitempty"`
- InitialFlags *uint32 `protobuf:"fixed32,6,opt,name=initial_flags" json:"initial_flags,omitempty"`
- Override *AppOverride `protobuf:"bytes,7,opt,name=override" json:"override,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheIncrementRequest) Reset() { *m = MemcacheIncrementRequest{} }
-func (m *MemcacheIncrementRequest) String() string { return proto.CompactTextString(m) }
-func (*MemcacheIncrementRequest) ProtoMessage() {}
-
-const Default_MemcacheIncrementRequest_Delta uint64 = 1
-const Default_MemcacheIncrementRequest_Direction MemcacheIncrementRequest_Direction = MemcacheIncrementRequest_INCREMENT
-
-func (m *MemcacheIncrementRequest) GetKey() []byte {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *MemcacheIncrementRequest) GetNameSpace() string {
- if m != nil && m.NameSpace != nil {
- return *m.NameSpace
- }
- return ""
-}
-
-func (m *MemcacheIncrementRequest) GetDelta() uint64 {
- if m != nil && m.Delta != nil {
- return *m.Delta
- }
- return Default_MemcacheIncrementRequest_Delta
-}
-
-func (m *MemcacheIncrementRequest) GetDirection() MemcacheIncrementRequest_Direction {
- if m != nil && m.Direction != nil {
- return *m.Direction
- }
- return Default_MemcacheIncrementRequest_Direction
-}
-
-func (m *MemcacheIncrementRequest) GetInitialValue() uint64 {
- if m != nil && m.InitialValue != nil {
- return *m.InitialValue
- }
- return 0
-}
-
-func (m *MemcacheIncrementRequest) GetInitialFlags() uint32 {
- if m != nil && m.InitialFlags != nil {
- return *m.InitialFlags
- }
- return 0
-}
-
-func (m *MemcacheIncrementRequest) GetOverride() *AppOverride {
- if m != nil {
- return m.Override
- }
- return nil
-}
-
-type MemcacheIncrementResponse struct {
- NewValue *uint64 `protobuf:"varint,1,opt,name=new_value" json:"new_value,omitempty"`
- IncrementStatus *MemcacheIncrementResponse_IncrementStatusCode `protobuf:"varint,2,opt,name=increment_status,enum=appengine.MemcacheIncrementResponse_IncrementStatusCode" json:"increment_status,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheIncrementResponse) Reset() { *m = MemcacheIncrementResponse{} }
-func (m *MemcacheIncrementResponse) String() string { return proto.CompactTextString(m) }
-func (*MemcacheIncrementResponse) ProtoMessage() {}
-
-func (m *MemcacheIncrementResponse) GetNewValue() uint64 {
- if m != nil && m.NewValue != nil {
- return *m.NewValue
- }
- return 0
-}
-
-func (m *MemcacheIncrementResponse) GetIncrementStatus() MemcacheIncrementResponse_IncrementStatusCode {
- if m != nil && m.IncrementStatus != nil {
- return *m.IncrementStatus
- }
- return MemcacheIncrementResponse_OK
-}
-
-type MemcacheBatchIncrementRequest struct {
- NameSpace *string `protobuf:"bytes,1,opt,name=name_space,def=" json:"name_space,omitempty"`
- Item []*MemcacheIncrementRequest `protobuf:"bytes,2,rep,name=item" json:"item,omitempty"`
- Override *AppOverride `protobuf:"bytes,3,opt,name=override" json:"override,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheBatchIncrementRequest) Reset() { *m = MemcacheBatchIncrementRequest{} }
-func (m *MemcacheBatchIncrementRequest) String() string { return proto.CompactTextString(m) }
-func (*MemcacheBatchIncrementRequest) ProtoMessage() {}
-
-func (m *MemcacheBatchIncrementRequest) GetNameSpace() string {
- if m != nil && m.NameSpace != nil {
- return *m.NameSpace
- }
- return ""
-}
-
-func (m *MemcacheBatchIncrementRequest) GetItem() []*MemcacheIncrementRequest {
- if m != nil {
- return m.Item
- }
- return nil
-}
-
-func (m *MemcacheBatchIncrementRequest) GetOverride() *AppOverride {
- if m != nil {
- return m.Override
- }
- return nil
-}
-
-type MemcacheBatchIncrementResponse struct {
- Item []*MemcacheIncrementResponse `protobuf:"bytes,1,rep,name=item" json:"item,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheBatchIncrementResponse) Reset() { *m = MemcacheBatchIncrementResponse{} }
-func (m *MemcacheBatchIncrementResponse) String() string { return proto.CompactTextString(m) }
-func (*MemcacheBatchIncrementResponse) ProtoMessage() {}
-
-func (m *MemcacheBatchIncrementResponse) GetItem() []*MemcacheIncrementResponse {
- if m != nil {
- return m.Item
- }
- return nil
-}
-
-type MemcacheFlushRequest struct {
- Override *AppOverride `protobuf:"bytes,1,opt,name=override" json:"override,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheFlushRequest) Reset() { *m = MemcacheFlushRequest{} }
-func (m *MemcacheFlushRequest) String() string { return proto.CompactTextString(m) }
-func (*MemcacheFlushRequest) ProtoMessage() {}
-
-func (m *MemcacheFlushRequest) GetOverride() *AppOverride {
- if m != nil {
- return m.Override
- }
- return nil
-}
-
-type MemcacheFlushResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheFlushResponse) Reset() { *m = MemcacheFlushResponse{} }
-func (m *MemcacheFlushResponse) String() string { return proto.CompactTextString(m) }
-func (*MemcacheFlushResponse) ProtoMessage() {}
-
-type MemcacheStatsRequest struct {
- Override *AppOverride `protobuf:"bytes,1,opt,name=override" json:"override,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheStatsRequest) Reset() { *m = MemcacheStatsRequest{} }
-func (m *MemcacheStatsRequest) String() string { return proto.CompactTextString(m) }
-func (*MemcacheStatsRequest) ProtoMessage() {}
-
-func (m *MemcacheStatsRequest) GetOverride() *AppOverride {
- if m != nil {
- return m.Override
- }
- return nil
-}
-
-type MergedNamespaceStats struct {
- Hits *uint64 `protobuf:"varint,1,req,name=hits" json:"hits,omitempty"`
- Misses *uint64 `protobuf:"varint,2,req,name=misses" json:"misses,omitempty"`
- ByteHits *uint64 `protobuf:"varint,3,req,name=byte_hits" json:"byte_hits,omitempty"`
- Items *uint64 `protobuf:"varint,4,req,name=items" json:"items,omitempty"`
- Bytes *uint64 `protobuf:"varint,5,req,name=bytes" json:"bytes,omitempty"`
- OldestItemAge *uint32 `protobuf:"fixed32,6,req,name=oldest_item_age" json:"oldest_item_age,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MergedNamespaceStats) Reset() { *m = MergedNamespaceStats{} }
-func (m *MergedNamespaceStats) String() string { return proto.CompactTextString(m) }
-func (*MergedNamespaceStats) ProtoMessage() {}
-
-func (m *MergedNamespaceStats) GetHits() uint64 {
- if m != nil && m.Hits != nil {
- return *m.Hits
- }
- return 0
-}
-
-func (m *MergedNamespaceStats) GetMisses() uint64 {
- if m != nil && m.Misses != nil {
- return *m.Misses
- }
- return 0
-}
-
-func (m *MergedNamespaceStats) GetByteHits() uint64 {
- if m != nil && m.ByteHits != nil {
- return *m.ByteHits
- }
- return 0
-}
-
-func (m *MergedNamespaceStats) GetItems() uint64 {
- if m != nil && m.Items != nil {
- return *m.Items
- }
- return 0
-}
-
-func (m *MergedNamespaceStats) GetBytes() uint64 {
- if m != nil && m.Bytes != nil {
- return *m.Bytes
- }
- return 0
-}
-
-func (m *MergedNamespaceStats) GetOldestItemAge() uint32 {
- if m != nil && m.OldestItemAge != nil {
- return *m.OldestItemAge
- }
- return 0
-}
-
-type MemcacheStatsResponse struct {
- Stats *MergedNamespaceStats `protobuf:"bytes,1,opt,name=stats" json:"stats,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheStatsResponse) Reset() { *m = MemcacheStatsResponse{} }
-func (m *MemcacheStatsResponse) String() string { return proto.CompactTextString(m) }
-func (*MemcacheStatsResponse) ProtoMessage() {}
-
-func (m *MemcacheStatsResponse) GetStats() *MergedNamespaceStats {
- if m != nil {
- return m.Stats
- }
- return nil
-}
-
-type MemcacheGrabTailRequest struct {
- ItemCount *int32 `protobuf:"varint,1,req,name=item_count" json:"item_count,omitempty"`
- NameSpace *string `protobuf:"bytes,2,opt,name=name_space,def=" json:"name_space,omitempty"`
- Override *AppOverride `protobuf:"bytes,3,opt,name=override" json:"override,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheGrabTailRequest) Reset() { *m = MemcacheGrabTailRequest{} }
-func (m *MemcacheGrabTailRequest) String() string { return proto.CompactTextString(m) }
-func (*MemcacheGrabTailRequest) ProtoMessage() {}
-
-func (m *MemcacheGrabTailRequest) GetItemCount() int32 {
- if m != nil && m.ItemCount != nil {
- return *m.ItemCount
- }
- return 0
-}
-
-func (m *MemcacheGrabTailRequest) GetNameSpace() string {
- if m != nil && m.NameSpace != nil {
- return *m.NameSpace
- }
- return ""
-}
-
-func (m *MemcacheGrabTailRequest) GetOverride() *AppOverride {
- if m != nil {
- return m.Override
- }
- return nil
-}
-
-type MemcacheGrabTailResponse struct {
- Item []*MemcacheGrabTailResponse_Item `protobuf:"group,1,rep,name=Item" json:"item,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheGrabTailResponse) Reset() { *m = MemcacheGrabTailResponse{} }
-func (m *MemcacheGrabTailResponse) String() string { return proto.CompactTextString(m) }
-func (*MemcacheGrabTailResponse) ProtoMessage() {}
-
-func (m *MemcacheGrabTailResponse) GetItem() []*MemcacheGrabTailResponse_Item {
- if m != nil {
- return m.Item
- }
- return nil
-}
-
-type MemcacheGrabTailResponse_Item struct {
- Value []byte `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
- Flags *uint32 `protobuf:"fixed32,3,opt,name=flags" json:"flags,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *MemcacheGrabTailResponse_Item) Reset() { *m = MemcacheGrabTailResponse_Item{} }
-func (m *MemcacheGrabTailResponse_Item) String() string { return proto.CompactTextString(m) }
-func (*MemcacheGrabTailResponse_Item) ProtoMessage() {}
-
-func (m *MemcacheGrabTailResponse_Item) GetValue() []byte {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-func (m *MemcacheGrabTailResponse_Item) GetFlags() uint32 {
- if m != nil && m.Flags != nil {
- return *m.Flags
- }
- return 0
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/memcache/memcache_service.proto b/vendor/google.golang.org/appengine/internal/memcache/memcache_service.proto
deleted file mode 100644
index 5f0edcd..0000000
--- a/vendor/google.golang.org/appengine/internal/memcache/memcache_service.proto
+++ /dev/null
@@ -1,165 +0,0 @@
-syntax = "proto2";
-option go_package = "memcache";
-
-package appengine;
-
-message MemcacheServiceError {
- enum ErrorCode {
- OK = 0;
- UNSPECIFIED_ERROR = 1;
- NAMESPACE_NOT_SET = 2;
- PERMISSION_DENIED = 3;
- INVALID_VALUE = 6;
- }
-}
-
-message AppOverride {
- required string app_id = 1;
-
- optional int32 num_memcacheg_backends = 2 [deprecated=true];
- optional bool ignore_shardlock = 3 [deprecated=true];
- optional string memcache_pool_hint = 4 [deprecated=true];
- optional bytes memcache_sharding_strategy = 5 [deprecated=true];
-}
-
-message MemcacheGetRequest {
- repeated bytes key = 1;
- optional string name_space = 2 [default = ""];
- optional bool for_cas = 4;
- optional AppOverride override = 5;
-}
-
-message MemcacheGetResponse {
- repeated group Item = 1 {
- required bytes key = 2;
- required bytes value = 3;
- optional fixed32 flags = 4;
- optional fixed64 cas_id = 5;
- optional int32 expires_in_seconds = 6;
- }
-}
-
-message MemcacheSetRequest {
- enum SetPolicy {
- SET = 1;
- ADD = 2;
- REPLACE = 3;
- CAS = 4;
- }
- repeated group Item = 1 {
- required bytes key = 2;
- required bytes value = 3;
-
- optional fixed32 flags = 4;
- optional SetPolicy set_policy = 5 [default = SET];
- optional fixed32 expiration_time = 6 [default = 0];
-
- optional fixed64 cas_id = 8;
- optional bool for_cas = 9;
- }
- optional string name_space = 7 [default = ""];
- optional AppOverride override = 10;
-}
-
-message MemcacheSetResponse {
- enum SetStatusCode {
- STORED = 1;
- NOT_STORED = 2;
- ERROR = 3;
- EXISTS = 4;
- }
- repeated SetStatusCode set_status = 1;
-}
-
-message MemcacheDeleteRequest {
- repeated group Item = 1 {
- required bytes key = 2;
- optional fixed32 delete_time = 3 [default = 0];
- }
- optional string name_space = 4 [default = ""];
- optional AppOverride override = 5;
-}
-
-message MemcacheDeleteResponse {
- enum DeleteStatusCode {
- DELETED = 1;
- NOT_FOUND = 2;
- }
- repeated DeleteStatusCode delete_status = 1;
-}
-
-message MemcacheIncrementRequest {
- enum Direction {
- INCREMENT = 1;
- DECREMENT = 2;
- }
- required bytes key = 1;
- optional string name_space = 4 [default = ""];
-
- optional uint64 delta = 2 [default = 1];
- optional Direction direction = 3 [default = INCREMENT];
-
- optional uint64 initial_value = 5;
- optional fixed32 initial_flags = 6;
- optional AppOverride override = 7;
-}
-
-message MemcacheIncrementResponse {
- enum IncrementStatusCode {
- OK = 1;
- NOT_CHANGED = 2;
- ERROR = 3;
- }
-
- optional uint64 new_value = 1;
- optional IncrementStatusCode increment_status = 2;
-}
-
-message MemcacheBatchIncrementRequest {
- optional string name_space = 1 [default = ""];
- repeated MemcacheIncrementRequest item = 2;
- optional AppOverride override = 3;
-}
-
-message MemcacheBatchIncrementResponse {
- repeated MemcacheIncrementResponse item = 1;
-}
-
-message MemcacheFlushRequest {
- optional AppOverride override = 1;
-}
-
-message MemcacheFlushResponse {
-}
-
-message MemcacheStatsRequest {
- optional AppOverride override = 1;
-}
-
-message MergedNamespaceStats {
- required uint64 hits = 1;
- required uint64 misses = 2;
- required uint64 byte_hits = 3;
-
- required uint64 items = 4;
- required uint64 bytes = 5;
-
- required fixed32 oldest_item_age = 6;
-}
-
-message MemcacheStatsResponse {
- optional MergedNamespaceStats stats = 1;
-}
-
-message MemcacheGrabTailRequest {
- required int32 item_count = 1;
- optional string name_space = 2 [default = ""];
- optional AppOverride override = 3;
-}
-
-message MemcacheGrabTailResponse {
- repeated group Item = 1 {
- required bytes value = 2;
- optional fixed32 flags = 3;
- }
-}
diff --git a/vendor/google.golang.org/appengine/internal/metadata.go b/vendor/google.golang.org/appengine/internal/metadata.go
deleted file mode 100644
index b68fb75..0000000
--- a/vendor/google.golang.org/appengine/internal/metadata.go
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package internal
-
-// This file has code for accessing metadata.
-//
-// References:
-// https://cloud.google.com/compute/docs/metadata
-
-import (
- "fmt"
- "io/ioutil"
- "log"
- "net/http"
- "net/url"
-)
-
-const (
- metadataHost = "metadata"
- metadataPath = "/computeMetadata/v1/"
-)
-
-var (
- metadataRequestHeaders = http.Header{
- "X-Google-Metadata-Request": []string{"True"},
- }
-)
-
-// TODO(dsymonds): Do we need to support default values, like Python?
-func mustGetMetadata(key string) []byte {
- b, err := getMetadata(key)
- if err != nil {
- log.Fatalf("Metadata fetch failed: %v", err)
- }
- return b
-}
-
-func getMetadata(key string) ([]byte, error) {
- // TODO(dsymonds): May need to use url.Parse to support keys with query args.
- req := &http.Request{
- Method: "GET",
- URL: &url.URL{
- Scheme: "http",
- Host: metadataHost,
- Path: metadataPath + key,
- },
- Header: metadataRequestHeaders,
- Host: metadataHost,
- }
- resp, err := http.DefaultClient.Do(req)
- if err != nil {
- return nil, err
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, fmt.Errorf("metadata server returned HTTP %d", resp.StatusCode)
- }
- return ioutil.ReadAll(resp.Body)
-}
diff --git a/vendor/google.golang.org/appengine/internal/modules/modules_service.pb.go b/vendor/google.golang.org/appengine/internal/modules/modules_service.pb.go
deleted file mode 100644
index a0145ed..0000000
--- a/vendor/google.golang.org/appengine/internal/modules/modules_service.pb.go
+++ /dev/null
@@ -1,375 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/modules/modules_service.proto
-// DO NOT EDIT!
-
-/*
-Package modules is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/modules/modules_service.proto
-
-It has these top-level messages:
- ModulesServiceError
- GetModulesRequest
- GetModulesResponse
- GetVersionsRequest
- GetVersionsResponse
- GetDefaultVersionRequest
- GetDefaultVersionResponse
- GetNumInstancesRequest
- GetNumInstancesResponse
- SetNumInstancesRequest
- SetNumInstancesResponse
- StartModuleRequest
- StartModuleResponse
- StopModuleRequest
- StopModuleResponse
- GetHostnameRequest
- GetHostnameResponse
-*/
-package modules
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type ModulesServiceError_ErrorCode int32
-
-const (
- ModulesServiceError_OK ModulesServiceError_ErrorCode = 0
- ModulesServiceError_INVALID_MODULE ModulesServiceError_ErrorCode = 1
- ModulesServiceError_INVALID_VERSION ModulesServiceError_ErrorCode = 2
- ModulesServiceError_INVALID_INSTANCES ModulesServiceError_ErrorCode = 3
- ModulesServiceError_TRANSIENT_ERROR ModulesServiceError_ErrorCode = 4
- ModulesServiceError_UNEXPECTED_STATE ModulesServiceError_ErrorCode = 5
-)
-
-var ModulesServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "INVALID_MODULE",
- 2: "INVALID_VERSION",
- 3: "INVALID_INSTANCES",
- 4: "TRANSIENT_ERROR",
- 5: "UNEXPECTED_STATE",
-}
-var ModulesServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "INVALID_MODULE": 1,
- "INVALID_VERSION": 2,
- "INVALID_INSTANCES": 3,
- "TRANSIENT_ERROR": 4,
- "UNEXPECTED_STATE": 5,
-}
-
-func (x ModulesServiceError_ErrorCode) Enum() *ModulesServiceError_ErrorCode {
- p := new(ModulesServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x ModulesServiceError_ErrorCode) String() string {
- return proto.EnumName(ModulesServiceError_ErrorCode_name, int32(x))
-}
-func (x *ModulesServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(ModulesServiceError_ErrorCode_value, data, "ModulesServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = ModulesServiceError_ErrorCode(value)
- return nil
-}
-
-type ModulesServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ModulesServiceError) Reset() { *m = ModulesServiceError{} }
-func (m *ModulesServiceError) String() string { return proto.CompactTextString(m) }
-func (*ModulesServiceError) ProtoMessage() {}
-
-type GetModulesRequest struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetModulesRequest) Reset() { *m = GetModulesRequest{} }
-func (m *GetModulesRequest) String() string { return proto.CompactTextString(m) }
-func (*GetModulesRequest) ProtoMessage() {}
-
-type GetModulesResponse struct {
- Module []string `protobuf:"bytes,1,rep,name=module" json:"module,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetModulesResponse) Reset() { *m = GetModulesResponse{} }
-func (m *GetModulesResponse) String() string { return proto.CompactTextString(m) }
-func (*GetModulesResponse) ProtoMessage() {}
-
-func (m *GetModulesResponse) GetModule() []string {
- if m != nil {
- return m.Module
- }
- return nil
-}
-
-type GetVersionsRequest struct {
- Module *string `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetVersionsRequest) Reset() { *m = GetVersionsRequest{} }
-func (m *GetVersionsRequest) String() string { return proto.CompactTextString(m) }
-func (*GetVersionsRequest) ProtoMessage() {}
-
-func (m *GetVersionsRequest) GetModule() string {
- if m != nil && m.Module != nil {
- return *m.Module
- }
- return ""
-}
-
-type GetVersionsResponse struct {
- Version []string `protobuf:"bytes,1,rep,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetVersionsResponse) Reset() { *m = GetVersionsResponse{} }
-func (m *GetVersionsResponse) String() string { return proto.CompactTextString(m) }
-func (*GetVersionsResponse) ProtoMessage() {}
-
-func (m *GetVersionsResponse) GetVersion() []string {
- if m != nil {
- return m.Version
- }
- return nil
-}
-
-type GetDefaultVersionRequest struct {
- Module *string `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetDefaultVersionRequest) Reset() { *m = GetDefaultVersionRequest{} }
-func (m *GetDefaultVersionRequest) String() string { return proto.CompactTextString(m) }
-func (*GetDefaultVersionRequest) ProtoMessage() {}
-
-func (m *GetDefaultVersionRequest) GetModule() string {
- if m != nil && m.Module != nil {
- return *m.Module
- }
- return ""
-}
-
-type GetDefaultVersionResponse struct {
- Version *string `protobuf:"bytes,1,req,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetDefaultVersionResponse) Reset() { *m = GetDefaultVersionResponse{} }
-func (m *GetDefaultVersionResponse) String() string { return proto.CompactTextString(m) }
-func (*GetDefaultVersionResponse) ProtoMessage() {}
-
-func (m *GetDefaultVersionResponse) GetVersion() string {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return ""
-}
-
-type GetNumInstancesRequest struct {
- Module *string `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
- Version *string `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetNumInstancesRequest) Reset() { *m = GetNumInstancesRequest{} }
-func (m *GetNumInstancesRequest) String() string { return proto.CompactTextString(m) }
-func (*GetNumInstancesRequest) ProtoMessage() {}
-
-func (m *GetNumInstancesRequest) GetModule() string {
- if m != nil && m.Module != nil {
- return *m.Module
- }
- return ""
-}
-
-func (m *GetNumInstancesRequest) GetVersion() string {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return ""
-}
-
-type GetNumInstancesResponse struct {
- Instances *int64 `protobuf:"varint,1,req,name=instances" json:"instances,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetNumInstancesResponse) Reset() { *m = GetNumInstancesResponse{} }
-func (m *GetNumInstancesResponse) String() string { return proto.CompactTextString(m) }
-func (*GetNumInstancesResponse) ProtoMessage() {}
-
-func (m *GetNumInstancesResponse) GetInstances() int64 {
- if m != nil && m.Instances != nil {
- return *m.Instances
- }
- return 0
-}
-
-type SetNumInstancesRequest struct {
- Module *string `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
- Version *string `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
- Instances *int64 `protobuf:"varint,3,req,name=instances" json:"instances,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SetNumInstancesRequest) Reset() { *m = SetNumInstancesRequest{} }
-func (m *SetNumInstancesRequest) String() string { return proto.CompactTextString(m) }
-func (*SetNumInstancesRequest) ProtoMessage() {}
-
-func (m *SetNumInstancesRequest) GetModule() string {
- if m != nil && m.Module != nil {
- return *m.Module
- }
- return ""
-}
-
-func (m *SetNumInstancesRequest) GetVersion() string {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return ""
-}
-
-func (m *SetNumInstancesRequest) GetInstances() int64 {
- if m != nil && m.Instances != nil {
- return *m.Instances
- }
- return 0
-}
-
-type SetNumInstancesResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SetNumInstancesResponse) Reset() { *m = SetNumInstancesResponse{} }
-func (m *SetNumInstancesResponse) String() string { return proto.CompactTextString(m) }
-func (*SetNumInstancesResponse) ProtoMessage() {}
-
-type StartModuleRequest struct {
- Module *string `protobuf:"bytes,1,req,name=module" json:"module,omitempty"`
- Version *string `protobuf:"bytes,2,req,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *StartModuleRequest) Reset() { *m = StartModuleRequest{} }
-func (m *StartModuleRequest) String() string { return proto.CompactTextString(m) }
-func (*StartModuleRequest) ProtoMessage() {}
-
-func (m *StartModuleRequest) GetModule() string {
- if m != nil && m.Module != nil {
- return *m.Module
- }
- return ""
-}
-
-func (m *StartModuleRequest) GetVersion() string {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return ""
-}
-
-type StartModuleResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *StartModuleResponse) Reset() { *m = StartModuleResponse{} }
-func (m *StartModuleResponse) String() string { return proto.CompactTextString(m) }
-func (*StartModuleResponse) ProtoMessage() {}
-
-type StopModuleRequest struct {
- Module *string `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
- Version *string `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *StopModuleRequest) Reset() { *m = StopModuleRequest{} }
-func (m *StopModuleRequest) String() string { return proto.CompactTextString(m) }
-func (*StopModuleRequest) ProtoMessage() {}
-
-func (m *StopModuleRequest) GetModule() string {
- if m != nil && m.Module != nil {
- return *m.Module
- }
- return ""
-}
-
-func (m *StopModuleRequest) GetVersion() string {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return ""
-}
-
-type StopModuleResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *StopModuleResponse) Reset() { *m = StopModuleResponse{} }
-func (m *StopModuleResponse) String() string { return proto.CompactTextString(m) }
-func (*StopModuleResponse) ProtoMessage() {}
-
-type GetHostnameRequest struct {
- Module *string `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
- Version *string `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
- Instance *string `protobuf:"bytes,3,opt,name=instance" json:"instance,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetHostnameRequest) Reset() { *m = GetHostnameRequest{} }
-func (m *GetHostnameRequest) String() string { return proto.CompactTextString(m) }
-func (*GetHostnameRequest) ProtoMessage() {}
-
-func (m *GetHostnameRequest) GetModule() string {
- if m != nil && m.Module != nil {
- return *m.Module
- }
- return ""
-}
-
-func (m *GetHostnameRequest) GetVersion() string {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return ""
-}
-
-func (m *GetHostnameRequest) GetInstance() string {
- if m != nil && m.Instance != nil {
- return *m.Instance
- }
- return ""
-}
-
-type GetHostnameResponse struct {
- Hostname *string `protobuf:"bytes,1,req,name=hostname" json:"hostname,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetHostnameResponse) Reset() { *m = GetHostnameResponse{} }
-func (m *GetHostnameResponse) String() string { return proto.CompactTextString(m) }
-func (*GetHostnameResponse) ProtoMessage() {}
-
-func (m *GetHostnameResponse) GetHostname() string {
- if m != nil && m.Hostname != nil {
- return *m.Hostname
- }
- return ""
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/modules/modules_service.proto b/vendor/google.golang.org/appengine/internal/modules/modules_service.proto
deleted file mode 100644
index d29f006..0000000
--- a/vendor/google.golang.org/appengine/internal/modules/modules_service.proto
+++ /dev/null
@@ -1,80 +0,0 @@
-syntax = "proto2";
-option go_package = "modules";
-
-package appengine;
-
-message ModulesServiceError {
- enum ErrorCode {
- OK = 0;
- INVALID_MODULE = 1;
- INVALID_VERSION = 2;
- INVALID_INSTANCES = 3;
- TRANSIENT_ERROR = 4;
- UNEXPECTED_STATE = 5;
- }
-}
-
-message GetModulesRequest {
-}
-
-message GetModulesResponse {
- repeated string module = 1;
-}
-
-message GetVersionsRequest {
- optional string module = 1;
-}
-
-message GetVersionsResponse {
- repeated string version = 1;
-}
-
-message GetDefaultVersionRequest {
- optional string module = 1;
-}
-
-message GetDefaultVersionResponse {
- required string version = 1;
-}
-
-message GetNumInstancesRequest {
- optional string module = 1;
- optional string version = 2;
-}
-
-message GetNumInstancesResponse {
- required int64 instances = 1;
-}
-
-message SetNumInstancesRequest {
- optional string module = 1;
- optional string version = 2;
- required int64 instances = 3;
-}
-
-message SetNumInstancesResponse {}
-
-message StartModuleRequest {
- required string module = 1;
- required string version = 2;
-}
-
-message StartModuleResponse {}
-
-message StopModuleRequest {
- optional string module = 1;
- optional string version = 2;
-}
-
-message StopModuleResponse {}
-
-message GetHostnameRequest {
- optional string module = 1;
- optional string version = 2;
- optional string instance = 3;
-}
-
-message GetHostnameResponse {
- required string hostname = 1;
-}
-
diff --git a/vendor/google.golang.org/appengine/internal/net.go b/vendor/google.golang.org/appengine/internal/net.go
deleted file mode 100644
index 3b94cf0..0000000
--- a/vendor/google.golang.org/appengine/internal/net.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package internal
-
-// This file implements a network dialer that limits the number of concurrent connections.
-// It is only used for API calls.
-
-import (
- "log"
- "net"
- "runtime"
- "sync"
- "time"
-)
-
-var limitSem = make(chan int, 100) // TODO(dsymonds): Use environment variable.
-
-func limitRelease() {
- // non-blocking
- select {
- case <-limitSem:
- default:
- // This should not normally happen.
- log.Print("appengine: unbalanced limitSem release!")
- }
-}
-
-func limitDial(network, addr string) (net.Conn, error) {
- limitSem <- 1
-
- // Dial with a timeout in case the API host is MIA.
- // The connection should normally be very fast.
- conn, err := net.DialTimeout(network, addr, 500*time.Millisecond)
- if err != nil {
- limitRelease()
- return nil, err
- }
- lc := &limitConn{Conn: conn}
- runtime.SetFinalizer(lc, (*limitConn).Close) // shouldn't usually be required
- return lc, nil
-}
-
-type limitConn struct {
- close sync.Once
- net.Conn
-}
-
-func (lc *limitConn) Close() error {
- defer lc.close.Do(func() {
- limitRelease()
- runtime.SetFinalizer(lc, nil)
- })
- return lc.Conn.Close()
-}
diff --git a/vendor/google.golang.org/appengine/internal/regen.sh b/vendor/google.golang.org/appengine/internal/regen.sh
deleted file mode 100644
index 2fdb546..0000000
--- a/vendor/google.golang.org/appengine/internal/regen.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash -e
-#
-# This script rebuilds the generated code for the protocol buffers.
-# To run this you will need protoc and goprotobuf installed;
-# see https://github.com/golang/protobuf for instructions.
-
-PKG=google.golang.org/appengine
-
-function die() {
- echo 1>&2 $*
- exit 1
-}
-
-# Sanity check that the right tools are accessible.
-for tool in go protoc protoc-gen-go; do
- q=$(which $tool) || die "didn't find $tool"
- echo 1>&2 "$tool: $q"
-done
-
-echo -n 1>&2 "finding package dir... "
-pkgdir=$(go list -f '{{.Dir}}' $PKG)
-echo 1>&2 $pkgdir
-base=$(echo $pkgdir | sed "s,/$PKG\$,,")
-echo 1>&2 "base: $base"
-cd $base
-
-# Run protoc once per package.
-for dir in $(find $PKG/internal -name '*.proto' | xargs dirname | sort | uniq); do
- echo 1>&2 "* $dir"
- protoc --go_out=. $dir/*.proto
-done
-
-for f in $(find $PKG/internal -name '*.pb.go'); do
- # Remove proto.RegisterEnum calls.
- # These cause duplicate registration panics when these packages
- # are used on classic App Engine. proto.RegisterEnum only affects
- # parsing the text format; we don't care about that.
- # https://code.google.com/p/googleappengine/issues/detail?id=11670#c17
- sed -i '/proto.RegisterEnum/d' $f
-done
diff --git a/vendor/google.golang.org/appengine/internal/remote_api/remote_api.pb.go b/vendor/google.golang.org/appengine/internal/remote_api/remote_api.pb.go
deleted file mode 100644
index 526bd39..0000000
--- a/vendor/google.golang.org/appengine/internal/remote_api/remote_api.pb.go
+++ /dev/null
@@ -1,231 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/remote_api/remote_api.proto
-// DO NOT EDIT!
-
-/*
-Package remote_api is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/remote_api/remote_api.proto
-
-It has these top-level messages:
- Request
- ApplicationError
- RpcError
- Response
-*/
-package remote_api
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type RpcError_ErrorCode int32
-
-const (
- RpcError_UNKNOWN RpcError_ErrorCode = 0
- RpcError_CALL_NOT_FOUND RpcError_ErrorCode = 1
- RpcError_PARSE_ERROR RpcError_ErrorCode = 2
- RpcError_SECURITY_VIOLATION RpcError_ErrorCode = 3
- RpcError_OVER_QUOTA RpcError_ErrorCode = 4
- RpcError_REQUEST_TOO_LARGE RpcError_ErrorCode = 5
- RpcError_CAPABILITY_DISABLED RpcError_ErrorCode = 6
- RpcError_FEATURE_DISABLED RpcError_ErrorCode = 7
- RpcError_BAD_REQUEST RpcError_ErrorCode = 8
- RpcError_RESPONSE_TOO_LARGE RpcError_ErrorCode = 9
- RpcError_CANCELLED RpcError_ErrorCode = 10
- RpcError_REPLAY_ERROR RpcError_ErrorCode = 11
- RpcError_DEADLINE_EXCEEDED RpcError_ErrorCode = 12
-)
-
-var RpcError_ErrorCode_name = map[int32]string{
- 0: "UNKNOWN",
- 1: "CALL_NOT_FOUND",
- 2: "PARSE_ERROR",
- 3: "SECURITY_VIOLATION",
- 4: "OVER_QUOTA",
- 5: "REQUEST_TOO_LARGE",
- 6: "CAPABILITY_DISABLED",
- 7: "FEATURE_DISABLED",
- 8: "BAD_REQUEST",
- 9: "RESPONSE_TOO_LARGE",
- 10: "CANCELLED",
- 11: "REPLAY_ERROR",
- 12: "DEADLINE_EXCEEDED",
-}
-var RpcError_ErrorCode_value = map[string]int32{
- "UNKNOWN": 0,
- "CALL_NOT_FOUND": 1,
- "PARSE_ERROR": 2,
- "SECURITY_VIOLATION": 3,
- "OVER_QUOTA": 4,
- "REQUEST_TOO_LARGE": 5,
- "CAPABILITY_DISABLED": 6,
- "FEATURE_DISABLED": 7,
- "BAD_REQUEST": 8,
- "RESPONSE_TOO_LARGE": 9,
- "CANCELLED": 10,
- "REPLAY_ERROR": 11,
- "DEADLINE_EXCEEDED": 12,
-}
-
-func (x RpcError_ErrorCode) Enum() *RpcError_ErrorCode {
- p := new(RpcError_ErrorCode)
- *p = x
- return p
-}
-func (x RpcError_ErrorCode) String() string {
- return proto.EnumName(RpcError_ErrorCode_name, int32(x))
-}
-func (x *RpcError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(RpcError_ErrorCode_value, data, "RpcError_ErrorCode")
- if err != nil {
- return err
- }
- *x = RpcError_ErrorCode(value)
- return nil
-}
-
-type Request struct {
- ServiceName *string `protobuf:"bytes,2,req,name=service_name" json:"service_name,omitempty"`
- Method *string `protobuf:"bytes,3,req,name=method" json:"method,omitempty"`
- Request []byte `protobuf:"bytes,4,req,name=request" json:"request,omitempty"`
- RequestId *string `protobuf:"bytes,5,opt,name=request_id" json:"request_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Request) Reset() { *m = Request{} }
-func (m *Request) String() string { return proto.CompactTextString(m) }
-func (*Request) ProtoMessage() {}
-
-func (m *Request) GetServiceName() string {
- if m != nil && m.ServiceName != nil {
- return *m.ServiceName
- }
- return ""
-}
-
-func (m *Request) GetMethod() string {
- if m != nil && m.Method != nil {
- return *m.Method
- }
- return ""
-}
-
-func (m *Request) GetRequest() []byte {
- if m != nil {
- return m.Request
- }
- return nil
-}
-
-func (m *Request) GetRequestId() string {
- if m != nil && m.RequestId != nil {
- return *m.RequestId
- }
- return ""
-}
-
-type ApplicationError struct {
- Code *int32 `protobuf:"varint,1,req,name=code" json:"code,omitempty"`
- Detail *string `protobuf:"bytes,2,req,name=detail" json:"detail,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ApplicationError) Reset() { *m = ApplicationError{} }
-func (m *ApplicationError) String() string { return proto.CompactTextString(m) }
-func (*ApplicationError) ProtoMessage() {}
-
-func (m *ApplicationError) GetCode() int32 {
- if m != nil && m.Code != nil {
- return *m.Code
- }
- return 0
-}
-
-func (m *ApplicationError) GetDetail() string {
- if m != nil && m.Detail != nil {
- return *m.Detail
- }
- return ""
-}
-
-type RpcError struct {
- Code *int32 `protobuf:"varint,1,req,name=code" json:"code,omitempty"`
- Detail *string `protobuf:"bytes,2,opt,name=detail" json:"detail,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *RpcError) Reset() { *m = RpcError{} }
-func (m *RpcError) String() string { return proto.CompactTextString(m) }
-func (*RpcError) ProtoMessage() {}
-
-func (m *RpcError) GetCode() int32 {
- if m != nil && m.Code != nil {
- return *m.Code
- }
- return 0
-}
-
-func (m *RpcError) GetDetail() string {
- if m != nil && m.Detail != nil {
- return *m.Detail
- }
- return ""
-}
-
-type Response struct {
- Response []byte `protobuf:"bytes,1,opt,name=response" json:"response,omitempty"`
- Exception []byte `protobuf:"bytes,2,opt,name=exception" json:"exception,omitempty"`
- ApplicationError *ApplicationError `protobuf:"bytes,3,opt,name=application_error" json:"application_error,omitempty"`
- JavaException []byte `protobuf:"bytes,4,opt,name=java_exception" json:"java_exception,omitempty"`
- RpcError *RpcError `protobuf:"bytes,5,opt,name=rpc_error" json:"rpc_error,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Response) Reset() { *m = Response{} }
-func (m *Response) String() string { return proto.CompactTextString(m) }
-func (*Response) ProtoMessage() {}
-
-func (m *Response) GetResponse() []byte {
- if m != nil {
- return m.Response
- }
- return nil
-}
-
-func (m *Response) GetException() []byte {
- if m != nil {
- return m.Exception
- }
- return nil
-}
-
-func (m *Response) GetApplicationError() *ApplicationError {
- if m != nil {
- return m.ApplicationError
- }
- return nil
-}
-
-func (m *Response) GetJavaException() []byte {
- if m != nil {
- return m.JavaException
- }
- return nil
-}
-
-func (m *Response) GetRpcError() *RpcError {
- if m != nil {
- return m.RpcError
- }
- return nil
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/remote_api/remote_api.proto b/vendor/google.golang.org/appengine/internal/remote_api/remote_api.proto
deleted file mode 100644
index f21763a..0000000
--- a/vendor/google.golang.org/appengine/internal/remote_api/remote_api.proto
+++ /dev/null
@@ -1,44 +0,0 @@
-syntax = "proto2";
-option go_package = "remote_api";
-
-package remote_api;
-
-message Request {
- required string service_name = 2;
- required string method = 3;
- required bytes request = 4;
- optional string request_id = 5;
-}
-
-message ApplicationError {
- required int32 code = 1;
- required string detail = 2;
-}
-
-message RpcError {
- enum ErrorCode {
- UNKNOWN = 0;
- CALL_NOT_FOUND = 1;
- PARSE_ERROR = 2;
- SECURITY_VIOLATION = 3;
- OVER_QUOTA = 4;
- REQUEST_TOO_LARGE = 5;
- CAPABILITY_DISABLED = 6;
- FEATURE_DISABLED = 7;
- BAD_REQUEST = 8;
- RESPONSE_TOO_LARGE = 9;
- CANCELLED = 10;
- REPLAY_ERROR = 11;
- DEADLINE_EXCEEDED = 12;
- }
- required int32 code = 1;
- optional string detail = 2;
-}
-
-message Response {
- optional bytes response = 1;
- optional bytes exception = 2;
- optional ApplicationError application_error = 3;
- optional bytes java_exception = 4;
- optional RpcError rpc_error = 5;
-}
diff --git a/vendor/google.golang.org/appengine/internal/search/search.pb.go b/vendor/google.golang.org/appengine/internal/search/search.pb.go
deleted file mode 100644
index 7d8d11d..0000000
--- a/vendor/google.golang.org/appengine/internal/search/search.pb.go
+++ /dev/null
@@ -1,2127 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/search/search.proto
-// DO NOT EDIT!
-
-/*
-Package search is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/search/search.proto
-
-It has these top-level messages:
- Scope
- Entry
- AccessControlList
- FieldValue
- Field
- FieldTypes
- IndexShardSettings
- FacetValue
- Facet
- DocumentMetadata
- Document
- SearchServiceError
- RequestStatus
- IndexSpec
- IndexMetadata
- IndexDocumentParams
- IndexDocumentRequest
- IndexDocumentResponse
- DeleteDocumentParams
- DeleteDocumentRequest
- DeleteDocumentResponse
- ListDocumentsParams
- ListDocumentsRequest
- ListDocumentsResponse
- ListIndexesParams
- ListIndexesRequest
- ListIndexesResponse
- DeleteSchemaParams
- DeleteSchemaRequest
- DeleteSchemaResponse
- SortSpec
- ScorerSpec
- FieldSpec
- FacetRange
- FacetRequestParam
- FacetAutoDetectParam
- FacetRequest
- FacetRefinement
- SearchParams
- SearchRequest
- FacetResultValue
- FacetResult
- SearchResult
- SearchResponse
-*/
-package search
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type Scope_Type int32
-
-const (
- Scope_USER_BY_CANONICAL_ID Scope_Type = 1
- Scope_USER_BY_EMAIL Scope_Type = 2
- Scope_GROUP_BY_CANONICAL_ID Scope_Type = 3
- Scope_GROUP_BY_EMAIL Scope_Type = 4
- Scope_GROUP_BY_DOMAIN Scope_Type = 5
- Scope_ALL_USERS Scope_Type = 6
- Scope_ALL_AUTHENTICATED_USERS Scope_Type = 7
-)
-
-var Scope_Type_name = map[int32]string{
- 1: "USER_BY_CANONICAL_ID",
- 2: "USER_BY_EMAIL",
- 3: "GROUP_BY_CANONICAL_ID",
- 4: "GROUP_BY_EMAIL",
- 5: "GROUP_BY_DOMAIN",
- 6: "ALL_USERS",
- 7: "ALL_AUTHENTICATED_USERS",
-}
-var Scope_Type_value = map[string]int32{
- "USER_BY_CANONICAL_ID": 1,
- "USER_BY_EMAIL": 2,
- "GROUP_BY_CANONICAL_ID": 3,
- "GROUP_BY_EMAIL": 4,
- "GROUP_BY_DOMAIN": 5,
- "ALL_USERS": 6,
- "ALL_AUTHENTICATED_USERS": 7,
-}
-
-func (x Scope_Type) Enum() *Scope_Type {
- p := new(Scope_Type)
- *p = x
- return p
-}
-func (x Scope_Type) String() string {
- return proto.EnumName(Scope_Type_name, int32(x))
-}
-func (x *Scope_Type) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Scope_Type_value, data, "Scope_Type")
- if err != nil {
- return err
- }
- *x = Scope_Type(value)
- return nil
-}
-
-type Entry_Permission int32
-
-const (
- Entry_READ Entry_Permission = 1
- Entry_WRITE Entry_Permission = 2
- Entry_FULL_CONTROL Entry_Permission = 3
-)
-
-var Entry_Permission_name = map[int32]string{
- 1: "READ",
- 2: "WRITE",
- 3: "FULL_CONTROL",
-}
-var Entry_Permission_value = map[string]int32{
- "READ": 1,
- "WRITE": 2,
- "FULL_CONTROL": 3,
-}
-
-func (x Entry_Permission) Enum() *Entry_Permission {
- p := new(Entry_Permission)
- *p = x
- return p
-}
-func (x Entry_Permission) String() string {
- return proto.EnumName(Entry_Permission_name, int32(x))
-}
-func (x *Entry_Permission) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Entry_Permission_value, data, "Entry_Permission")
- if err != nil {
- return err
- }
- *x = Entry_Permission(value)
- return nil
-}
-
-type FieldValue_ContentType int32
-
-const (
- FieldValue_TEXT FieldValue_ContentType = 0
- FieldValue_HTML FieldValue_ContentType = 1
- FieldValue_ATOM FieldValue_ContentType = 2
- FieldValue_DATE FieldValue_ContentType = 3
- FieldValue_NUMBER FieldValue_ContentType = 4
- FieldValue_GEO FieldValue_ContentType = 5
-)
-
-var FieldValue_ContentType_name = map[int32]string{
- 0: "TEXT",
- 1: "HTML",
- 2: "ATOM",
- 3: "DATE",
- 4: "NUMBER",
- 5: "GEO",
-}
-var FieldValue_ContentType_value = map[string]int32{
- "TEXT": 0,
- "HTML": 1,
- "ATOM": 2,
- "DATE": 3,
- "NUMBER": 4,
- "GEO": 5,
-}
-
-func (x FieldValue_ContentType) Enum() *FieldValue_ContentType {
- p := new(FieldValue_ContentType)
- *p = x
- return p
-}
-func (x FieldValue_ContentType) String() string {
- return proto.EnumName(FieldValue_ContentType_name, int32(x))
-}
-func (x *FieldValue_ContentType) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(FieldValue_ContentType_value, data, "FieldValue_ContentType")
- if err != nil {
- return err
- }
- *x = FieldValue_ContentType(value)
- return nil
-}
-
-type FacetValue_ContentType int32
-
-const (
- FacetValue_ATOM FacetValue_ContentType = 2
- FacetValue_NUMBER FacetValue_ContentType = 4
-)
-
-var FacetValue_ContentType_name = map[int32]string{
- 2: "ATOM",
- 4: "NUMBER",
-}
-var FacetValue_ContentType_value = map[string]int32{
- "ATOM": 2,
- "NUMBER": 4,
-}
-
-func (x FacetValue_ContentType) Enum() *FacetValue_ContentType {
- p := new(FacetValue_ContentType)
- *p = x
- return p
-}
-func (x FacetValue_ContentType) String() string {
- return proto.EnumName(FacetValue_ContentType_name, int32(x))
-}
-func (x *FacetValue_ContentType) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(FacetValue_ContentType_value, data, "FacetValue_ContentType")
- if err != nil {
- return err
- }
- *x = FacetValue_ContentType(value)
- return nil
-}
-
-type Document_Storage int32
-
-const (
- Document_DISK Document_Storage = 0
-)
-
-var Document_Storage_name = map[int32]string{
- 0: "DISK",
-}
-var Document_Storage_value = map[string]int32{
- "DISK": 0,
-}
-
-func (x Document_Storage) Enum() *Document_Storage {
- p := new(Document_Storage)
- *p = x
- return p
-}
-func (x Document_Storage) String() string {
- return proto.EnumName(Document_Storage_name, int32(x))
-}
-func (x *Document_Storage) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(Document_Storage_value, data, "Document_Storage")
- if err != nil {
- return err
- }
- *x = Document_Storage(value)
- return nil
-}
-
-type SearchServiceError_ErrorCode int32
-
-const (
- SearchServiceError_OK SearchServiceError_ErrorCode = 0
- SearchServiceError_INVALID_REQUEST SearchServiceError_ErrorCode = 1
- SearchServiceError_TRANSIENT_ERROR SearchServiceError_ErrorCode = 2
- SearchServiceError_INTERNAL_ERROR SearchServiceError_ErrorCode = 3
- SearchServiceError_PERMISSION_DENIED SearchServiceError_ErrorCode = 4
- SearchServiceError_TIMEOUT SearchServiceError_ErrorCode = 5
- SearchServiceError_CONCURRENT_TRANSACTION SearchServiceError_ErrorCode = 6
-)
-
-var SearchServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "INVALID_REQUEST",
- 2: "TRANSIENT_ERROR",
- 3: "INTERNAL_ERROR",
- 4: "PERMISSION_DENIED",
- 5: "TIMEOUT",
- 6: "CONCURRENT_TRANSACTION",
-}
-var SearchServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "INVALID_REQUEST": 1,
- "TRANSIENT_ERROR": 2,
- "INTERNAL_ERROR": 3,
- "PERMISSION_DENIED": 4,
- "TIMEOUT": 5,
- "CONCURRENT_TRANSACTION": 6,
-}
-
-func (x SearchServiceError_ErrorCode) Enum() *SearchServiceError_ErrorCode {
- p := new(SearchServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x SearchServiceError_ErrorCode) String() string {
- return proto.EnumName(SearchServiceError_ErrorCode_name, int32(x))
-}
-func (x *SearchServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(SearchServiceError_ErrorCode_value, data, "SearchServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = SearchServiceError_ErrorCode(value)
- return nil
-}
-
-type IndexSpec_Consistency int32
-
-const (
- IndexSpec_GLOBAL IndexSpec_Consistency = 0
- IndexSpec_PER_DOCUMENT IndexSpec_Consistency = 1
-)
-
-var IndexSpec_Consistency_name = map[int32]string{
- 0: "GLOBAL",
- 1: "PER_DOCUMENT",
-}
-var IndexSpec_Consistency_value = map[string]int32{
- "GLOBAL": 0,
- "PER_DOCUMENT": 1,
-}
-
-func (x IndexSpec_Consistency) Enum() *IndexSpec_Consistency {
- p := new(IndexSpec_Consistency)
- *p = x
- return p
-}
-func (x IndexSpec_Consistency) String() string {
- return proto.EnumName(IndexSpec_Consistency_name, int32(x))
-}
-func (x *IndexSpec_Consistency) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(IndexSpec_Consistency_value, data, "IndexSpec_Consistency")
- if err != nil {
- return err
- }
- *x = IndexSpec_Consistency(value)
- return nil
-}
-
-type IndexSpec_Source int32
-
-const (
- IndexSpec_SEARCH IndexSpec_Source = 0
- IndexSpec_DATASTORE IndexSpec_Source = 1
- IndexSpec_CLOUD_STORAGE IndexSpec_Source = 2
-)
-
-var IndexSpec_Source_name = map[int32]string{
- 0: "SEARCH",
- 1: "DATASTORE",
- 2: "CLOUD_STORAGE",
-}
-var IndexSpec_Source_value = map[string]int32{
- "SEARCH": 0,
- "DATASTORE": 1,
- "CLOUD_STORAGE": 2,
-}
-
-func (x IndexSpec_Source) Enum() *IndexSpec_Source {
- p := new(IndexSpec_Source)
- *p = x
- return p
-}
-func (x IndexSpec_Source) String() string {
- return proto.EnumName(IndexSpec_Source_name, int32(x))
-}
-func (x *IndexSpec_Source) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(IndexSpec_Source_value, data, "IndexSpec_Source")
- if err != nil {
- return err
- }
- *x = IndexSpec_Source(value)
- return nil
-}
-
-type IndexSpec_Mode int32
-
-const (
- IndexSpec_PRIORITY IndexSpec_Mode = 0
- IndexSpec_BACKGROUND IndexSpec_Mode = 1
-)
-
-var IndexSpec_Mode_name = map[int32]string{
- 0: "PRIORITY",
- 1: "BACKGROUND",
-}
-var IndexSpec_Mode_value = map[string]int32{
- "PRIORITY": 0,
- "BACKGROUND": 1,
-}
-
-func (x IndexSpec_Mode) Enum() *IndexSpec_Mode {
- p := new(IndexSpec_Mode)
- *p = x
- return p
-}
-func (x IndexSpec_Mode) String() string {
- return proto.EnumName(IndexSpec_Mode_name, int32(x))
-}
-func (x *IndexSpec_Mode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(IndexSpec_Mode_value, data, "IndexSpec_Mode")
- if err != nil {
- return err
- }
- *x = IndexSpec_Mode(value)
- return nil
-}
-
-type IndexDocumentParams_Freshness int32
-
-const (
- IndexDocumentParams_SYNCHRONOUSLY IndexDocumentParams_Freshness = 0
- IndexDocumentParams_WHEN_CONVENIENT IndexDocumentParams_Freshness = 1
-)
-
-var IndexDocumentParams_Freshness_name = map[int32]string{
- 0: "SYNCHRONOUSLY",
- 1: "WHEN_CONVENIENT",
-}
-var IndexDocumentParams_Freshness_value = map[string]int32{
- "SYNCHRONOUSLY": 0,
- "WHEN_CONVENIENT": 1,
-}
-
-func (x IndexDocumentParams_Freshness) Enum() *IndexDocumentParams_Freshness {
- p := new(IndexDocumentParams_Freshness)
- *p = x
- return p
-}
-func (x IndexDocumentParams_Freshness) String() string {
- return proto.EnumName(IndexDocumentParams_Freshness_name, int32(x))
-}
-func (x *IndexDocumentParams_Freshness) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(IndexDocumentParams_Freshness_value, data, "IndexDocumentParams_Freshness")
- if err != nil {
- return err
- }
- *x = IndexDocumentParams_Freshness(value)
- return nil
-}
-
-type ScorerSpec_Scorer int32
-
-const (
- ScorerSpec_RESCORING_MATCH_SCORER ScorerSpec_Scorer = 0
- ScorerSpec_MATCH_SCORER ScorerSpec_Scorer = 2
-)
-
-var ScorerSpec_Scorer_name = map[int32]string{
- 0: "RESCORING_MATCH_SCORER",
- 2: "MATCH_SCORER",
-}
-var ScorerSpec_Scorer_value = map[string]int32{
- "RESCORING_MATCH_SCORER": 0,
- "MATCH_SCORER": 2,
-}
-
-func (x ScorerSpec_Scorer) Enum() *ScorerSpec_Scorer {
- p := new(ScorerSpec_Scorer)
- *p = x
- return p
-}
-func (x ScorerSpec_Scorer) String() string {
- return proto.EnumName(ScorerSpec_Scorer_name, int32(x))
-}
-func (x *ScorerSpec_Scorer) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(ScorerSpec_Scorer_value, data, "ScorerSpec_Scorer")
- if err != nil {
- return err
- }
- *x = ScorerSpec_Scorer(value)
- return nil
-}
-
-type SearchParams_CursorType int32
-
-const (
- SearchParams_NONE SearchParams_CursorType = 0
- SearchParams_SINGLE SearchParams_CursorType = 1
- SearchParams_PER_RESULT SearchParams_CursorType = 2
-)
-
-var SearchParams_CursorType_name = map[int32]string{
- 0: "NONE",
- 1: "SINGLE",
- 2: "PER_RESULT",
-}
-var SearchParams_CursorType_value = map[string]int32{
- "NONE": 0,
- "SINGLE": 1,
- "PER_RESULT": 2,
-}
-
-func (x SearchParams_CursorType) Enum() *SearchParams_CursorType {
- p := new(SearchParams_CursorType)
- *p = x
- return p
-}
-func (x SearchParams_CursorType) String() string {
- return proto.EnumName(SearchParams_CursorType_name, int32(x))
-}
-func (x *SearchParams_CursorType) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(SearchParams_CursorType_value, data, "SearchParams_CursorType")
- if err != nil {
- return err
- }
- *x = SearchParams_CursorType(value)
- return nil
-}
-
-type SearchParams_ParsingMode int32
-
-const (
- SearchParams_STRICT SearchParams_ParsingMode = 0
- SearchParams_RELAXED SearchParams_ParsingMode = 1
-)
-
-var SearchParams_ParsingMode_name = map[int32]string{
- 0: "STRICT",
- 1: "RELAXED",
-}
-var SearchParams_ParsingMode_value = map[string]int32{
- "STRICT": 0,
- "RELAXED": 1,
-}
-
-func (x SearchParams_ParsingMode) Enum() *SearchParams_ParsingMode {
- p := new(SearchParams_ParsingMode)
- *p = x
- return p
-}
-func (x SearchParams_ParsingMode) String() string {
- return proto.EnumName(SearchParams_ParsingMode_name, int32(x))
-}
-func (x *SearchParams_ParsingMode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(SearchParams_ParsingMode_value, data, "SearchParams_ParsingMode")
- if err != nil {
- return err
- }
- *x = SearchParams_ParsingMode(value)
- return nil
-}
-
-type Scope struct {
- Type *Scope_Type `protobuf:"varint,1,opt,name=type,enum=search.Scope_Type" json:"type,omitempty"`
- Value *string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Scope) Reset() { *m = Scope{} }
-func (m *Scope) String() string { return proto.CompactTextString(m) }
-func (*Scope) ProtoMessage() {}
-
-func (m *Scope) GetType() Scope_Type {
- if m != nil && m.Type != nil {
- return *m.Type
- }
- return Scope_USER_BY_CANONICAL_ID
-}
-
-func (m *Scope) GetValue() string {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return ""
-}
-
-type Entry struct {
- Scope *Scope `protobuf:"bytes,1,opt,name=scope" json:"scope,omitempty"`
- Permission *Entry_Permission `protobuf:"varint,2,opt,name=permission,enum=search.Entry_Permission" json:"permission,omitempty"`
- DisplayName *string `protobuf:"bytes,3,opt,name=display_name" json:"display_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Entry) Reset() { *m = Entry{} }
-func (m *Entry) String() string { return proto.CompactTextString(m) }
-func (*Entry) ProtoMessage() {}
-
-func (m *Entry) GetScope() *Scope {
- if m != nil {
- return m.Scope
- }
- return nil
-}
-
-func (m *Entry) GetPermission() Entry_Permission {
- if m != nil && m.Permission != nil {
- return *m.Permission
- }
- return Entry_READ
-}
-
-func (m *Entry) GetDisplayName() string {
- if m != nil && m.DisplayName != nil {
- return *m.DisplayName
- }
- return ""
-}
-
-type AccessControlList struct {
- Owner *string `protobuf:"bytes,1,opt,name=owner" json:"owner,omitempty"`
- Entries []*Entry `protobuf:"bytes,2,rep,name=entries" json:"entries,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AccessControlList) Reset() { *m = AccessControlList{} }
-func (m *AccessControlList) String() string { return proto.CompactTextString(m) }
-func (*AccessControlList) ProtoMessage() {}
-
-func (m *AccessControlList) GetOwner() string {
- if m != nil && m.Owner != nil {
- return *m.Owner
- }
- return ""
-}
-
-func (m *AccessControlList) GetEntries() []*Entry {
- if m != nil {
- return m.Entries
- }
- return nil
-}
-
-type FieldValue struct {
- Type *FieldValue_ContentType `protobuf:"varint,1,opt,name=type,enum=search.FieldValue_ContentType,def=0" json:"type,omitempty"`
- Language *string `protobuf:"bytes,2,opt,name=language,def=en" json:"language,omitempty"`
- StringValue *string `protobuf:"bytes,3,opt,name=string_value" json:"string_value,omitempty"`
- Geo *FieldValue_Geo `protobuf:"group,4,opt,name=Geo" json:"geo,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FieldValue) Reset() { *m = FieldValue{} }
-func (m *FieldValue) String() string { return proto.CompactTextString(m) }
-func (*FieldValue) ProtoMessage() {}
-
-const Default_FieldValue_Type FieldValue_ContentType = FieldValue_TEXT
-const Default_FieldValue_Language string = "en"
-
-func (m *FieldValue) GetType() FieldValue_ContentType {
- if m != nil && m.Type != nil {
- return *m.Type
- }
- return Default_FieldValue_Type
-}
-
-func (m *FieldValue) GetLanguage() string {
- if m != nil && m.Language != nil {
- return *m.Language
- }
- return Default_FieldValue_Language
-}
-
-func (m *FieldValue) GetStringValue() string {
- if m != nil && m.StringValue != nil {
- return *m.StringValue
- }
- return ""
-}
-
-func (m *FieldValue) GetGeo() *FieldValue_Geo {
- if m != nil {
- return m.Geo
- }
- return nil
-}
-
-type FieldValue_Geo struct {
- Lat *float64 `protobuf:"fixed64,5,req,name=lat" json:"lat,omitempty"`
- Lng *float64 `protobuf:"fixed64,6,req,name=lng" json:"lng,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FieldValue_Geo) Reset() { *m = FieldValue_Geo{} }
-func (m *FieldValue_Geo) String() string { return proto.CompactTextString(m) }
-func (*FieldValue_Geo) ProtoMessage() {}
-
-func (m *FieldValue_Geo) GetLat() float64 {
- if m != nil && m.Lat != nil {
- return *m.Lat
- }
- return 0
-}
-
-func (m *FieldValue_Geo) GetLng() float64 {
- if m != nil && m.Lng != nil {
- return *m.Lng
- }
- return 0
-}
-
-type Field struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- Value *FieldValue `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Field) Reset() { *m = Field{} }
-func (m *Field) String() string { return proto.CompactTextString(m) }
-func (*Field) ProtoMessage() {}
-
-func (m *Field) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *Field) GetValue() *FieldValue {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type FieldTypes struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- Type []FieldValue_ContentType `protobuf:"varint,2,rep,name=type,enum=search.FieldValue_ContentType" json:"type,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FieldTypes) Reset() { *m = FieldTypes{} }
-func (m *FieldTypes) String() string { return proto.CompactTextString(m) }
-func (*FieldTypes) ProtoMessage() {}
-
-func (m *FieldTypes) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *FieldTypes) GetType() []FieldValue_ContentType {
- if m != nil {
- return m.Type
- }
- return nil
-}
-
-type IndexShardSettings struct {
- PrevNumShards []int32 `protobuf:"varint,1,rep,name=prev_num_shards" json:"prev_num_shards,omitempty"`
- NumShards *int32 `protobuf:"varint,2,req,name=num_shards,def=1" json:"num_shards,omitempty"`
- PrevNumShardsSearchFalse []int32 `protobuf:"varint,3,rep,name=prev_num_shards_search_false" json:"prev_num_shards_search_false,omitempty"`
- LocalReplica *string `protobuf:"bytes,4,opt,name=local_replica,def=" json:"local_replica,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexShardSettings) Reset() { *m = IndexShardSettings{} }
-func (m *IndexShardSettings) String() string { return proto.CompactTextString(m) }
-func (*IndexShardSettings) ProtoMessage() {}
-
-const Default_IndexShardSettings_NumShards int32 = 1
-
-func (m *IndexShardSettings) GetPrevNumShards() []int32 {
- if m != nil {
- return m.PrevNumShards
- }
- return nil
-}
-
-func (m *IndexShardSettings) GetNumShards() int32 {
- if m != nil && m.NumShards != nil {
- return *m.NumShards
- }
- return Default_IndexShardSettings_NumShards
-}
-
-func (m *IndexShardSettings) GetPrevNumShardsSearchFalse() []int32 {
- if m != nil {
- return m.PrevNumShardsSearchFalse
- }
- return nil
-}
-
-func (m *IndexShardSettings) GetLocalReplica() string {
- if m != nil && m.LocalReplica != nil {
- return *m.LocalReplica
- }
- return ""
-}
-
-type FacetValue struct {
- Type *FacetValue_ContentType `protobuf:"varint,1,opt,name=type,enum=search.FacetValue_ContentType,def=2" json:"type,omitempty"`
- StringValue *string `protobuf:"bytes,3,opt,name=string_value" json:"string_value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FacetValue) Reset() { *m = FacetValue{} }
-func (m *FacetValue) String() string { return proto.CompactTextString(m) }
-func (*FacetValue) ProtoMessage() {}
-
-const Default_FacetValue_Type FacetValue_ContentType = FacetValue_ATOM
-
-func (m *FacetValue) GetType() FacetValue_ContentType {
- if m != nil && m.Type != nil {
- return *m.Type
- }
- return Default_FacetValue_Type
-}
-
-func (m *FacetValue) GetStringValue() string {
- if m != nil && m.StringValue != nil {
- return *m.StringValue
- }
- return ""
-}
-
-type Facet struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- Value *FacetValue `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Facet) Reset() { *m = Facet{} }
-func (m *Facet) String() string { return proto.CompactTextString(m) }
-func (*Facet) ProtoMessage() {}
-
-func (m *Facet) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *Facet) GetValue() *FacetValue {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type DocumentMetadata struct {
- Version *int64 `protobuf:"varint,1,opt,name=version" json:"version,omitempty"`
- CommittedStVersion *int64 `protobuf:"varint,2,opt,name=committed_st_version" json:"committed_st_version,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DocumentMetadata) Reset() { *m = DocumentMetadata{} }
-func (m *DocumentMetadata) String() string { return proto.CompactTextString(m) }
-func (*DocumentMetadata) ProtoMessage() {}
-
-func (m *DocumentMetadata) GetVersion() int64 {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return 0
-}
-
-func (m *DocumentMetadata) GetCommittedStVersion() int64 {
- if m != nil && m.CommittedStVersion != nil {
- return *m.CommittedStVersion
- }
- return 0
-}
-
-type Document struct {
- Id *string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"`
- Language *string `protobuf:"bytes,2,opt,name=language,def=en" json:"language,omitempty"`
- Field []*Field `protobuf:"bytes,3,rep,name=field" json:"field,omitempty"`
- OrderId *int32 `protobuf:"varint,4,opt,name=order_id" json:"order_id,omitempty"`
- Storage *Document_Storage `protobuf:"varint,5,opt,name=storage,enum=search.Document_Storage,def=0" json:"storage,omitempty"`
- Facet []*Facet `protobuf:"bytes,8,rep,name=facet" json:"facet,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *Document) Reset() { *m = Document{} }
-func (m *Document) String() string { return proto.CompactTextString(m) }
-func (*Document) ProtoMessage() {}
-
-const Default_Document_Language string = "en"
-const Default_Document_Storage Document_Storage = Document_DISK
-
-func (m *Document) GetId() string {
- if m != nil && m.Id != nil {
- return *m.Id
- }
- return ""
-}
-
-func (m *Document) GetLanguage() string {
- if m != nil && m.Language != nil {
- return *m.Language
- }
- return Default_Document_Language
-}
-
-func (m *Document) GetField() []*Field {
- if m != nil {
- return m.Field
- }
- return nil
-}
-
-func (m *Document) GetOrderId() int32 {
- if m != nil && m.OrderId != nil {
- return *m.OrderId
- }
- return 0
-}
-
-func (m *Document) GetStorage() Document_Storage {
- if m != nil && m.Storage != nil {
- return *m.Storage
- }
- return Default_Document_Storage
-}
-
-func (m *Document) GetFacet() []*Facet {
- if m != nil {
- return m.Facet
- }
- return nil
-}
-
-type SearchServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SearchServiceError) Reset() { *m = SearchServiceError{} }
-func (m *SearchServiceError) String() string { return proto.CompactTextString(m) }
-func (*SearchServiceError) ProtoMessage() {}
-
-type RequestStatus struct {
- Code *SearchServiceError_ErrorCode `protobuf:"varint,1,req,name=code,enum=search.SearchServiceError_ErrorCode" json:"code,omitempty"`
- ErrorDetail *string `protobuf:"bytes,2,opt,name=error_detail" json:"error_detail,omitempty"`
- CanonicalCode *int32 `protobuf:"varint,3,opt,name=canonical_code" json:"canonical_code,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *RequestStatus) Reset() { *m = RequestStatus{} }
-func (m *RequestStatus) String() string { return proto.CompactTextString(m) }
-func (*RequestStatus) ProtoMessage() {}
-
-func (m *RequestStatus) GetCode() SearchServiceError_ErrorCode {
- if m != nil && m.Code != nil {
- return *m.Code
- }
- return SearchServiceError_OK
-}
-
-func (m *RequestStatus) GetErrorDetail() string {
- if m != nil && m.ErrorDetail != nil {
- return *m.ErrorDetail
- }
- return ""
-}
-
-func (m *RequestStatus) GetCanonicalCode() int32 {
- if m != nil && m.CanonicalCode != nil {
- return *m.CanonicalCode
- }
- return 0
-}
-
-type IndexSpec struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- Consistency *IndexSpec_Consistency `protobuf:"varint,2,opt,name=consistency,enum=search.IndexSpec_Consistency,def=1" json:"consistency,omitempty"`
- Namespace *string `protobuf:"bytes,3,opt,name=namespace" json:"namespace,omitempty"`
- Version *int32 `protobuf:"varint,4,opt,name=version" json:"version,omitempty"`
- Source *IndexSpec_Source `protobuf:"varint,5,opt,name=source,enum=search.IndexSpec_Source,def=0" json:"source,omitempty"`
- Mode *IndexSpec_Mode `protobuf:"varint,6,opt,name=mode,enum=search.IndexSpec_Mode,def=0" json:"mode,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexSpec) Reset() { *m = IndexSpec{} }
-func (m *IndexSpec) String() string { return proto.CompactTextString(m) }
-func (*IndexSpec) ProtoMessage() {}
-
-const Default_IndexSpec_Consistency IndexSpec_Consistency = IndexSpec_PER_DOCUMENT
-const Default_IndexSpec_Source IndexSpec_Source = IndexSpec_SEARCH
-const Default_IndexSpec_Mode IndexSpec_Mode = IndexSpec_PRIORITY
-
-func (m *IndexSpec) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *IndexSpec) GetConsistency() IndexSpec_Consistency {
- if m != nil && m.Consistency != nil {
- return *m.Consistency
- }
- return Default_IndexSpec_Consistency
-}
-
-func (m *IndexSpec) GetNamespace() string {
- if m != nil && m.Namespace != nil {
- return *m.Namespace
- }
- return ""
-}
-
-func (m *IndexSpec) GetVersion() int32 {
- if m != nil && m.Version != nil {
- return *m.Version
- }
- return 0
-}
-
-func (m *IndexSpec) GetSource() IndexSpec_Source {
- if m != nil && m.Source != nil {
- return *m.Source
- }
- return Default_IndexSpec_Source
-}
-
-func (m *IndexSpec) GetMode() IndexSpec_Mode {
- if m != nil && m.Mode != nil {
- return *m.Mode
- }
- return Default_IndexSpec_Mode
-}
-
-type IndexMetadata struct {
- IndexSpec *IndexSpec `protobuf:"bytes,1,req,name=index_spec" json:"index_spec,omitempty"`
- Field []*FieldTypes `protobuf:"bytes,2,rep,name=field" json:"field,omitempty"`
- Storage *IndexMetadata_Storage `protobuf:"bytes,3,opt,name=storage" json:"storage,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexMetadata) Reset() { *m = IndexMetadata{} }
-func (m *IndexMetadata) String() string { return proto.CompactTextString(m) }
-func (*IndexMetadata) ProtoMessage() {}
-
-func (m *IndexMetadata) GetIndexSpec() *IndexSpec {
- if m != nil {
- return m.IndexSpec
- }
- return nil
-}
-
-func (m *IndexMetadata) GetField() []*FieldTypes {
- if m != nil {
- return m.Field
- }
- return nil
-}
-
-func (m *IndexMetadata) GetStorage() *IndexMetadata_Storage {
- if m != nil {
- return m.Storage
- }
- return nil
-}
-
-type IndexMetadata_Storage struct {
- AmountUsed *int64 `protobuf:"varint,1,opt,name=amount_used" json:"amount_used,omitempty"`
- Limit *int64 `protobuf:"varint,2,opt,name=limit" json:"limit,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexMetadata_Storage) Reset() { *m = IndexMetadata_Storage{} }
-func (m *IndexMetadata_Storage) String() string { return proto.CompactTextString(m) }
-func (*IndexMetadata_Storage) ProtoMessage() {}
-
-func (m *IndexMetadata_Storage) GetAmountUsed() int64 {
- if m != nil && m.AmountUsed != nil {
- return *m.AmountUsed
- }
- return 0
-}
-
-func (m *IndexMetadata_Storage) GetLimit() int64 {
- if m != nil && m.Limit != nil {
- return *m.Limit
- }
- return 0
-}
-
-type IndexDocumentParams struct {
- Document []*Document `protobuf:"bytes,1,rep,name=document" json:"document,omitempty"`
- Freshness *IndexDocumentParams_Freshness `protobuf:"varint,2,opt,name=freshness,enum=search.IndexDocumentParams_Freshness,def=0" json:"freshness,omitempty"`
- IndexSpec *IndexSpec `protobuf:"bytes,3,req,name=index_spec" json:"index_spec,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexDocumentParams) Reset() { *m = IndexDocumentParams{} }
-func (m *IndexDocumentParams) String() string { return proto.CompactTextString(m) }
-func (*IndexDocumentParams) ProtoMessage() {}
-
-const Default_IndexDocumentParams_Freshness IndexDocumentParams_Freshness = IndexDocumentParams_SYNCHRONOUSLY
-
-func (m *IndexDocumentParams) GetDocument() []*Document {
- if m != nil {
- return m.Document
- }
- return nil
-}
-
-func (m *IndexDocumentParams) GetFreshness() IndexDocumentParams_Freshness {
- if m != nil && m.Freshness != nil {
- return *m.Freshness
- }
- return Default_IndexDocumentParams_Freshness
-}
-
-func (m *IndexDocumentParams) GetIndexSpec() *IndexSpec {
- if m != nil {
- return m.IndexSpec
- }
- return nil
-}
-
-type IndexDocumentRequest struct {
- Params *IndexDocumentParams `protobuf:"bytes,1,req,name=params" json:"params,omitempty"`
- AppId []byte `protobuf:"bytes,3,opt,name=app_id" json:"app_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexDocumentRequest) Reset() { *m = IndexDocumentRequest{} }
-func (m *IndexDocumentRequest) String() string { return proto.CompactTextString(m) }
-func (*IndexDocumentRequest) ProtoMessage() {}
-
-func (m *IndexDocumentRequest) GetParams() *IndexDocumentParams {
- if m != nil {
- return m.Params
- }
- return nil
-}
-
-func (m *IndexDocumentRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-type IndexDocumentResponse struct {
- Status []*RequestStatus `protobuf:"bytes,1,rep,name=status" json:"status,omitempty"`
- DocId []string `protobuf:"bytes,2,rep,name=doc_id" json:"doc_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *IndexDocumentResponse) Reset() { *m = IndexDocumentResponse{} }
-func (m *IndexDocumentResponse) String() string { return proto.CompactTextString(m) }
-func (*IndexDocumentResponse) ProtoMessage() {}
-
-func (m *IndexDocumentResponse) GetStatus() []*RequestStatus {
- if m != nil {
- return m.Status
- }
- return nil
-}
-
-func (m *IndexDocumentResponse) GetDocId() []string {
- if m != nil {
- return m.DocId
- }
- return nil
-}
-
-type DeleteDocumentParams struct {
- DocId []string `protobuf:"bytes,1,rep,name=doc_id" json:"doc_id,omitempty"`
- IndexSpec *IndexSpec `protobuf:"bytes,2,req,name=index_spec" json:"index_spec,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DeleteDocumentParams) Reset() { *m = DeleteDocumentParams{} }
-func (m *DeleteDocumentParams) String() string { return proto.CompactTextString(m) }
-func (*DeleteDocumentParams) ProtoMessage() {}
-
-func (m *DeleteDocumentParams) GetDocId() []string {
- if m != nil {
- return m.DocId
- }
- return nil
-}
-
-func (m *DeleteDocumentParams) GetIndexSpec() *IndexSpec {
- if m != nil {
- return m.IndexSpec
- }
- return nil
-}
-
-type DeleteDocumentRequest struct {
- Params *DeleteDocumentParams `protobuf:"bytes,1,req,name=params" json:"params,omitempty"`
- AppId []byte `protobuf:"bytes,3,opt,name=app_id" json:"app_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DeleteDocumentRequest) Reset() { *m = DeleteDocumentRequest{} }
-func (m *DeleteDocumentRequest) String() string { return proto.CompactTextString(m) }
-func (*DeleteDocumentRequest) ProtoMessage() {}
-
-func (m *DeleteDocumentRequest) GetParams() *DeleteDocumentParams {
- if m != nil {
- return m.Params
- }
- return nil
-}
-
-func (m *DeleteDocumentRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-type DeleteDocumentResponse struct {
- Status []*RequestStatus `protobuf:"bytes,1,rep,name=status" json:"status,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DeleteDocumentResponse) Reset() { *m = DeleteDocumentResponse{} }
-func (m *DeleteDocumentResponse) String() string { return proto.CompactTextString(m) }
-func (*DeleteDocumentResponse) ProtoMessage() {}
-
-func (m *DeleteDocumentResponse) GetStatus() []*RequestStatus {
- if m != nil {
- return m.Status
- }
- return nil
-}
-
-type ListDocumentsParams struct {
- IndexSpec *IndexSpec `protobuf:"bytes,1,req,name=index_spec" json:"index_spec,omitempty"`
- StartDocId *string `protobuf:"bytes,2,opt,name=start_doc_id" json:"start_doc_id,omitempty"`
- IncludeStartDoc *bool `protobuf:"varint,3,opt,name=include_start_doc,def=1" json:"include_start_doc,omitempty"`
- Limit *int32 `protobuf:"varint,4,opt,name=limit,def=100" json:"limit,omitempty"`
- KeysOnly *bool `protobuf:"varint,5,opt,name=keys_only" json:"keys_only,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ListDocumentsParams) Reset() { *m = ListDocumentsParams{} }
-func (m *ListDocumentsParams) String() string { return proto.CompactTextString(m) }
-func (*ListDocumentsParams) ProtoMessage() {}
-
-const Default_ListDocumentsParams_IncludeStartDoc bool = true
-const Default_ListDocumentsParams_Limit int32 = 100
-
-func (m *ListDocumentsParams) GetIndexSpec() *IndexSpec {
- if m != nil {
- return m.IndexSpec
- }
- return nil
-}
-
-func (m *ListDocumentsParams) GetStartDocId() string {
- if m != nil && m.StartDocId != nil {
- return *m.StartDocId
- }
- return ""
-}
-
-func (m *ListDocumentsParams) GetIncludeStartDoc() bool {
- if m != nil && m.IncludeStartDoc != nil {
- return *m.IncludeStartDoc
- }
- return Default_ListDocumentsParams_IncludeStartDoc
-}
-
-func (m *ListDocumentsParams) GetLimit() int32 {
- if m != nil && m.Limit != nil {
- return *m.Limit
- }
- return Default_ListDocumentsParams_Limit
-}
-
-func (m *ListDocumentsParams) GetKeysOnly() bool {
- if m != nil && m.KeysOnly != nil {
- return *m.KeysOnly
- }
- return false
-}
-
-type ListDocumentsRequest struct {
- Params *ListDocumentsParams `protobuf:"bytes,1,req,name=params" json:"params,omitempty"`
- AppId []byte `protobuf:"bytes,2,opt,name=app_id" json:"app_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ListDocumentsRequest) Reset() { *m = ListDocumentsRequest{} }
-func (m *ListDocumentsRequest) String() string { return proto.CompactTextString(m) }
-func (*ListDocumentsRequest) ProtoMessage() {}
-
-func (m *ListDocumentsRequest) GetParams() *ListDocumentsParams {
- if m != nil {
- return m.Params
- }
- return nil
-}
-
-func (m *ListDocumentsRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-type ListDocumentsResponse struct {
- Status *RequestStatus `protobuf:"bytes,1,req,name=status" json:"status,omitempty"`
- Document []*Document `protobuf:"bytes,2,rep,name=document" json:"document,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ListDocumentsResponse) Reset() { *m = ListDocumentsResponse{} }
-func (m *ListDocumentsResponse) String() string { return proto.CompactTextString(m) }
-func (*ListDocumentsResponse) ProtoMessage() {}
-
-func (m *ListDocumentsResponse) GetStatus() *RequestStatus {
- if m != nil {
- return m.Status
- }
- return nil
-}
-
-func (m *ListDocumentsResponse) GetDocument() []*Document {
- if m != nil {
- return m.Document
- }
- return nil
-}
-
-type ListIndexesParams struct {
- FetchSchema *bool `protobuf:"varint,1,opt,name=fetch_schema" json:"fetch_schema,omitempty"`
- Limit *int32 `protobuf:"varint,2,opt,name=limit,def=20" json:"limit,omitempty"`
- Namespace *string `protobuf:"bytes,3,opt,name=namespace" json:"namespace,omitempty"`
- StartIndexName *string `protobuf:"bytes,4,opt,name=start_index_name" json:"start_index_name,omitempty"`
- IncludeStartIndex *bool `protobuf:"varint,5,opt,name=include_start_index,def=1" json:"include_start_index,omitempty"`
- IndexNamePrefix *string `protobuf:"bytes,6,opt,name=index_name_prefix" json:"index_name_prefix,omitempty"`
- Offset *int32 `protobuf:"varint,7,opt,name=offset" json:"offset,omitempty"`
- Source *IndexSpec_Source `protobuf:"varint,8,opt,name=source,enum=search.IndexSpec_Source,def=0" json:"source,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ListIndexesParams) Reset() { *m = ListIndexesParams{} }
-func (m *ListIndexesParams) String() string { return proto.CompactTextString(m) }
-func (*ListIndexesParams) ProtoMessage() {}
-
-const Default_ListIndexesParams_Limit int32 = 20
-const Default_ListIndexesParams_IncludeStartIndex bool = true
-const Default_ListIndexesParams_Source IndexSpec_Source = IndexSpec_SEARCH
-
-func (m *ListIndexesParams) GetFetchSchema() bool {
- if m != nil && m.FetchSchema != nil {
- return *m.FetchSchema
- }
- return false
-}
-
-func (m *ListIndexesParams) GetLimit() int32 {
- if m != nil && m.Limit != nil {
- return *m.Limit
- }
- return Default_ListIndexesParams_Limit
-}
-
-func (m *ListIndexesParams) GetNamespace() string {
- if m != nil && m.Namespace != nil {
- return *m.Namespace
- }
- return ""
-}
-
-func (m *ListIndexesParams) GetStartIndexName() string {
- if m != nil && m.StartIndexName != nil {
- return *m.StartIndexName
- }
- return ""
-}
-
-func (m *ListIndexesParams) GetIncludeStartIndex() bool {
- if m != nil && m.IncludeStartIndex != nil {
- return *m.IncludeStartIndex
- }
- return Default_ListIndexesParams_IncludeStartIndex
-}
-
-func (m *ListIndexesParams) GetIndexNamePrefix() string {
- if m != nil && m.IndexNamePrefix != nil {
- return *m.IndexNamePrefix
- }
- return ""
-}
-
-func (m *ListIndexesParams) GetOffset() int32 {
- if m != nil && m.Offset != nil {
- return *m.Offset
- }
- return 0
-}
-
-func (m *ListIndexesParams) GetSource() IndexSpec_Source {
- if m != nil && m.Source != nil {
- return *m.Source
- }
- return Default_ListIndexesParams_Source
-}
-
-type ListIndexesRequest struct {
- Params *ListIndexesParams `protobuf:"bytes,1,req,name=params" json:"params,omitempty"`
- AppId []byte `protobuf:"bytes,3,opt,name=app_id" json:"app_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ListIndexesRequest) Reset() { *m = ListIndexesRequest{} }
-func (m *ListIndexesRequest) String() string { return proto.CompactTextString(m) }
-func (*ListIndexesRequest) ProtoMessage() {}
-
-func (m *ListIndexesRequest) GetParams() *ListIndexesParams {
- if m != nil {
- return m.Params
- }
- return nil
-}
-
-func (m *ListIndexesRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-type ListIndexesResponse struct {
- Status *RequestStatus `protobuf:"bytes,1,req,name=status" json:"status,omitempty"`
- IndexMetadata []*IndexMetadata `protobuf:"bytes,2,rep,name=index_metadata" json:"index_metadata,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ListIndexesResponse) Reset() { *m = ListIndexesResponse{} }
-func (m *ListIndexesResponse) String() string { return proto.CompactTextString(m) }
-func (*ListIndexesResponse) ProtoMessage() {}
-
-func (m *ListIndexesResponse) GetStatus() *RequestStatus {
- if m != nil {
- return m.Status
- }
- return nil
-}
-
-func (m *ListIndexesResponse) GetIndexMetadata() []*IndexMetadata {
- if m != nil {
- return m.IndexMetadata
- }
- return nil
-}
-
-type DeleteSchemaParams struct {
- Source *IndexSpec_Source `protobuf:"varint,1,opt,name=source,enum=search.IndexSpec_Source,def=0" json:"source,omitempty"`
- IndexSpec []*IndexSpec `protobuf:"bytes,2,rep,name=index_spec" json:"index_spec,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DeleteSchemaParams) Reset() { *m = DeleteSchemaParams{} }
-func (m *DeleteSchemaParams) String() string { return proto.CompactTextString(m) }
-func (*DeleteSchemaParams) ProtoMessage() {}
-
-const Default_DeleteSchemaParams_Source IndexSpec_Source = IndexSpec_SEARCH
-
-func (m *DeleteSchemaParams) GetSource() IndexSpec_Source {
- if m != nil && m.Source != nil {
- return *m.Source
- }
- return Default_DeleteSchemaParams_Source
-}
-
-func (m *DeleteSchemaParams) GetIndexSpec() []*IndexSpec {
- if m != nil {
- return m.IndexSpec
- }
- return nil
-}
-
-type DeleteSchemaRequest struct {
- Params *DeleteSchemaParams `protobuf:"bytes,1,req,name=params" json:"params,omitempty"`
- AppId []byte `protobuf:"bytes,3,opt,name=app_id" json:"app_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DeleteSchemaRequest) Reset() { *m = DeleteSchemaRequest{} }
-func (m *DeleteSchemaRequest) String() string { return proto.CompactTextString(m) }
-func (*DeleteSchemaRequest) ProtoMessage() {}
-
-func (m *DeleteSchemaRequest) GetParams() *DeleteSchemaParams {
- if m != nil {
- return m.Params
- }
- return nil
-}
-
-func (m *DeleteSchemaRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-type DeleteSchemaResponse struct {
- Status []*RequestStatus `protobuf:"bytes,1,rep,name=status" json:"status,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *DeleteSchemaResponse) Reset() { *m = DeleteSchemaResponse{} }
-func (m *DeleteSchemaResponse) String() string { return proto.CompactTextString(m) }
-func (*DeleteSchemaResponse) ProtoMessage() {}
-
-func (m *DeleteSchemaResponse) GetStatus() []*RequestStatus {
- if m != nil {
- return m.Status
- }
- return nil
-}
-
-type SortSpec struct {
- SortExpression *string `protobuf:"bytes,1,req,name=sort_expression" json:"sort_expression,omitempty"`
- SortDescending *bool `protobuf:"varint,2,opt,name=sort_descending,def=1" json:"sort_descending,omitempty"`
- DefaultValueText *string `protobuf:"bytes,4,opt,name=default_value_text" json:"default_value_text,omitempty"`
- DefaultValueNumeric *float64 `protobuf:"fixed64,5,opt,name=default_value_numeric" json:"default_value_numeric,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SortSpec) Reset() { *m = SortSpec{} }
-func (m *SortSpec) String() string { return proto.CompactTextString(m) }
-func (*SortSpec) ProtoMessage() {}
-
-const Default_SortSpec_SortDescending bool = true
-
-func (m *SortSpec) GetSortExpression() string {
- if m != nil && m.SortExpression != nil {
- return *m.SortExpression
- }
- return ""
-}
-
-func (m *SortSpec) GetSortDescending() bool {
- if m != nil && m.SortDescending != nil {
- return *m.SortDescending
- }
- return Default_SortSpec_SortDescending
-}
-
-func (m *SortSpec) GetDefaultValueText() string {
- if m != nil && m.DefaultValueText != nil {
- return *m.DefaultValueText
- }
- return ""
-}
-
-func (m *SortSpec) GetDefaultValueNumeric() float64 {
- if m != nil && m.DefaultValueNumeric != nil {
- return *m.DefaultValueNumeric
- }
- return 0
-}
-
-type ScorerSpec struct {
- Scorer *ScorerSpec_Scorer `protobuf:"varint,1,opt,name=scorer,enum=search.ScorerSpec_Scorer,def=2" json:"scorer,omitempty"`
- Limit *int32 `protobuf:"varint,2,opt,name=limit,def=1000" json:"limit,omitempty"`
- MatchScorerParameters *string `protobuf:"bytes,9,opt,name=match_scorer_parameters" json:"match_scorer_parameters,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ScorerSpec) Reset() { *m = ScorerSpec{} }
-func (m *ScorerSpec) String() string { return proto.CompactTextString(m) }
-func (*ScorerSpec) ProtoMessage() {}
-
-const Default_ScorerSpec_Scorer ScorerSpec_Scorer = ScorerSpec_MATCH_SCORER
-const Default_ScorerSpec_Limit int32 = 1000
-
-func (m *ScorerSpec) GetScorer() ScorerSpec_Scorer {
- if m != nil && m.Scorer != nil {
- return *m.Scorer
- }
- return Default_ScorerSpec_Scorer
-}
-
-func (m *ScorerSpec) GetLimit() int32 {
- if m != nil && m.Limit != nil {
- return *m.Limit
- }
- return Default_ScorerSpec_Limit
-}
-
-func (m *ScorerSpec) GetMatchScorerParameters() string {
- if m != nil && m.MatchScorerParameters != nil {
- return *m.MatchScorerParameters
- }
- return ""
-}
-
-type FieldSpec struct {
- Name []string `protobuf:"bytes,1,rep,name=name" json:"name,omitempty"`
- Expression []*FieldSpec_Expression `protobuf:"group,2,rep,name=Expression" json:"expression,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FieldSpec) Reset() { *m = FieldSpec{} }
-func (m *FieldSpec) String() string { return proto.CompactTextString(m) }
-func (*FieldSpec) ProtoMessage() {}
-
-func (m *FieldSpec) GetName() []string {
- if m != nil {
- return m.Name
- }
- return nil
-}
-
-func (m *FieldSpec) GetExpression() []*FieldSpec_Expression {
- if m != nil {
- return m.Expression
- }
- return nil
-}
-
-type FieldSpec_Expression struct {
- Name *string `protobuf:"bytes,3,req,name=name" json:"name,omitempty"`
- Expression *string `protobuf:"bytes,4,req,name=expression" json:"expression,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FieldSpec_Expression) Reset() { *m = FieldSpec_Expression{} }
-func (m *FieldSpec_Expression) String() string { return proto.CompactTextString(m) }
-func (*FieldSpec_Expression) ProtoMessage() {}
-
-func (m *FieldSpec_Expression) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *FieldSpec_Expression) GetExpression() string {
- if m != nil && m.Expression != nil {
- return *m.Expression
- }
- return ""
-}
-
-type FacetRange struct {
- Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
- Start *string `protobuf:"bytes,2,opt,name=start" json:"start,omitempty"`
- End *string `protobuf:"bytes,3,opt,name=end" json:"end,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FacetRange) Reset() { *m = FacetRange{} }
-func (m *FacetRange) String() string { return proto.CompactTextString(m) }
-func (*FacetRange) ProtoMessage() {}
-
-func (m *FacetRange) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *FacetRange) GetStart() string {
- if m != nil && m.Start != nil {
- return *m.Start
- }
- return ""
-}
-
-func (m *FacetRange) GetEnd() string {
- if m != nil && m.End != nil {
- return *m.End
- }
- return ""
-}
-
-type FacetRequestParam struct {
- ValueLimit *int32 `protobuf:"varint,1,opt,name=value_limit" json:"value_limit,omitempty"`
- Range []*FacetRange `protobuf:"bytes,2,rep,name=range" json:"range,omitempty"`
- ValueConstraint []string `protobuf:"bytes,3,rep,name=value_constraint" json:"value_constraint,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FacetRequestParam) Reset() { *m = FacetRequestParam{} }
-func (m *FacetRequestParam) String() string { return proto.CompactTextString(m) }
-func (*FacetRequestParam) ProtoMessage() {}
-
-func (m *FacetRequestParam) GetValueLimit() int32 {
- if m != nil && m.ValueLimit != nil {
- return *m.ValueLimit
- }
- return 0
-}
-
-func (m *FacetRequestParam) GetRange() []*FacetRange {
- if m != nil {
- return m.Range
- }
- return nil
-}
-
-func (m *FacetRequestParam) GetValueConstraint() []string {
- if m != nil {
- return m.ValueConstraint
- }
- return nil
-}
-
-type FacetAutoDetectParam struct {
- ValueLimit *int32 `protobuf:"varint,1,opt,name=value_limit,def=10" json:"value_limit,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FacetAutoDetectParam) Reset() { *m = FacetAutoDetectParam{} }
-func (m *FacetAutoDetectParam) String() string { return proto.CompactTextString(m) }
-func (*FacetAutoDetectParam) ProtoMessage() {}
-
-const Default_FacetAutoDetectParam_ValueLimit int32 = 10
-
-func (m *FacetAutoDetectParam) GetValueLimit() int32 {
- if m != nil && m.ValueLimit != nil {
- return *m.ValueLimit
- }
- return Default_FacetAutoDetectParam_ValueLimit
-}
-
-type FacetRequest struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- Params *FacetRequestParam `protobuf:"bytes,2,opt,name=params" json:"params,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FacetRequest) Reset() { *m = FacetRequest{} }
-func (m *FacetRequest) String() string { return proto.CompactTextString(m) }
-func (*FacetRequest) ProtoMessage() {}
-
-func (m *FacetRequest) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *FacetRequest) GetParams() *FacetRequestParam {
- if m != nil {
- return m.Params
- }
- return nil
-}
-
-type FacetRefinement struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- Value *string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"`
- Range *FacetRefinement_Range `protobuf:"bytes,3,opt,name=range" json:"range,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FacetRefinement) Reset() { *m = FacetRefinement{} }
-func (m *FacetRefinement) String() string { return proto.CompactTextString(m) }
-func (*FacetRefinement) ProtoMessage() {}
-
-func (m *FacetRefinement) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *FacetRefinement) GetValue() string {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return ""
-}
-
-func (m *FacetRefinement) GetRange() *FacetRefinement_Range {
- if m != nil {
- return m.Range
- }
- return nil
-}
-
-type FacetRefinement_Range struct {
- Start *string `protobuf:"bytes,1,opt,name=start" json:"start,omitempty"`
- End *string `protobuf:"bytes,2,opt,name=end" json:"end,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FacetRefinement_Range) Reset() { *m = FacetRefinement_Range{} }
-func (m *FacetRefinement_Range) String() string { return proto.CompactTextString(m) }
-func (*FacetRefinement_Range) ProtoMessage() {}
-
-func (m *FacetRefinement_Range) GetStart() string {
- if m != nil && m.Start != nil {
- return *m.Start
- }
- return ""
-}
-
-func (m *FacetRefinement_Range) GetEnd() string {
- if m != nil && m.End != nil {
- return *m.End
- }
- return ""
-}
-
-type SearchParams struct {
- IndexSpec *IndexSpec `protobuf:"bytes,1,req,name=index_spec" json:"index_spec,omitempty"`
- Query *string `protobuf:"bytes,2,req,name=query" json:"query,omitempty"`
- Cursor *string `protobuf:"bytes,4,opt,name=cursor" json:"cursor,omitempty"`
- Offset *int32 `protobuf:"varint,11,opt,name=offset" json:"offset,omitempty"`
- CursorType *SearchParams_CursorType `protobuf:"varint,5,opt,name=cursor_type,enum=search.SearchParams_CursorType,def=0" json:"cursor_type,omitempty"`
- Limit *int32 `protobuf:"varint,6,opt,name=limit,def=20" json:"limit,omitempty"`
- MatchedCountAccuracy *int32 `protobuf:"varint,7,opt,name=matched_count_accuracy" json:"matched_count_accuracy,omitempty"`
- SortSpec []*SortSpec `protobuf:"bytes,8,rep,name=sort_spec" json:"sort_spec,omitempty"`
- ScorerSpec *ScorerSpec `protobuf:"bytes,9,opt,name=scorer_spec" json:"scorer_spec,omitempty"`
- FieldSpec *FieldSpec `protobuf:"bytes,10,opt,name=field_spec" json:"field_spec,omitempty"`
- KeysOnly *bool `protobuf:"varint,12,opt,name=keys_only" json:"keys_only,omitempty"`
- ParsingMode *SearchParams_ParsingMode `protobuf:"varint,13,opt,name=parsing_mode,enum=search.SearchParams_ParsingMode,def=0" json:"parsing_mode,omitempty"`
- AutoDiscoverFacetCount *int32 `protobuf:"varint,15,opt,name=auto_discover_facet_count,def=0" json:"auto_discover_facet_count,omitempty"`
- IncludeFacet []*FacetRequest `protobuf:"bytes,16,rep,name=include_facet" json:"include_facet,omitempty"`
- FacetRefinement []*FacetRefinement `protobuf:"bytes,17,rep,name=facet_refinement" json:"facet_refinement,omitempty"`
- FacetAutoDetectParam *FacetAutoDetectParam `protobuf:"bytes,18,opt,name=facet_auto_detect_param" json:"facet_auto_detect_param,omitempty"`
- FacetDepth *int32 `protobuf:"varint,19,opt,name=facet_depth,def=1000" json:"facet_depth,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SearchParams) Reset() { *m = SearchParams{} }
-func (m *SearchParams) String() string { return proto.CompactTextString(m) }
-func (*SearchParams) ProtoMessage() {}
-
-const Default_SearchParams_CursorType SearchParams_CursorType = SearchParams_NONE
-const Default_SearchParams_Limit int32 = 20
-const Default_SearchParams_ParsingMode SearchParams_ParsingMode = SearchParams_STRICT
-const Default_SearchParams_AutoDiscoverFacetCount int32 = 0
-const Default_SearchParams_FacetDepth int32 = 1000
-
-func (m *SearchParams) GetIndexSpec() *IndexSpec {
- if m != nil {
- return m.IndexSpec
- }
- return nil
-}
-
-func (m *SearchParams) GetQuery() string {
- if m != nil && m.Query != nil {
- return *m.Query
- }
- return ""
-}
-
-func (m *SearchParams) GetCursor() string {
- if m != nil && m.Cursor != nil {
- return *m.Cursor
- }
- return ""
-}
-
-func (m *SearchParams) GetOffset() int32 {
- if m != nil && m.Offset != nil {
- return *m.Offset
- }
- return 0
-}
-
-func (m *SearchParams) GetCursorType() SearchParams_CursorType {
- if m != nil && m.CursorType != nil {
- return *m.CursorType
- }
- return Default_SearchParams_CursorType
-}
-
-func (m *SearchParams) GetLimit() int32 {
- if m != nil && m.Limit != nil {
- return *m.Limit
- }
- return Default_SearchParams_Limit
-}
-
-func (m *SearchParams) GetMatchedCountAccuracy() int32 {
- if m != nil && m.MatchedCountAccuracy != nil {
- return *m.MatchedCountAccuracy
- }
- return 0
-}
-
-func (m *SearchParams) GetSortSpec() []*SortSpec {
- if m != nil {
- return m.SortSpec
- }
- return nil
-}
-
-func (m *SearchParams) GetScorerSpec() *ScorerSpec {
- if m != nil {
- return m.ScorerSpec
- }
- return nil
-}
-
-func (m *SearchParams) GetFieldSpec() *FieldSpec {
- if m != nil {
- return m.FieldSpec
- }
- return nil
-}
-
-func (m *SearchParams) GetKeysOnly() bool {
- if m != nil && m.KeysOnly != nil {
- return *m.KeysOnly
- }
- return false
-}
-
-func (m *SearchParams) GetParsingMode() SearchParams_ParsingMode {
- if m != nil && m.ParsingMode != nil {
- return *m.ParsingMode
- }
- return Default_SearchParams_ParsingMode
-}
-
-func (m *SearchParams) GetAutoDiscoverFacetCount() int32 {
- if m != nil && m.AutoDiscoverFacetCount != nil {
- return *m.AutoDiscoverFacetCount
- }
- return Default_SearchParams_AutoDiscoverFacetCount
-}
-
-func (m *SearchParams) GetIncludeFacet() []*FacetRequest {
- if m != nil {
- return m.IncludeFacet
- }
- return nil
-}
-
-func (m *SearchParams) GetFacetRefinement() []*FacetRefinement {
- if m != nil {
- return m.FacetRefinement
- }
- return nil
-}
-
-func (m *SearchParams) GetFacetAutoDetectParam() *FacetAutoDetectParam {
- if m != nil {
- return m.FacetAutoDetectParam
- }
- return nil
-}
-
-func (m *SearchParams) GetFacetDepth() int32 {
- if m != nil && m.FacetDepth != nil {
- return *m.FacetDepth
- }
- return Default_SearchParams_FacetDepth
-}
-
-type SearchRequest struct {
- Params *SearchParams `protobuf:"bytes,1,req,name=params" json:"params,omitempty"`
- AppId []byte `protobuf:"bytes,3,opt,name=app_id" json:"app_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SearchRequest) Reset() { *m = SearchRequest{} }
-func (m *SearchRequest) String() string { return proto.CompactTextString(m) }
-func (*SearchRequest) ProtoMessage() {}
-
-func (m *SearchRequest) GetParams() *SearchParams {
- if m != nil {
- return m.Params
- }
- return nil
-}
-
-func (m *SearchRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-type FacetResultValue struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- Count *int32 `protobuf:"varint,2,req,name=count" json:"count,omitempty"`
- Refinement *FacetRefinement `protobuf:"bytes,3,req,name=refinement" json:"refinement,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FacetResultValue) Reset() { *m = FacetResultValue{} }
-func (m *FacetResultValue) String() string { return proto.CompactTextString(m) }
-func (*FacetResultValue) ProtoMessage() {}
-
-func (m *FacetResultValue) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *FacetResultValue) GetCount() int32 {
- if m != nil && m.Count != nil {
- return *m.Count
- }
- return 0
-}
-
-func (m *FacetResultValue) GetRefinement() *FacetRefinement {
- if m != nil {
- return m.Refinement
- }
- return nil
-}
-
-type FacetResult struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- Value []*FacetResultValue `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *FacetResult) Reset() { *m = FacetResult{} }
-func (m *FacetResult) String() string { return proto.CompactTextString(m) }
-func (*FacetResult) ProtoMessage() {}
-
-func (m *FacetResult) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *FacetResult) GetValue() []*FacetResultValue {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type SearchResult struct {
- Document *Document `protobuf:"bytes,1,req,name=document" json:"document,omitempty"`
- Expression []*Field `protobuf:"bytes,4,rep,name=expression" json:"expression,omitempty"`
- Score []float64 `protobuf:"fixed64,2,rep,name=score" json:"score,omitempty"`
- Cursor *string `protobuf:"bytes,3,opt,name=cursor" json:"cursor,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SearchResult) Reset() { *m = SearchResult{} }
-func (m *SearchResult) String() string { return proto.CompactTextString(m) }
-func (*SearchResult) ProtoMessage() {}
-
-func (m *SearchResult) GetDocument() *Document {
- if m != nil {
- return m.Document
- }
- return nil
-}
-
-func (m *SearchResult) GetExpression() []*Field {
- if m != nil {
- return m.Expression
- }
- return nil
-}
-
-func (m *SearchResult) GetScore() []float64 {
- if m != nil {
- return m.Score
- }
- return nil
-}
-
-func (m *SearchResult) GetCursor() string {
- if m != nil && m.Cursor != nil {
- return *m.Cursor
- }
- return ""
-}
-
-type SearchResponse struct {
- Result []*SearchResult `protobuf:"bytes,1,rep,name=result" json:"result,omitempty"`
- MatchedCount *int64 `protobuf:"varint,2,req,name=matched_count" json:"matched_count,omitempty"`
- Status *RequestStatus `protobuf:"bytes,3,req,name=status" json:"status,omitempty"`
- Cursor *string `protobuf:"bytes,4,opt,name=cursor" json:"cursor,omitempty"`
- FacetResult []*FacetResult `protobuf:"bytes,5,rep,name=facet_result" json:"facet_result,omitempty"`
- XXX_extensions map[int32]proto.Extension `json:"-"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SearchResponse) Reset() { *m = SearchResponse{} }
-func (m *SearchResponse) String() string { return proto.CompactTextString(m) }
-func (*SearchResponse) ProtoMessage() {}
-
-var extRange_SearchResponse = []proto.ExtensionRange{
- {1000, 9999},
-}
-
-func (*SearchResponse) ExtensionRangeArray() []proto.ExtensionRange {
- return extRange_SearchResponse
-}
-func (m *SearchResponse) ExtensionMap() map[int32]proto.Extension {
- if m.XXX_extensions == nil {
- m.XXX_extensions = make(map[int32]proto.Extension)
- }
- return m.XXX_extensions
-}
-
-func (m *SearchResponse) GetResult() []*SearchResult {
- if m != nil {
- return m.Result
- }
- return nil
-}
-
-func (m *SearchResponse) GetMatchedCount() int64 {
- if m != nil && m.MatchedCount != nil {
- return *m.MatchedCount
- }
- return 0
-}
-
-func (m *SearchResponse) GetStatus() *RequestStatus {
- if m != nil {
- return m.Status
- }
- return nil
-}
-
-func (m *SearchResponse) GetCursor() string {
- if m != nil && m.Cursor != nil {
- return *m.Cursor
- }
- return ""
-}
-
-func (m *SearchResponse) GetFacetResult() []*FacetResult {
- if m != nil {
- return m.FacetResult
- }
- return nil
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/search/search.proto b/vendor/google.golang.org/appengine/internal/search/search.proto
deleted file mode 100644
index 219f4c3..0000000
--- a/vendor/google.golang.org/appengine/internal/search/search.proto
+++ /dev/null
@@ -1,388 +0,0 @@
-syntax = "proto2";
-option go_package = "search";
-
-package search;
-
-message Scope {
- enum Type {
- USER_BY_CANONICAL_ID = 1;
- USER_BY_EMAIL = 2;
- GROUP_BY_CANONICAL_ID = 3;
- GROUP_BY_EMAIL = 4;
- GROUP_BY_DOMAIN = 5;
- ALL_USERS = 6;
- ALL_AUTHENTICATED_USERS = 7;
- }
-
- optional Type type = 1;
- optional string value = 2;
-}
-
-message Entry {
- enum Permission {
- READ = 1;
- WRITE = 2;
- FULL_CONTROL = 3;
- }
-
- optional Scope scope = 1;
- optional Permission permission = 2;
- optional string display_name = 3;
-}
-
-message AccessControlList {
- optional string owner = 1;
- repeated Entry entries = 2;
-}
-
-message FieldValue {
- enum ContentType {
- TEXT = 0;
- HTML = 1;
- ATOM = 2;
- DATE = 3;
- NUMBER = 4;
- GEO = 5;
- }
-
- optional ContentType type = 1 [default = TEXT];
-
- optional string language = 2 [default = "en"];
-
- optional string string_value = 3;
-
- optional group Geo = 4 {
- required double lat = 5;
- required double lng = 6;
- }
-}
-
-message Field {
- required string name = 1;
- required FieldValue value = 2;
-}
-
-message FieldTypes {
- required string name = 1;
- repeated FieldValue.ContentType type = 2;
-}
-
-message IndexShardSettings {
- repeated int32 prev_num_shards = 1;
- required int32 num_shards = 2 [default=1];
- repeated int32 prev_num_shards_search_false = 3;
- optional string local_replica = 4 [default = ""];
-}
-
-message FacetValue {
- enum ContentType {
- ATOM = 2;
- NUMBER = 4;
- }
-
- optional ContentType type = 1 [default = ATOM];
- optional string string_value = 3;
-}
-
-message Facet {
- required string name = 1;
- required FacetValue value = 2;
-}
-
-message DocumentMetadata {
- optional int64 version = 1;
- optional int64 committed_st_version = 2;
-}
-
-message Document {
- optional string id = 1;
- optional string language = 2 [default = "en"];
- repeated Field field = 3;
- optional int32 order_id = 4;
-
- enum Storage {
- DISK = 0;
- }
-
- optional Storage storage = 5 [default = DISK];
- repeated Facet facet = 8;
-}
-
-message SearchServiceError {
- enum ErrorCode {
- OK = 0;
- INVALID_REQUEST = 1;
- TRANSIENT_ERROR = 2;
- INTERNAL_ERROR = 3;
- PERMISSION_DENIED = 4;
- TIMEOUT = 5;
- CONCURRENT_TRANSACTION = 6;
- }
-}
-
-message RequestStatus {
- required SearchServiceError.ErrorCode code = 1;
- optional string error_detail = 2;
- optional int32 canonical_code = 3;
-}
-
-message IndexSpec {
- required string name = 1;
-
- enum Consistency {
- GLOBAL = 0;
- PER_DOCUMENT = 1;
- }
- optional Consistency consistency = 2 [default = PER_DOCUMENT];
-
- optional string namespace = 3;
- optional int32 version = 4;
-
- enum Source {
- SEARCH = 0;
- DATASTORE = 1;
- CLOUD_STORAGE = 2;
- }
- optional Source source = 5 [default = SEARCH];
-
- enum Mode {
- PRIORITY = 0;
- BACKGROUND = 1;
- }
- optional Mode mode = 6 [default = PRIORITY];
-}
-
-message IndexMetadata {
- required IndexSpec index_spec = 1;
-
- repeated FieldTypes field = 2;
-
- message Storage {
- optional int64 amount_used = 1;
- optional int64 limit = 2;
- }
- optional Storage storage = 3;
-}
-
-message IndexDocumentParams {
- repeated Document document = 1;
-
- enum Freshness {
- SYNCHRONOUSLY = 0;
- WHEN_CONVENIENT = 1;
- }
- optional Freshness freshness = 2 [default = SYNCHRONOUSLY, deprecated=true];
-
- required IndexSpec index_spec = 3;
-}
-
-message IndexDocumentRequest {
- required IndexDocumentParams params = 1;
-
- optional bytes app_id = 3;
-}
-
-message IndexDocumentResponse {
- repeated RequestStatus status = 1;
-
- repeated string doc_id = 2;
-}
-
-message DeleteDocumentParams {
- repeated string doc_id = 1;
-
- required IndexSpec index_spec = 2;
-}
-
-message DeleteDocumentRequest {
- required DeleteDocumentParams params = 1;
-
- optional bytes app_id = 3;
-}
-
-message DeleteDocumentResponse {
- repeated RequestStatus status = 1;
-}
-
-message ListDocumentsParams {
- required IndexSpec index_spec = 1;
- optional string start_doc_id = 2;
- optional bool include_start_doc = 3 [default = true];
- optional int32 limit = 4 [default = 100];
- optional bool keys_only = 5;
-}
-
-message ListDocumentsRequest {
- required ListDocumentsParams params = 1;
-
- optional bytes app_id = 2;
-}
-
-message ListDocumentsResponse {
- required RequestStatus status = 1;
-
- repeated Document document = 2;
-}
-
-message ListIndexesParams {
- optional bool fetch_schema = 1;
- optional int32 limit = 2 [default = 20];
- optional string namespace = 3;
- optional string start_index_name = 4;
- optional bool include_start_index = 5 [default = true];
- optional string index_name_prefix = 6;
- optional int32 offset = 7;
- optional IndexSpec.Source source = 8 [default = SEARCH];
-}
-
-message ListIndexesRequest {
- required ListIndexesParams params = 1;
-
- optional bytes app_id = 3;
-}
-
-message ListIndexesResponse {
- required RequestStatus status = 1;
- repeated IndexMetadata index_metadata = 2;
-}
-
-message DeleteSchemaParams {
- optional IndexSpec.Source source = 1 [default = SEARCH];
- repeated IndexSpec index_spec = 2;
-}
-
-message DeleteSchemaRequest {
- required DeleteSchemaParams params = 1;
-
- optional bytes app_id = 3;
-}
-
-message DeleteSchemaResponse {
- repeated RequestStatus status = 1;
-}
-
-message SortSpec {
- required string sort_expression = 1;
- optional bool sort_descending = 2 [default = true];
- optional string default_value_text = 4;
- optional double default_value_numeric = 5;
-}
-
-message ScorerSpec {
- enum Scorer {
- RESCORING_MATCH_SCORER = 0;
- MATCH_SCORER = 2;
- }
- optional Scorer scorer = 1 [default = MATCH_SCORER];
-
- optional int32 limit = 2 [default = 1000];
- optional string match_scorer_parameters = 9;
-}
-
-message FieldSpec {
- repeated string name = 1;
-
- repeated group Expression = 2 {
- required string name = 3;
- required string expression = 4;
- }
-}
-
-message FacetRange {
- optional string name = 1;
- optional string start = 2;
- optional string end = 3;
-}
-
-message FacetRequestParam {
- optional int32 value_limit = 1;
- repeated FacetRange range = 2;
- repeated string value_constraint = 3;
-}
-
-message FacetAutoDetectParam {
- optional int32 value_limit = 1 [default = 10];
-}
-
-message FacetRequest {
- required string name = 1;
- optional FacetRequestParam params = 2;
-}
-
-message FacetRefinement {
- required string name = 1;
- optional string value = 2;
-
- message Range {
- optional string start = 1;
- optional string end = 2;
- }
- optional Range range = 3;
-}
-
-message SearchParams {
- required IndexSpec index_spec = 1;
- required string query = 2;
- optional string cursor = 4;
- optional int32 offset = 11;
-
- enum CursorType {
- NONE = 0;
- SINGLE = 1;
- PER_RESULT = 2;
- }
- optional CursorType cursor_type = 5 [default = NONE];
-
- optional int32 limit = 6 [default = 20];
- optional int32 matched_count_accuracy = 7;
- repeated SortSpec sort_spec = 8;
- optional ScorerSpec scorer_spec = 9;
- optional FieldSpec field_spec = 10;
- optional bool keys_only = 12;
-
- enum ParsingMode {
- STRICT = 0;
- RELAXED = 1;
- }
- optional ParsingMode parsing_mode = 13 [default = STRICT];
-
- optional int32 auto_discover_facet_count = 15 [default = 0];
- repeated FacetRequest include_facet = 16;
- repeated FacetRefinement facet_refinement = 17;
- optional FacetAutoDetectParam facet_auto_detect_param = 18;
- optional int32 facet_depth = 19 [default=1000];
-}
-
-message SearchRequest {
- required SearchParams params = 1;
-
- optional bytes app_id = 3;
-}
-
-message FacetResultValue {
- required string name = 1;
- required int32 count = 2;
- required FacetRefinement refinement = 3;
-}
-
-message FacetResult {
- required string name = 1;
- repeated FacetResultValue value = 2;
-}
-
-message SearchResult {
- required Document document = 1;
- repeated Field expression = 4;
- repeated double score = 2;
- optional string cursor = 3;
-}
-
-message SearchResponse {
- repeated SearchResult result = 1;
- required int64 matched_count = 2;
- required RequestStatus status = 3;
- optional string cursor = 4;
- repeated FacetResult facet_result = 5;
-
- extensions 1000 to 9999;
-}
diff --git a/vendor/google.golang.org/appengine/internal/socket/socket_service.pb.go b/vendor/google.golang.org/appengine/internal/socket/socket_service.pb.go
deleted file mode 100644
index 60628ec..0000000
--- a/vendor/google.golang.org/appengine/internal/socket/socket_service.pb.go
+++ /dev/null
@@ -1,1858 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/socket/socket_service.proto
-// DO NOT EDIT!
-
-/*
-Package socket is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/socket/socket_service.proto
-
-It has these top-level messages:
- RemoteSocketServiceError
- AddressPort
- CreateSocketRequest
- CreateSocketReply
- BindRequest
- BindReply
- GetSocketNameRequest
- GetSocketNameReply
- GetPeerNameRequest
- GetPeerNameReply
- SocketOption
- SetSocketOptionsRequest
- SetSocketOptionsReply
- GetSocketOptionsRequest
- GetSocketOptionsReply
- ConnectRequest
- ConnectReply
- ListenRequest
- ListenReply
- AcceptRequest
- AcceptReply
- ShutDownRequest
- ShutDownReply
- CloseRequest
- CloseReply
- SendRequest
- SendReply
- ReceiveRequest
- ReceiveReply
- PollEvent
- PollRequest
- PollReply
- ResolveRequest
- ResolveReply
-*/
-package socket
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type RemoteSocketServiceError_ErrorCode int32
-
-const (
- RemoteSocketServiceError_SYSTEM_ERROR RemoteSocketServiceError_ErrorCode = 1
- RemoteSocketServiceError_GAI_ERROR RemoteSocketServiceError_ErrorCode = 2
- RemoteSocketServiceError_FAILURE RemoteSocketServiceError_ErrorCode = 4
- RemoteSocketServiceError_PERMISSION_DENIED RemoteSocketServiceError_ErrorCode = 5
- RemoteSocketServiceError_INVALID_REQUEST RemoteSocketServiceError_ErrorCode = 6
- RemoteSocketServiceError_SOCKET_CLOSED RemoteSocketServiceError_ErrorCode = 7
-)
-
-var RemoteSocketServiceError_ErrorCode_name = map[int32]string{
- 1: "SYSTEM_ERROR",
- 2: "GAI_ERROR",
- 4: "FAILURE",
- 5: "PERMISSION_DENIED",
- 6: "INVALID_REQUEST",
- 7: "SOCKET_CLOSED",
-}
-var RemoteSocketServiceError_ErrorCode_value = map[string]int32{
- "SYSTEM_ERROR": 1,
- "GAI_ERROR": 2,
- "FAILURE": 4,
- "PERMISSION_DENIED": 5,
- "INVALID_REQUEST": 6,
- "SOCKET_CLOSED": 7,
-}
-
-func (x RemoteSocketServiceError_ErrorCode) Enum() *RemoteSocketServiceError_ErrorCode {
- p := new(RemoteSocketServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x RemoteSocketServiceError_ErrorCode) String() string {
- return proto.EnumName(RemoteSocketServiceError_ErrorCode_name, int32(x))
-}
-func (x *RemoteSocketServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(RemoteSocketServiceError_ErrorCode_value, data, "RemoteSocketServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = RemoteSocketServiceError_ErrorCode(value)
- return nil
-}
-
-type RemoteSocketServiceError_SystemError int32
-
-const (
- RemoteSocketServiceError_SYS_SUCCESS RemoteSocketServiceError_SystemError = 0
- RemoteSocketServiceError_SYS_EPERM RemoteSocketServiceError_SystemError = 1
- RemoteSocketServiceError_SYS_ENOENT RemoteSocketServiceError_SystemError = 2
- RemoteSocketServiceError_SYS_ESRCH RemoteSocketServiceError_SystemError = 3
- RemoteSocketServiceError_SYS_EINTR RemoteSocketServiceError_SystemError = 4
- RemoteSocketServiceError_SYS_EIO RemoteSocketServiceError_SystemError = 5
- RemoteSocketServiceError_SYS_ENXIO RemoteSocketServiceError_SystemError = 6
- RemoteSocketServiceError_SYS_E2BIG RemoteSocketServiceError_SystemError = 7
- RemoteSocketServiceError_SYS_ENOEXEC RemoteSocketServiceError_SystemError = 8
- RemoteSocketServiceError_SYS_EBADF RemoteSocketServiceError_SystemError = 9
- RemoteSocketServiceError_SYS_ECHILD RemoteSocketServiceError_SystemError = 10
- RemoteSocketServiceError_SYS_EAGAIN RemoteSocketServiceError_SystemError = 11
- RemoteSocketServiceError_SYS_EWOULDBLOCK RemoteSocketServiceError_SystemError = 11
- RemoteSocketServiceError_SYS_ENOMEM RemoteSocketServiceError_SystemError = 12
- RemoteSocketServiceError_SYS_EACCES RemoteSocketServiceError_SystemError = 13
- RemoteSocketServiceError_SYS_EFAULT RemoteSocketServiceError_SystemError = 14
- RemoteSocketServiceError_SYS_ENOTBLK RemoteSocketServiceError_SystemError = 15
- RemoteSocketServiceError_SYS_EBUSY RemoteSocketServiceError_SystemError = 16
- RemoteSocketServiceError_SYS_EEXIST RemoteSocketServiceError_SystemError = 17
- RemoteSocketServiceError_SYS_EXDEV RemoteSocketServiceError_SystemError = 18
- RemoteSocketServiceError_SYS_ENODEV RemoteSocketServiceError_SystemError = 19
- RemoteSocketServiceError_SYS_ENOTDIR RemoteSocketServiceError_SystemError = 20
- RemoteSocketServiceError_SYS_EISDIR RemoteSocketServiceError_SystemError = 21
- RemoteSocketServiceError_SYS_EINVAL RemoteSocketServiceError_SystemError = 22
- RemoteSocketServiceError_SYS_ENFILE RemoteSocketServiceError_SystemError = 23
- RemoteSocketServiceError_SYS_EMFILE RemoteSocketServiceError_SystemError = 24
- RemoteSocketServiceError_SYS_ENOTTY RemoteSocketServiceError_SystemError = 25
- RemoteSocketServiceError_SYS_ETXTBSY RemoteSocketServiceError_SystemError = 26
- RemoteSocketServiceError_SYS_EFBIG RemoteSocketServiceError_SystemError = 27
- RemoteSocketServiceError_SYS_ENOSPC RemoteSocketServiceError_SystemError = 28
- RemoteSocketServiceError_SYS_ESPIPE RemoteSocketServiceError_SystemError = 29
- RemoteSocketServiceError_SYS_EROFS RemoteSocketServiceError_SystemError = 30
- RemoteSocketServiceError_SYS_EMLINK RemoteSocketServiceError_SystemError = 31
- RemoteSocketServiceError_SYS_EPIPE RemoteSocketServiceError_SystemError = 32
- RemoteSocketServiceError_SYS_EDOM RemoteSocketServiceError_SystemError = 33
- RemoteSocketServiceError_SYS_ERANGE RemoteSocketServiceError_SystemError = 34
- RemoteSocketServiceError_SYS_EDEADLK RemoteSocketServiceError_SystemError = 35
- RemoteSocketServiceError_SYS_EDEADLOCK RemoteSocketServiceError_SystemError = 35
- RemoteSocketServiceError_SYS_ENAMETOOLONG RemoteSocketServiceError_SystemError = 36
- RemoteSocketServiceError_SYS_ENOLCK RemoteSocketServiceError_SystemError = 37
- RemoteSocketServiceError_SYS_ENOSYS RemoteSocketServiceError_SystemError = 38
- RemoteSocketServiceError_SYS_ENOTEMPTY RemoteSocketServiceError_SystemError = 39
- RemoteSocketServiceError_SYS_ELOOP RemoteSocketServiceError_SystemError = 40
- RemoteSocketServiceError_SYS_ENOMSG RemoteSocketServiceError_SystemError = 42
- RemoteSocketServiceError_SYS_EIDRM RemoteSocketServiceError_SystemError = 43
- RemoteSocketServiceError_SYS_ECHRNG RemoteSocketServiceError_SystemError = 44
- RemoteSocketServiceError_SYS_EL2NSYNC RemoteSocketServiceError_SystemError = 45
- RemoteSocketServiceError_SYS_EL3HLT RemoteSocketServiceError_SystemError = 46
- RemoteSocketServiceError_SYS_EL3RST RemoteSocketServiceError_SystemError = 47
- RemoteSocketServiceError_SYS_ELNRNG RemoteSocketServiceError_SystemError = 48
- RemoteSocketServiceError_SYS_EUNATCH RemoteSocketServiceError_SystemError = 49
- RemoteSocketServiceError_SYS_ENOCSI RemoteSocketServiceError_SystemError = 50
- RemoteSocketServiceError_SYS_EL2HLT RemoteSocketServiceError_SystemError = 51
- RemoteSocketServiceError_SYS_EBADE RemoteSocketServiceError_SystemError = 52
- RemoteSocketServiceError_SYS_EBADR RemoteSocketServiceError_SystemError = 53
- RemoteSocketServiceError_SYS_EXFULL RemoteSocketServiceError_SystemError = 54
- RemoteSocketServiceError_SYS_ENOANO RemoteSocketServiceError_SystemError = 55
- RemoteSocketServiceError_SYS_EBADRQC RemoteSocketServiceError_SystemError = 56
- RemoteSocketServiceError_SYS_EBADSLT RemoteSocketServiceError_SystemError = 57
- RemoteSocketServiceError_SYS_EBFONT RemoteSocketServiceError_SystemError = 59
- RemoteSocketServiceError_SYS_ENOSTR RemoteSocketServiceError_SystemError = 60
- RemoteSocketServiceError_SYS_ENODATA RemoteSocketServiceError_SystemError = 61
- RemoteSocketServiceError_SYS_ETIME RemoteSocketServiceError_SystemError = 62
- RemoteSocketServiceError_SYS_ENOSR RemoteSocketServiceError_SystemError = 63
- RemoteSocketServiceError_SYS_ENONET RemoteSocketServiceError_SystemError = 64
- RemoteSocketServiceError_SYS_ENOPKG RemoteSocketServiceError_SystemError = 65
- RemoteSocketServiceError_SYS_EREMOTE RemoteSocketServiceError_SystemError = 66
- RemoteSocketServiceError_SYS_ENOLINK RemoteSocketServiceError_SystemError = 67
- RemoteSocketServiceError_SYS_EADV RemoteSocketServiceError_SystemError = 68
- RemoteSocketServiceError_SYS_ESRMNT RemoteSocketServiceError_SystemError = 69
- RemoteSocketServiceError_SYS_ECOMM RemoteSocketServiceError_SystemError = 70
- RemoteSocketServiceError_SYS_EPROTO RemoteSocketServiceError_SystemError = 71
- RemoteSocketServiceError_SYS_EMULTIHOP RemoteSocketServiceError_SystemError = 72
- RemoteSocketServiceError_SYS_EDOTDOT RemoteSocketServiceError_SystemError = 73
- RemoteSocketServiceError_SYS_EBADMSG RemoteSocketServiceError_SystemError = 74
- RemoteSocketServiceError_SYS_EOVERFLOW RemoteSocketServiceError_SystemError = 75
- RemoteSocketServiceError_SYS_ENOTUNIQ RemoteSocketServiceError_SystemError = 76
- RemoteSocketServiceError_SYS_EBADFD RemoteSocketServiceError_SystemError = 77
- RemoteSocketServiceError_SYS_EREMCHG RemoteSocketServiceError_SystemError = 78
- RemoteSocketServiceError_SYS_ELIBACC RemoteSocketServiceError_SystemError = 79
- RemoteSocketServiceError_SYS_ELIBBAD RemoteSocketServiceError_SystemError = 80
- RemoteSocketServiceError_SYS_ELIBSCN RemoteSocketServiceError_SystemError = 81
- RemoteSocketServiceError_SYS_ELIBMAX RemoteSocketServiceError_SystemError = 82
- RemoteSocketServiceError_SYS_ELIBEXEC RemoteSocketServiceError_SystemError = 83
- RemoteSocketServiceError_SYS_EILSEQ RemoteSocketServiceError_SystemError = 84
- RemoteSocketServiceError_SYS_ERESTART RemoteSocketServiceError_SystemError = 85
- RemoteSocketServiceError_SYS_ESTRPIPE RemoteSocketServiceError_SystemError = 86
- RemoteSocketServiceError_SYS_EUSERS RemoteSocketServiceError_SystemError = 87
- RemoteSocketServiceError_SYS_ENOTSOCK RemoteSocketServiceError_SystemError = 88
- RemoteSocketServiceError_SYS_EDESTADDRREQ RemoteSocketServiceError_SystemError = 89
- RemoteSocketServiceError_SYS_EMSGSIZE RemoteSocketServiceError_SystemError = 90
- RemoteSocketServiceError_SYS_EPROTOTYPE RemoteSocketServiceError_SystemError = 91
- RemoteSocketServiceError_SYS_ENOPROTOOPT RemoteSocketServiceError_SystemError = 92
- RemoteSocketServiceError_SYS_EPROTONOSUPPORT RemoteSocketServiceError_SystemError = 93
- RemoteSocketServiceError_SYS_ESOCKTNOSUPPORT RemoteSocketServiceError_SystemError = 94
- RemoteSocketServiceError_SYS_EOPNOTSUPP RemoteSocketServiceError_SystemError = 95
- RemoteSocketServiceError_SYS_ENOTSUP RemoteSocketServiceError_SystemError = 95
- RemoteSocketServiceError_SYS_EPFNOSUPPORT RemoteSocketServiceError_SystemError = 96
- RemoteSocketServiceError_SYS_EAFNOSUPPORT RemoteSocketServiceError_SystemError = 97
- RemoteSocketServiceError_SYS_EADDRINUSE RemoteSocketServiceError_SystemError = 98
- RemoteSocketServiceError_SYS_EADDRNOTAVAIL RemoteSocketServiceError_SystemError = 99
- RemoteSocketServiceError_SYS_ENETDOWN RemoteSocketServiceError_SystemError = 100
- RemoteSocketServiceError_SYS_ENETUNREACH RemoteSocketServiceError_SystemError = 101
- RemoteSocketServiceError_SYS_ENETRESET RemoteSocketServiceError_SystemError = 102
- RemoteSocketServiceError_SYS_ECONNABORTED RemoteSocketServiceError_SystemError = 103
- RemoteSocketServiceError_SYS_ECONNRESET RemoteSocketServiceError_SystemError = 104
- RemoteSocketServiceError_SYS_ENOBUFS RemoteSocketServiceError_SystemError = 105
- RemoteSocketServiceError_SYS_EISCONN RemoteSocketServiceError_SystemError = 106
- RemoteSocketServiceError_SYS_ENOTCONN RemoteSocketServiceError_SystemError = 107
- RemoteSocketServiceError_SYS_ESHUTDOWN RemoteSocketServiceError_SystemError = 108
- RemoteSocketServiceError_SYS_ETOOMANYREFS RemoteSocketServiceError_SystemError = 109
- RemoteSocketServiceError_SYS_ETIMEDOUT RemoteSocketServiceError_SystemError = 110
- RemoteSocketServiceError_SYS_ECONNREFUSED RemoteSocketServiceError_SystemError = 111
- RemoteSocketServiceError_SYS_EHOSTDOWN RemoteSocketServiceError_SystemError = 112
- RemoteSocketServiceError_SYS_EHOSTUNREACH RemoteSocketServiceError_SystemError = 113
- RemoteSocketServiceError_SYS_EALREADY RemoteSocketServiceError_SystemError = 114
- RemoteSocketServiceError_SYS_EINPROGRESS RemoteSocketServiceError_SystemError = 115
- RemoteSocketServiceError_SYS_ESTALE RemoteSocketServiceError_SystemError = 116
- RemoteSocketServiceError_SYS_EUCLEAN RemoteSocketServiceError_SystemError = 117
- RemoteSocketServiceError_SYS_ENOTNAM RemoteSocketServiceError_SystemError = 118
- RemoteSocketServiceError_SYS_ENAVAIL RemoteSocketServiceError_SystemError = 119
- RemoteSocketServiceError_SYS_EISNAM RemoteSocketServiceError_SystemError = 120
- RemoteSocketServiceError_SYS_EREMOTEIO RemoteSocketServiceError_SystemError = 121
- RemoteSocketServiceError_SYS_EDQUOT RemoteSocketServiceError_SystemError = 122
- RemoteSocketServiceError_SYS_ENOMEDIUM RemoteSocketServiceError_SystemError = 123
- RemoteSocketServiceError_SYS_EMEDIUMTYPE RemoteSocketServiceError_SystemError = 124
- RemoteSocketServiceError_SYS_ECANCELED RemoteSocketServiceError_SystemError = 125
- RemoteSocketServiceError_SYS_ENOKEY RemoteSocketServiceError_SystemError = 126
- RemoteSocketServiceError_SYS_EKEYEXPIRED RemoteSocketServiceError_SystemError = 127
- RemoteSocketServiceError_SYS_EKEYREVOKED RemoteSocketServiceError_SystemError = 128
- RemoteSocketServiceError_SYS_EKEYREJECTED RemoteSocketServiceError_SystemError = 129
- RemoteSocketServiceError_SYS_EOWNERDEAD RemoteSocketServiceError_SystemError = 130
- RemoteSocketServiceError_SYS_ENOTRECOVERABLE RemoteSocketServiceError_SystemError = 131
- RemoteSocketServiceError_SYS_ERFKILL RemoteSocketServiceError_SystemError = 132
-)
-
-var RemoteSocketServiceError_SystemError_name = map[int32]string{
- 0: "SYS_SUCCESS",
- 1: "SYS_EPERM",
- 2: "SYS_ENOENT",
- 3: "SYS_ESRCH",
- 4: "SYS_EINTR",
- 5: "SYS_EIO",
- 6: "SYS_ENXIO",
- 7: "SYS_E2BIG",
- 8: "SYS_ENOEXEC",
- 9: "SYS_EBADF",
- 10: "SYS_ECHILD",
- 11: "SYS_EAGAIN",
- // Duplicate value: 11: "SYS_EWOULDBLOCK",
- 12: "SYS_ENOMEM",
- 13: "SYS_EACCES",
- 14: "SYS_EFAULT",
- 15: "SYS_ENOTBLK",
- 16: "SYS_EBUSY",
- 17: "SYS_EEXIST",
- 18: "SYS_EXDEV",
- 19: "SYS_ENODEV",
- 20: "SYS_ENOTDIR",
- 21: "SYS_EISDIR",
- 22: "SYS_EINVAL",
- 23: "SYS_ENFILE",
- 24: "SYS_EMFILE",
- 25: "SYS_ENOTTY",
- 26: "SYS_ETXTBSY",
- 27: "SYS_EFBIG",
- 28: "SYS_ENOSPC",
- 29: "SYS_ESPIPE",
- 30: "SYS_EROFS",
- 31: "SYS_EMLINK",
- 32: "SYS_EPIPE",
- 33: "SYS_EDOM",
- 34: "SYS_ERANGE",
- 35: "SYS_EDEADLK",
- // Duplicate value: 35: "SYS_EDEADLOCK",
- 36: "SYS_ENAMETOOLONG",
- 37: "SYS_ENOLCK",
- 38: "SYS_ENOSYS",
- 39: "SYS_ENOTEMPTY",
- 40: "SYS_ELOOP",
- 42: "SYS_ENOMSG",
- 43: "SYS_EIDRM",
- 44: "SYS_ECHRNG",
- 45: "SYS_EL2NSYNC",
- 46: "SYS_EL3HLT",
- 47: "SYS_EL3RST",
- 48: "SYS_ELNRNG",
- 49: "SYS_EUNATCH",
- 50: "SYS_ENOCSI",
- 51: "SYS_EL2HLT",
- 52: "SYS_EBADE",
- 53: "SYS_EBADR",
- 54: "SYS_EXFULL",
- 55: "SYS_ENOANO",
- 56: "SYS_EBADRQC",
- 57: "SYS_EBADSLT",
- 59: "SYS_EBFONT",
- 60: "SYS_ENOSTR",
- 61: "SYS_ENODATA",
- 62: "SYS_ETIME",
- 63: "SYS_ENOSR",
- 64: "SYS_ENONET",
- 65: "SYS_ENOPKG",
- 66: "SYS_EREMOTE",
- 67: "SYS_ENOLINK",
- 68: "SYS_EADV",
- 69: "SYS_ESRMNT",
- 70: "SYS_ECOMM",
- 71: "SYS_EPROTO",
- 72: "SYS_EMULTIHOP",
- 73: "SYS_EDOTDOT",
- 74: "SYS_EBADMSG",
- 75: "SYS_EOVERFLOW",
- 76: "SYS_ENOTUNIQ",
- 77: "SYS_EBADFD",
- 78: "SYS_EREMCHG",
- 79: "SYS_ELIBACC",
- 80: "SYS_ELIBBAD",
- 81: "SYS_ELIBSCN",
- 82: "SYS_ELIBMAX",
- 83: "SYS_ELIBEXEC",
- 84: "SYS_EILSEQ",
- 85: "SYS_ERESTART",
- 86: "SYS_ESTRPIPE",
- 87: "SYS_EUSERS",
- 88: "SYS_ENOTSOCK",
- 89: "SYS_EDESTADDRREQ",
- 90: "SYS_EMSGSIZE",
- 91: "SYS_EPROTOTYPE",
- 92: "SYS_ENOPROTOOPT",
- 93: "SYS_EPROTONOSUPPORT",
- 94: "SYS_ESOCKTNOSUPPORT",
- 95: "SYS_EOPNOTSUPP",
- // Duplicate value: 95: "SYS_ENOTSUP",
- 96: "SYS_EPFNOSUPPORT",
- 97: "SYS_EAFNOSUPPORT",
- 98: "SYS_EADDRINUSE",
- 99: "SYS_EADDRNOTAVAIL",
- 100: "SYS_ENETDOWN",
- 101: "SYS_ENETUNREACH",
- 102: "SYS_ENETRESET",
- 103: "SYS_ECONNABORTED",
- 104: "SYS_ECONNRESET",
- 105: "SYS_ENOBUFS",
- 106: "SYS_EISCONN",
- 107: "SYS_ENOTCONN",
- 108: "SYS_ESHUTDOWN",
- 109: "SYS_ETOOMANYREFS",
- 110: "SYS_ETIMEDOUT",
- 111: "SYS_ECONNREFUSED",
- 112: "SYS_EHOSTDOWN",
- 113: "SYS_EHOSTUNREACH",
- 114: "SYS_EALREADY",
- 115: "SYS_EINPROGRESS",
- 116: "SYS_ESTALE",
- 117: "SYS_EUCLEAN",
- 118: "SYS_ENOTNAM",
- 119: "SYS_ENAVAIL",
- 120: "SYS_EISNAM",
- 121: "SYS_EREMOTEIO",
- 122: "SYS_EDQUOT",
- 123: "SYS_ENOMEDIUM",
- 124: "SYS_EMEDIUMTYPE",
- 125: "SYS_ECANCELED",
- 126: "SYS_ENOKEY",
- 127: "SYS_EKEYEXPIRED",
- 128: "SYS_EKEYREVOKED",
- 129: "SYS_EKEYREJECTED",
- 130: "SYS_EOWNERDEAD",
- 131: "SYS_ENOTRECOVERABLE",
- 132: "SYS_ERFKILL",
-}
-var RemoteSocketServiceError_SystemError_value = map[string]int32{
- "SYS_SUCCESS": 0,
- "SYS_EPERM": 1,
- "SYS_ENOENT": 2,
- "SYS_ESRCH": 3,
- "SYS_EINTR": 4,
- "SYS_EIO": 5,
- "SYS_ENXIO": 6,
- "SYS_E2BIG": 7,
- "SYS_ENOEXEC": 8,
- "SYS_EBADF": 9,
- "SYS_ECHILD": 10,
- "SYS_EAGAIN": 11,
- "SYS_EWOULDBLOCK": 11,
- "SYS_ENOMEM": 12,
- "SYS_EACCES": 13,
- "SYS_EFAULT": 14,
- "SYS_ENOTBLK": 15,
- "SYS_EBUSY": 16,
- "SYS_EEXIST": 17,
- "SYS_EXDEV": 18,
- "SYS_ENODEV": 19,
- "SYS_ENOTDIR": 20,
- "SYS_EISDIR": 21,
- "SYS_EINVAL": 22,
- "SYS_ENFILE": 23,
- "SYS_EMFILE": 24,
- "SYS_ENOTTY": 25,
- "SYS_ETXTBSY": 26,
- "SYS_EFBIG": 27,
- "SYS_ENOSPC": 28,
- "SYS_ESPIPE": 29,
- "SYS_EROFS": 30,
- "SYS_EMLINK": 31,
- "SYS_EPIPE": 32,
- "SYS_EDOM": 33,
- "SYS_ERANGE": 34,
- "SYS_EDEADLK": 35,
- "SYS_EDEADLOCK": 35,
- "SYS_ENAMETOOLONG": 36,
- "SYS_ENOLCK": 37,
- "SYS_ENOSYS": 38,
- "SYS_ENOTEMPTY": 39,
- "SYS_ELOOP": 40,
- "SYS_ENOMSG": 42,
- "SYS_EIDRM": 43,
- "SYS_ECHRNG": 44,
- "SYS_EL2NSYNC": 45,
- "SYS_EL3HLT": 46,
- "SYS_EL3RST": 47,
- "SYS_ELNRNG": 48,
- "SYS_EUNATCH": 49,
- "SYS_ENOCSI": 50,
- "SYS_EL2HLT": 51,
- "SYS_EBADE": 52,
- "SYS_EBADR": 53,
- "SYS_EXFULL": 54,
- "SYS_ENOANO": 55,
- "SYS_EBADRQC": 56,
- "SYS_EBADSLT": 57,
- "SYS_EBFONT": 59,
- "SYS_ENOSTR": 60,
- "SYS_ENODATA": 61,
- "SYS_ETIME": 62,
- "SYS_ENOSR": 63,
- "SYS_ENONET": 64,
- "SYS_ENOPKG": 65,
- "SYS_EREMOTE": 66,
- "SYS_ENOLINK": 67,
- "SYS_EADV": 68,
- "SYS_ESRMNT": 69,
- "SYS_ECOMM": 70,
- "SYS_EPROTO": 71,
- "SYS_EMULTIHOP": 72,
- "SYS_EDOTDOT": 73,
- "SYS_EBADMSG": 74,
- "SYS_EOVERFLOW": 75,
- "SYS_ENOTUNIQ": 76,
- "SYS_EBADFD": 77,
- "SYS_EREMCHG": 78,
- "SYS_ELIBACC": 79,
- "SYS_ELIBBAD": 80,
- "SYS_ELIBSCN": 81,
- "SYS_ELIBMAX": 82,
- "SYS_ELIBEXEC": 83,
- "SYS_EILSEQ": 84,
- "SYS_ERESTART": 85,
- "SYS_ESTRPIPE": 86,
- "SYS_EUSERS": 87,
- "SYS_ENOTSOCK": 88,
- "SYS_EDESTADDRREQ": 89,
- "SYS_EMSGSIZE": 90,
- "SYS_EPROTOTYPE": 91,
- "SYS_ENOPROTOOPT": 92,
- "SYS_EPROTONOSUPPORT": 93,
- "SYS_ESOCKTNOSUPPORT": 94,
- "SYS_EOPNOTSUPP": 95,
- "SYS_ENOTSUP": 95,
- "SYS_EPFNOSUPPORT": 96,
- "SYS_EAFNOSUPPORT": 97,
- "SYS_EADDRINUSE": 98,
- "SYS_EADDRNOTAVAIL": 99,
- "SYS_ENETDOWN": 100,
- "SYS_ENETUNREACH": 101,
- "SYS_ENETRESET": 102,
- "SYS_ECONNABORTED": 103,
- "SYS_ECONNRESET": 104,
- "SYS_ENOBUFS": 105,
- "SYS_EISCONN": 106,
- "SYS_ENOTCONN": 107,
- "SYS_ESHUTDOWN": 108,
- "SYS_ETOOMANYREFS": 109,
- "SYS_ETIMEDOUT": 110,
- "SYS_ECONNREFUSED": 111,
- "SYS_EHOSTDOWN": 112,
- "SYS_EHOSTUNREACH": 113,
- "SYS_EALREADY": 114,
- "SYS_EINPROGRESS": 115,
- "SYS_ESTALE": 116,
- "SYS_EUCLEAN": 117,
- "SYS_ENOTNAM": 118,
- "SYS_ENAVAIL": 119,
- "SYS_EISNAM": 120,
- "SYS_EREMOTEIO": 121,
- "SYS_EDQUOT": 122,
- "SYS_ENOMEDIUM": 123,
- "SYS_EMEDIUMTYPE": 124,
- "SYS_ECANCELED": 125,
- "SYS_ENOKEY": 126,
- "SYS_EKEYEXPIRED": 127,
- "SYS_EKEYREVOKED": 128,
- "SYS_EKEYREJECTED": 129,
- "SYS_EOWNERDEAD": 130,
- "SYS_ENOTRECOVERABLE": 131,
- "SYS_ERFKILL": 132,
-}
-
-func (x RemoteSocketServiceError_SystemError) Enum() *RemoteSocketServiceError_SystemError {
- p := new(RemoteSocketServiceError_SystemError)
- *p = x
- return p
-}
-func (x RemoteSocketServiceError_SystemError) String() string {
- return proto.EnumName(RemoteSocketServiceError_SystemError_name, int32(x))
-}
-func (x *RemoteSocketServiceError_SystemError) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(RemoteSocketServiceError_SystemError_value, data, "RemoteSocketServiceError_SystemError")
- if err != nil {
- return err
- }
- *x = RemoteSocketServiceError_SystemError(value)
- return nil
-}
-
-type CreateSocketRequest_SocketFamily int32
-
-const (
- CreateSocketRequest_IPv4 CreateSocketRequest_SocketFamily = 1
- CreateSocketRequest_IPv6 CreateSocketRequest_SocketFamily = 2
-)
-
-var CreateSocketRequest_SocketFamily_name = map[int32]string{
- 1: "IPv4",
- 2: "IPv6",
-}
-var CreateSocketRequest_SocketFamily_value = map[string]int32{
- "IPv4": 1,
- "IPv6": 2,
-}
-
-func (x CreateSocketRequest_SocketFamily) Enum() *CreateSocketRequest_SocketFamily {
- p := new(CreateSocketRequest_SocketFamily)
- *p = x
- return p
-}
-func (x CreateSocketRequest_SocketFamily) String() string {
- return proto.EnumName(CreateSocketRequest_SocketFamily_name, int32(x))
-}
-func (x *CreateSocketRequest_SocketFamily) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(CreateSocketRequest_SocketFamily_value, data, "CreateSocketRequest_SocketFamily")
- if err != nil {
- return err
- }
- *x = CreateSocketRequest_SocketFamily(value)
- return nil
-}
-
-type CreateSocketRequest_SocketProtocol int32
-
-const (
- CreateSocketRequest_TCP CreateSocketRequest_SocketProtocol = 1
- CreateSocketRequest_UDP CreateSocketRequest_SocketProtocol = 2
-)
-
-var CreateSocketRequest_SocketProtocol_name = map[int32]string{
- 1: "TCP",
- 2: "UDP",
-}
-var CreateSocketRequest_SocketProtocol_value = map[string]int32{
- "TCP": 1,
- "UDP": 2,
-}
-
-func (x CreateSocketRequest_SocketProtocol) Enum() *CreateSocketRequest_SocketProtocol {
- p := new(CreateSocketRequest_SocketProtocol)
- *p = x
- return p
-}
-func (x CreateSocketRequest_SocketProtocol) String() string {
- return proto.EnumName(CreateSocketRequest_SocketProtocol_name, int32(x))
-}
-func (x *CreateSocketRequest_SocketProtocol) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(CreateSocketRequest_SocketProtocol_value, data, "CreateSocketRequest_SocketProtocol")
- if err != nil {
- return err
- }
- *x = CreateSocketRequest_SocketProtocol(value)
- return nil
-}
-
-type SocketOption_SocketOptionLevel int32
-
-const (
- SocketOption_SOCKET_SOL_IP SocketOption_SocketOptionLevel = 0
- SocketOption_SOCKET_SOL_SOCKET SocketOption_SocketOptionLevel = 1
- SocketOption_SOCKET_SOL_TCP SocketOption_SocketOptionLevel = 6
- SocketOption_SOCKET_SOL_UDP SocketOption_SocketOptionLevel = 17
-)
-
-var SocketOption_SocketOptionLevel_name = map[int32]string{
- 0: "SOCKET_SOL_IP",
- 1: "SOCKET_SOL_SOCKET",
- 6: "SOCKET_SOL_TCP",
- 17: "SOCKET_SOL_UDP",
-}
-var SocketOption_SocketOptionLevel_value = map[string]int32{
- "SOCKET_SOL_IP": 0,
- "SOCKET_SOL_SOCKET": 1,
- "SOCKET_SOL_TCP": 6,
- "SOCKET_SOL_UDP": 17,
-}
-
-func (x SocketOption_SocketOptionLevel) Enum() *SocketOption_SocketOptionLevel {
- p := new(SocketOption_SocketOptionLevel)
- *p = x
- return p
-}
-func (x SocketOption_SocketOptionLevel) String() string {
- return proto.EnumName(SocketOption_SocketOptionLevel_name, int32(x))
-}
-func (x *SocketOption_SocketOptionLevel) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(SocketOption_SocketOptionLevel_value, data, "SocketOption_SocketOptionLevel")
- if err != nil {
- return err
- }
- *x = SocketOption_SocketOptionLevel(value)
- return nil
-}
-
-type SocketOption_SocketOptionName int32
-
-const (
- SocketOption_SOCKET_SO_DEBUG SocketOption_SocketOptionName = 1
- SocketOption_SOCKET_SO_REUSEADDR SocketOption_SocketOptionName = 2
- SocketOption_SOCKET_SO_TYPE SocketOption_SocketOptionName = 3
- SocketOption_SOCKET_SO_ERROR SocketOption_SocketOptionName = 4
- SocketOption_SOCKET_SO_DONTROUTE SocketOption_SocketOptionName = 5
- SocketOption_SOCKET_SO_BROADCAST SocketOption_SocketOptionName = 6
- SocketOption_SOCKET_SO_SNDBUF SocketOption_SocketOptionName = 7
- SocketOption_SOCKET_SO_RCVBUF SocketOption_SocketOptionName = 8
- SocketOption_SOCKET_SO_KEEPALIVE SocketOption_SocketOptionName = 9
- SocketOption_SOCKET_SO_OOBINLINE SocketOption_SocketOptionName = 10
- SocketOption_SOCKET_SO_LINGER SocketOption_SocketOptionName = 13
- SocketOption_SOCKET_SO_RCVTIMEO SocketOption_SocketOptionName = 20
- SocketOption_SOCKET_SO_SNDTIMEO SocketOption_SocketOptionName = 21
- SocketOption_SOCKET_IP_TOS SocketOption_SocketOptionName = 1
- SocketOption_SOCKET_IP_TTL SocketOption_SocketOptionName = 2
- SocketOption_SOCKET_IP_HDRINCL SocketOption_SocketOptionName = 3
- SocketOption_SOCKET_IP_OPTIONS SocketOption_SocketOptionName = 4
- SocketOption_SOCKET_TCP_NODELAY SocketOption_SocketOptionName = 1
- SocketOption_SOCKET_TCP_MAXSEG SocketOption_SocketOptionName = 2
- SocketOption_SOCKET_TCP_CORK SocketOption_SocketOptionName = 3
- SocketOption_SOCKET_TCP_KEEPIDLE SocketOption_SocketOptionName = 4
- SocketOption_SOCKET_TCP_KEEPINTVL SocketOption_SocketOptionName = 5
- SocketOption_SOCKET_TCP_KEEPCNT SocketOption_SocketOptionName = 6
- SocketOption_SOCKET_TCP_SYNCNT SocketOption_SocketOptionName = 7
- SocketOption_SOCKET_TCP_LINGER2 SocketOption_SocketOptionName = 8
- SocketOption_SOCKET_TCP_DEFER_ACCEPT SocketOption_SocketOptionName = 9
- SocketOption_SOCKET_TCP_WINDOW_CLAMP SocketOption_SocketOptionName = 10
- SocketOption_SOCKET_TCP_INFO SocketOption_SocketOptionName = 11
- SocketOption_SOCKET_TCP_QUICKACK SocketOption_SocketOptionName = 12
-)
-
-var SocketOption_SocketOptionName_name = map[int32]string{
- 1: "SOCKET_SO_DEBUG",
- 2: "SOCKET_SO_REUSEADDR",
- 3: "SOCKET_SO_TYPE",
- 4: "SOCKET_SO_ERROR",
- 5: "SOCKET_SO_DONTROUTE",
- 6: "SOCKET_SO_BROADCAST",
- 7: "SOCKET_SO_SNDBUF",
- 8: "SOCKET_SO_RCVBUF",
- 9: "SOCKET_SO_KEEPALIVE",
- 10: "SOCKET_SO_OOBINLINE",
- 13: "SOCKET_SO_LINGER",
- 20: "SOCKET_SO_RCVTIMEO",
- 21: "SOCKET_SO_SNDTIMEO",
- // Duplicate value: 1: "SOCKET_IP_TOS",
- // Duplicate value: 2: "SOCKET_IP_TTL",
- // Duplicate value: 3: "SOCKET_IP_HDRINCL",
- // Duplicate value: 4: "SOCKET_IP_OPTIONS",
- // Duplicate value: 1: "SOCKET_TCP_NODELAY",
- // Duplicate value: 2: "SOCKET_TCP_MAXSEG",
- // Duplicate value: 3: "SOCKET_TCP_CORK",
- // Duplicate value: 4: "SOCKET_TCP_KEEPIDLE",
- // Duplicate value: 5: "SOCKET_TCP_KEEPINTVL",
- // Duplicate value: 6: "SOCKET_TCP_KEEPCNT",
- // Duplicate value: 7: "SOCKET_TCP_SYNCNT",
- // Duplicate value: 8: "SOCKET_TCP_LINGER2",
- // Duplicate value: 9: "SOCKET_TCP_DEFER_ACCEPT",
- // Duplicate value: 10: "SOCKET_TCP_WINDOW_CLAMP",
- 11: "SOCKET_TCP_INFO",
- 12: "SOCKET_TCP_QUICKACK",
-}
-var SocketOption_SocketOptionName_value = map[string]int32{
- "SOCKET_SO_DEBUG": 1,
- "SOCKET_SO_REUSEADDR": 2,
- "SOCKET_SO_TYPE": 3,
- "SOCKET_SO_ERROR": 4,
- "SOCKET_SO_DONTROUTE": 5,
- "SOCKET_SO_BROADCAST": 6,
- "SOCKET_SO_SNDBUF": 7,
- "SOCKET_SO_RCVBUF": 8,
- "SOCKET_SO_KEEPALIVE": 9,
- "SOCKET_SO_OOBINLINE": 10,
- "SOCKET_SO_LINGER": 13,
- "SOCKET_SO_RCVTIMEO": 20,
- "SOCKET_SO_SNDTIMEO": 21,
- "SOCKET_IP_TOS": 1,
- "SOCKET_IP_TTL": 2,
- "SOCKET_IP_HDRINCL": 3,
- "SOCKET_IP_OPTIONS": 4,
- "SOCKET_TCP_NODELAY": 1,
- "SOCKET_TCP_MAXSEG": 2,
- "SOCKET_TCP_CORK": 3,
- "SOCKET_TCP_KEEPIDLE": 4,
- "SOCKET_TCP_KEEPINTVL": 5,
- "SOCKET_TCP_KEEPCNT": 6,
- "SOCKET_TCP_SYNCNT": 7,
- "SOCKET_TCP_LINGER2": 8,
- "SOCKET_TCP_DEFER_ACCEPT": 9,
- "SOCKET_TCP_WINDOW_CLAMP": 10,
- "SOCKET_TCP_INFO": 11,
- "SOCKET_TCP_QUICKACK": 12,
-}
-
-func (x SocketOption_SocketOptionName) Enum() *SocketOption_SocketOptionName {
- p := new(SocketOption_SocketOptionName)
- *p = x
- return p
-}
-func (x SocketOption_SocketOptionName) String() string {
- return proto.EnumName(SocketOption_SocketOptionName_name, int32(x))
-}
-func (x *SocketOption_SocketOptionName) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(SocketOption_SocketOptionName_value, data, "SocketOption_SocketOptionName")
- if err != nil {
- return err
- }
- *x = SocketOption_SocketOptionName(value)
- return nil
-}
-
-type ShutDownRequest_How int32
-
-const (
- ShutDownRequest_SOCKET_SHUT_RD ShutDownRequest_How = 1
- ShutDownRequest_SOCKET_SHUT_WR ShutDownRequest_How = 2
- ShutDownRequest_SOCKET_SHUT_RDWR ShutDownRequest_How = 3
-)
-
-var ShutDownRequest_How_name = map[int32]string{
- 1: "SOCKET_SHUT_RD",
- 2: "SOCKET_SHUT_WR",
- 3: "SOCKET_SHUT_RDWR",
-}
-var ShutDownRequest_How_value = map[string]int32{
- "SOCKET_SHUT_RD": 1,
- "SOCKET_SHUT_WR": 2,
- "SOCKET_SHUT_RDWR": 3,
-}
-
-func (x ShutDownRequest_How) Enum() *ShutDownRequest_How {
- p := new(ShutDownRequest_How)
- *p = x
- return p
-}
-func (x ShutDownRequest_How) String() string {
- return proto.EnumName(ShutDownRequest_How_name, int32(x))
-}
-func (x *ShutDownRequest_How) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(ShutDownRequest_How_value, data, "ShutDownRequest_How")
- if err != nil {
- return err
- }
- *x = ShutDownRequest_How(value)
- return nil
-}
-
-type ReceiveRequest_Flags int32
-
-const (
- ReceiveRequest_MSG_OOB ReceiveRequest_Flags = 1
- ReceiveRequest_MSG_PEEK ReceiveRequest_Flags = 2
-)
-
-var ReceiveRequest_Flags_name = map[int32]string{
- 1: "MSG_OOB",
- 2: "MSG_PEEK",
-}
-var ReceiveRequest_Flags_value = map[string]int32{
- "MSG_OOB": 1,
- "MSG_PEEK": 2,
-}
-
-func (x ReceiveRequest_Flags) Enum() *ReceiveRequest_Flags {
- p := new(ReceiveRequest_Flags)
- *p = x
- return p
-}
-func (x ReceiveRequest_Flags) String() string {
- return proto.EnumName(ReceiveRequest_Flags_name, int32(x))
-}
-func (x *ReceiveRequest_Flags) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(ReceiveRequest_Flags_value, data, "ReceiveRequest_Flags")
- if err != nil {
- return err
- }
- *x = ReceiveRequest_Flags(value)
- return nil
-}
-
-type PollEvent_PollEventFlag int32
-
-const (
- PollEvent_SOCKET_POLLNONE PollEvent_PollEventFlag = 0
- PollEvent_SOCKET_POLLIN PollEvent_PollEventFlag = 1
- PollEvent_SOCKET_POLLPRI PollEvent_PollEventFlag = 2
- PollEvent_SOCKET_POLLOUT PollEvent_PollEventFlag = 4
- PollEvent_SOCKET_POLLERR PollEvent_PollEventFlag = 8
- PollEvent_SOCKET_POLLHUP PollEvent_PollEventFlag = 16
- PollEvent_SOCKET_POLLNVAL PollEvent_PollEventFlag = 32
- PollEvent_SOCKET_POLLRDNORM PollEvent_PollEventFlag = 64
- PollEvent_SOCKET_POLLRDBAND PollEvent_PollEventFlag = 128
- PollEvent_SOCKET_POLLWRNORM PollEvent_PollEventFlag = 256
- PollEvent_SOCKET_POLLWRBAND PollEvent_PollEventFlag = 512
- PollEvent_SOCKET_POLLMSG PollEvent_PollEventFlag = 1024
- PollEvent_SOCKET_POLLREMOVE PollEvent_PollEventFlag = 4096
- PollEvent_SOCKET_POLLRDHUP PollEvent_PollEventFlag = 8192
-)
-
-var PollEvent_PollEventFlag_name = map[int32]string{
- 0: "SOCKET_POLLNONE",
- 1: "SOCKET_POLLIN",
- 2: "SOCKET_POLLPRI",
- 4: "SOCKET_POLLOUT",
- 8: "SOCKET_POLLERR",
- 16: "SOCKET_POLLHUP",
- 32: "SOCKET_POLLNVAL",
- 64: "SOCKET_POLLRDNORM",
- 128: "SOCKET_POLLRDBAND",
- 256: "SOCKET_POLLWRNORM",
- 512: "SOCKET_POLLWRBAND",
- 1024: "SOCKET_POLLMSG",
- 4096: "SOCKET_POLLREMOVE",
- 8192: "SOCKET_POLLRDHUP",
-}
-var PollEvent_PollEventFlag_value = map[string]int32{
- "SOCKET_POLLNONE": 0,
- "SOCKET_POLLIN": 1,
- "SOCKET_POLLPRI": 2,
- "SOCKET_POLLOUT": 4,
- "SOCKET_POLLERR": 8,
- "SOCKET_POLLHUP": 16,
- "SOCKET_POLLNVAL": 32,
- "SOCKET_POLLRDNORM": 64,
- "SOCKET_POLLRDBAND": 128,
- "SOCKET_POLLWRNORM": 256,
- "SOCKET_POLLWRBAND": 512,
- "SOCKET_POLLMSG": 1024,
- "SOCKET_POLLREMOVE": 4096,
- "SOCKET_POLLRDHUP": 8192,
-}
-
-func (x PollEvent_PollEventFlag) Enum() *PollEvent_PollEventFlag {
- p := new(PollEvent_PollEventFlag)
- *p = x
- return p
-}
-func (x PollEvent_PollEventFlag) String() string {
- return proto.EnumName(PollEvent_PollEventFlag_name, int32(x))
-}
-func (x *PollEvent_PollEventFlag) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(PollEvent_PollEventFlag_value, data, "PollEvent_PollEventFlag")
- if err != nil {
- return err
- }
- *x = PollEvent_PollEventFlag(value)
- return nil
-}
-
-type ResolveReply_ErrorCode int32
-
-const (
- ResolveReply_SOCKET_EAI_ADDRFAMILY ResolveReply_ErrorCode = 1
- ResolveReply_SOCKET_EAI_AGAIN ResolveReply_ErrorCode = 2
- ResolveReply_SOCKET_EAI_BADFLAGS ResolveReply_ErrorCode = 3
- ResolveReply_SOCKET_EAI_FAIL ResolveReply_ErrorCode = 4
- ResolveReply_SOCKET_EAI_FAMILY ResolveReply_ErrorCode = 5
- ResolveReply_SOCKET_EAI_MEMORY ResolveReply_ErrorCode = 6
- ResolveReply_SOCKET_EAI_NODATA ResolveReply_ErrorCode = 7
- ResolveReply_SOCKET_EAI_NONAME ResolveReply_ErrorCode = 8
- ResolveReply_SOCKET_EAI_SERVICE ResolveReply_ErrorCode = 9
- ResolveReply_SOCKET_EAI_SOCKTYPE ResolveReply_ErrorCode = 10
- ResolveReply_SOCKET_EAI_SYSTEM ResolveReply_ErrorCode = 11
- ResolveReply_SOCKET_EAI_BADHINTS ResolveReply_ErrorCode = 12
- ResolveReply_SOCKET_EAI_PROTOCOL ResolveReply_ErrorCode = 13
- ResolveReply_SOCKET_EAI_OVERFLOW ResolveReply_ErrorCode = 14
- ResolveReply_SOCKET_EAI_MAX ResolveReply_ErrorCode = 15
-)
-
-var ResolveReply_ErrorCode_name = map[int32]string{
- 1: "SOCKET_EAI_ADDRFAMILY",
- 2: "SOCKET_EAI_AGAIN",
- 3: "SOCKET_EAI_BADFLAGS",
- 4: "SOCKET_EAI_FAIL",
- 5: "SOCKET_EAI_FAMILY",
- 6: "SOCKET_EAI_MEMORY",
- 7: "SOCKET_EAI_NODATA",
- 8: "SOCKET_EAI_NONAME",
- 9: "SOCKET_EAI_SERVICE",
- 10: "SOCKET_EAI_SOCKTYPE",
- 11: "SOCKET_EAI_SYSTEM",
- 12: "SOCKET_EAI_BADHINTS",
- 13: "SOCKET_EAI_PROTOCOL",
- 14: "SOCKET_EAI_OVERFLOW",
- 15: "SOCKET_EAI_MAX",
-}
-var ResolveReply_ErrorCode_value = map[string]int32{
- "SOCKET_EAI_ADDRFAMILY": 1,
- "SOCKET_EAI_AGAIN": 2,
- "SOCKET_EAI_BADFLAGS": 3,
- "SOCKET_EAI_FAIL": 4,
- "SOCKET_EAI_FAMILY": 5,
- "SOCKET_EAI_MEMORY": 6,
- "SOCKET_EAI_NODATA": 7,
- "SOCKET_EAI_NONAME": 8,
- "SOCKET_EAI_SERVICE": 9,
- "SOCKET_EAI_SOCKTYPE": 10,
- "SOCKET_EAI_SYSTEM": 11,
- "SOCKET_EAI_BADHINTS": 12,
- "SOCKET_EAI_PROTOCOL": 13,
- "SOCKET_EAI_OVERFLOW": 14,
- "SOCKET_EAI_MAX": 15,
-}
-
-func (x ResolveReply_ErrorCode) Enum() *ResolveReply_ErrorCode {
- p := new(ResolveReply_ErrorCode)
- *p = x
- return p
-}
-func (x ResolveReply_ErrorCode) String() string {
- return proto.EnumName(ResolveReply_ErrorCode_name, int32(x))
-}
-func (x *ResolveReply_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(ResolveReply_ErrorCode_value, data, "ResolveReply_ErrorCode")
- if err != nil {
- return err
- }
- *x = ResolveReply_ErrorCode(value)
- return nil
-}
-
-type RemoteSocketServiceError struct {
- SystemError *int32 `protobuf:"varint,1,opt,name=system_error,def=0" json:"system_error,omitempty"`
- ErrorDetail *string `protobuf:"bytes,2,opt,name=error_detail" json:"error_detail,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *RemoteSocketServiceError) Reset() { *m = RemoteSocketServiceError{} }
-func (m *RemoteSocketServiceError) String() string { return proto.CompactTextString(m) }
-func (*RemoteSocketServiceError) ProtoMessage() {}
-
-const Default_RemoteSocketServiceError_SystemError int32 = 0
-
-func (m *RemoteSocketServiceError) GetSystemError() int32 {
- if m != nil && m.SystemError != nil {
- return *m.SystemError
- }
- return Default_RemoteSocketServiceError_SystemError
-}
-
-func (m *RemoteSocketServiceError) GetErrorDetail() string {
- if m != nil && m.ErrorDetail != nil {
- return *m.ErrorDetail
- }
- return ""
-}
-
-type AddressPort struct {
- Port *int32 `protobuf:"varint,1,req,name=port" json:"port,omitempty"`
- PackedAddress []byte `protobuf:"bytes,2,opt,name=packed_address" json:"packed_address,omitempty"`
- HostnameHint *string `protobuf:"bytes,3,opt,name=hostname_hint" json:"hostname_hint,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AddressPort) Reset() { *m = AddressPort{} }
-func (m *AddressPort) String() string { return proto.CompactTextString(m) }
-func (*AddressPort) ProtoMessage() {}
-
-func (m *AddressPort) GetPort() int32 {
- if m != nil && m.Port != nil {
- return *m.Port
- }
- return 0
-}
-
-func (m *AddressPort) GetPackedAddress() []byte {
- if m != nil {
- return m.PackedAddress
- }
- return nil
-}
-
-func (m *AddressPort) GetHostnameHint() string {
- if m != nil && m.HostnameHint != nil {
- return *m.HostnameHint
- }
- return ""
-}
-
-type CreateSocketRequest struct {
- Family *CreateSocketRequest_SocketFamily `protobuf:"varint,1,req,name=family,enum=appengine.CreateSocketRequest_SocketFamily" json:"family,omitempty"`
- Protocol *CreateSocketRequest_SocketProtocol `protobuf:"varint,2,req,name=protocol,enum=appengine.CreateSocketRequest_SocketProtocol" json:"protocol,omitempty"`
- SocketOptions []*SocketOption `protobuf:"bytes,3,rep,name=socket_options" json:"socket_options,omitempty"`
- ProxyExternalIp *AddressPort `protobuf:"bytes,4,opt,name=proxy_external_ip" json:"proxy_external_ip,omitempty"`
- ListenBacklog *int32 `protobuf:"varint,5,opt,name=listen_backlog,def=0" json:"listen_backlog,omitempty"`
- RemoteIp *AddressPort `protobuf:"bytes,6,opt,name=remote_ip" json:"remote_ip,omitempty"`
- AppId *string `protobuf:"bytes,9,opt,name=app_id" json:"app_id,omitempty"`
- ProjectId *int64 `protobuf:"varint,10,opt,name=project_id" json:"project_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateSocketRequest) Reset() { *m = CreateSocketRequest{} }
-func (m *CreateSocketRequest) String() string { return proto.CompactTextString(m) }
-func (*CreateSocketRequest) ProtoMessage() {}
-
-const Default_CreateSocketRequest_ListenBacklog int32 = 0
-
-func (m *CreateSocketRequest) GetFamily() CreateSocketRequest_SocketFamily {
- if m != nil && m.Family != nil {
- return *m.Family
- }
- return CreateSocketRequest_IPv4
-}
-
-func (m *CreateSocketRequest) GetProtocol() CreateSocketRequest_SocketProtocol {
- if m != nil && m.Protocol != nil {
- return *m.Protocol
- }
- return CreateSocketRequest_TCP
-}
-
-func (m *CreateSocketRequest) GetSocketOptions() []*SocketOption {
- if m != nil {
- return m.SocketOptions
- }
- return nil
-}
-
-func (m *CreateSocketRequest) GetProxyExternalIp() *AddressPort {
- if m != nil {
- return m.ProxyExternalIp
- }
- return nil
-}
-
-func (m *CreateSocketRequest) GetListenBacklog() int32 {
- if m != nil && m.ListenBacklog != nil {
- return *m.ListenBacklog
- }
- return Default_CreateSocketRequest_ListenBacklog
-}
-
-func (m *CreateSocketRequest) GetRemoteIp() *AddressPort {
- if m != nil {
- return m.RemoteIp
- }
- return nil
-}
-
-func (m *CreateSocketRequest) GetAppId() string {
- if m != nil && m.AppId != nil {
- return *m.AppId
- }
- return ""
-}
-
-func (m *CreateSocketRequest) GetProjectId() int64 {
- if m != nil && m.ProjectId != nil {
- return *m.ProjectId
- }
- return 0
-}
-
-type CreateSocketReply struct {
- SocketDescriptor *string `protobuf:"bytes,1,opt,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- ServerAddress *AddressPort `protobuf:"bytes,3,opt,name=server_address" json:"server_address,omitempty"`
- ProxyExternalIp *AddressPort `protobuf:"bytes,4,opt,name=proxy_external_ip" json:"proxy_external_ip,omitempty"`
- XXX_extensions map[int32]proto.Extension `json:"-"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateSocketReply) Reset() { *m = CreateSocketReply{} }
-func (m *CreateSocketReply) String() string { return proto.CompactTextString(m) }
-func (*CreateSocketReply) ProtoMessage() {}
-
-var extRange_CreateSocketReply = []proto.ExtensionRange{
- {1000, 536870911},
-}
-
-func (*CreateSocketReply) ExtensionRangeArray() []proto.ExtensionRange {
- return extRange_CreateSocketReply
-}
-func (m *CreateSocketReply) ExtensionMap() map[int32]proto.Extension {
- if m.XXX_extensions == nil {
- m.XXX_extensions = make(map[int32]proto.Extension)
- }
- return m.XXX_extensions
-}
-
-func (m *CreateSocketReply) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *CreateSocketReply) GetServerAddress() *AddressPort {
- if m != nil {
- return m.ServerAddress
- }
- return nil
-}
-
-func (m *CreateSocketReply) GetProxyExternalIp() *AddressPort {
- if m != nil {
- return m.ProxyExternalIp
- }
- return nil
-}
-
-type BindRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- ProxyExternalIp *AddressPort `protobuf:"bytes,2,req,name=proxy_external_ip" json:"proxy_external_ip,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *BindRequest) Reset() { *m = BindRequest{} }
-func (m *BindRequest) String() string { return proto.CompactTextString(m) }
-func (*BindRequest) ProtoMessage() {}
-
-func (m *BindRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *BindRequest) GetProxyExternalIp() *AddressPort {
- if m != nil {
- return m.ProxyExternalIp
- }
- return nil
-}
-
-type BindReply struct {
- ProxyExternalIp *AddressPort `protobuf:"bytes,1,opt,name=proxy_external_ip" json:"proxy_external_ip,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *BindReply) Reset() { *m = BindReply{} }
-func (m *BindReply) String() string { return proto.CompactTextString(m) }
-func (*BindReply) ProtoMessage() {}
-
-func (m *BindReply) GetProxyExternalIp() *AddressPort {
- if m != nil {
- return m.ProxyExternalIp
- }
- return nil
-}
-
-type GetSocketNameRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetSocketNameRequest) Reset() { *m = GetSocketNameRequest{} }
-func (m *GetSocketNameRequest) String() string { return proto.CompactTextString(m) }
-func (*GetSocketNameRequest) ProtoMessage() {}
-
-func (m *GetSocketNameRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-type GetSocketNameReply struct {
- ProxyExternalIp *AddressPort `protobuf:"bytes,2,opt,name=proxy_external_ip" json:"proxy_external_ip,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetSocketNameReply) Reset() { *m = GetSocketNameReply{} }
-func (m *GetSocketNameReply) String() string { return proto.CompactTextString(m) }
-func (*GetSocketNameReply) ProtoMessage() {}
-
-func (m *GetSocketNameReply) GetProxyExternalIp() *AddressPort {
- if m != nil {
- return m.ProxyExternalIp
- }
- return nil
-}
-
-type GetPeerNameRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetPeerNameRequest) Reset() { *m = GetPeerNameRequest{} }
-func (m *GetPeerNameRequest) String() string { return proto.CompactTextString(m) }
-func (*GetPeerNameRequest) ProtoMessage() {}
-
-func (m *GetPeerNameRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-type GetPeerNameReply struct {
- PeerIp *AddressPort `protobuf:"bytes,2,opt,name=peer_ip" json:"peer_ip,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetPeerNameReply) Reset() { *m = GetPeerNameReply{} }
-func (m *GetPeerNameReply) String() string { return proto.CompactTextString(m) }
-func (*GetPeerNameReply) ProtoMessage() {}
-
-func (m *GetPeerNameReply) GetPeerIp() *AddressPort {
- if m != nil {
- return m.PeerIp
- }
- return nil
-}
-
-type SocketOption struct {
- Level *SocketOption_SocketOptionLevel `protobuf:"varint,1,req,name=level,enum=appengine.SocketOption_SocketOptionLevel" json:"level,omitempty"`
- Option *SocketOption_SocketOptionName `protobuf:"varint,2,req,name=option,enum=appengine.SocketOption_SocketOptionName" json:"option,omitempty"`
- Value []byte `protobuf:"bytes,3,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SocketOption) Reset() { *m = SocketOption{} }
-func (m *SocketOption) String() string { return proto.CompactTextString(m) }
-func (*SocketOption) ProtoMessage() {}
-
-func (m *SocketOption) GetLevel() SocketOption_SocketOptionLevel {
- if m != nil && m.Level != nil {
- return *m.Level
- }
- return SocketOption_SOCKET_SOL_IP
-}
-
-func (m *SocketOption) GetOption() SocketOption_SocketOptionName {
- if m != nil && m.Option != nil {
- return *m.Option
- }
- return SocketOption_SOCKET_SO_DEBUG
-}
-
-func (m *SocketOption) GetValue() []byte {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type SetSocketOptionsRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- Options []*SocketOption `protobuf:"bytes,2,rep,name=options" json:"options,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SetSocketOptionsRequest) Reset() { *m = SetSocketOptionsRequest{} }
-func (m *SetSocketOptionsRequest) String() string { return proto.CompactTextString(m) }
-func (*SetSocketOptionsRequest) ProtoMessage() {}
-
-func (m *SetSocketOptionsRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *SetSocketOptionsRequest) GetOptions() []*SocketOption {
- if m != nil {
- return m.Options
- }
- return nil
-}
-
-type SetSocketOptionsReply struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SetSocketOptionsReply) Reset() { *m = SetSocketOptionsReply{} }
-func (m *SetSocketOptionsReply) String() string { return proto.CompactTextString(m) }
-func (*SetSocketOptionsReply) ProtoMessage() {}
-
-type GetSocketOptionsRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- Options []*SocketOption `protobuf:"bytes,2,rep,name=options" json:"options,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetSocketOptionsRequest) Reset() { *m = GetSocketOptionsRequest{} }
-func (m *GetSocketOptionsRequest) String() string { return proto.CompactTextString(m) }
-func (*GetSocketOptionsRequest) ProtoMessage() {}
-
-func (m *GetSocketOptionsRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *GetSocketOptionsRequest) GetOptions() []*SocketOption {
- if m != nil {
- return m.Options
- }
- return nil
-}
-
-type GetSocketOptionsReply struct {
- Options []*SocketOption `protobuf:"bytes,2,rep,name=options" json:"options,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetSocketOptionsReply) Reset() { *m = GetSocketOptionsReply{} }
-func (m *GetSocketOptionsReply) String() string { return proto.CompactTextString(m) }
-func (*GetSocketOptionsReply) ProtoMessage() {}
-
-func (m *GetSocketOptionsReply) GetOptions() []*SocketOption {
- if m != nil {
- return m.Options
- }
- return nil
-}
-
-type ConnectRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- RemoteIp *AddressPort `protobuf:"bytes,2,req,name=remote_ip" json:"remote_ip,omitempty"`
- TimeoutSeconds *float64 `protobuf:"fixed64,3,opt,name=timeout_seconds,def=-1" json:"timeout_seconds,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ConnectRequest) Reset() { *m = ConnectRequest{} }
-func (m *ConnectRequest) String() string { return proto.CompactTextString(m) }
-func (*ConnectRequest) ProtoMessage() {}
-
-const Default_ConnectRequest_TimeoutSeconds float64 = -1
-
-func (m *ConnectRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *ConnectRequest) GetRemoteIp() *AddressPort {
- if m != nil {
- return m.RemoteIp
- }
- return nil
-}
-
-func (m *ConnectRequest) GetTimeoutSeconds() float64 {
- if m != nil && m.TimeoutSeconds != nil {
- return *m.TimeoutSeconds
- }
- return Default_ConnectRequest_TimeoutSeconds
-}
-
-type ConnectReply struct {
- ProxyExternalIp *AddressPort `protobuf:"bytes,1,opt,name=proxy_external_ip" json:"proxy_external_ip,omitempty"`
- XXX_extensions map[int32]proto.Extension `json:"-"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ConnectReply) Reset() { *m = ConnectReply{} }
-func (m *ConnectReply) String() string { return proto.CompactTextString(m) }
-func (*ConnectReply) ProtoMessage() {}
-
-var extRange_ConnectReply = []proto.ExtensionRange{
- {1000, 536870911},
-}
-
-func (*ConnectReply) ExtensionRangeArray() []proto.ExtensionRange {
- return extRange_ConnectReply
-}
-func (m *ConnectReply) ExtensionMap() map[int32]proto.Extension {
- if m.XXX_extensions == nil {
- m.XXX_extensions = make(map[int32]proto.Extension)
- }
- return m.XXX_extensions
-}
-
-func (m *ConnectReply) GetProxyExternalIp() *AddressPort {
- if m != nil {
- return m.ProxyExternalIp
- }
- return nil
-}
-
-type ListenRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- Backlog *int32 `protobuf:"varint,2,req,name=backlog" json:"backlog,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ListenRequest) Reset() { *m = ListenRequest{} }
-func (m *ListenRequest) String() string { return proto.CompactTextString(m) }
-func (*ListenRequest) ProtoMessage() {}
-
-func (m *ListenRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *ListenRequest) GetBacklog() int32 {
- if m != nil && m.Backlog != nil {
- return *m.Backlog
- }
- return 0
-}
-
-type ListenReply struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ListenReply) Reset() { *m = ListenReply{} }
-func (m *ListenReply) String() string { return proto.CompactTextString(m) }
-func (*ListenReply) ProtoMessage() {}
-
-type AcceptRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- TimeoutSeconds *float64 `protobuf:"fixed64,2,opt,name=timeout_seconds,def=-1" json:"timeout_seconds,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AcceptRequest) Reset() { *m = AcceptRequest{} }
-func (m *AcceptRequest) String() string { return proto.CompactTextString(m) }
-func (*AcceptRequest) ProtoMessage() {}
-
-const Default_AcceptRequest_TimeoutSeconds float64 = -1
-
-func (m *AcceptRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *AcceptRequest) GetTimeoutSeconds() float64 {
- if m != nil && m.TimeoutSeconds != nil {
- return *m.TimeoutSeconds
- }
- return Default_AcceptRequest_TimeoutSeconds
-}
-
-type AcceptReply struct {
- NewSocketDescriptor []byte `protobuf:"bytes,2,opt,name=new_socket_descriptor" json:"new_socket_descriptor,omitempty"`
- RemoteAddress *AddressPort `protobuf:"bytes,3,opt,name=remote_address" json:"remote_address,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *AcceptReply) Reset() { *m = AcceptReply{} }
-func (m *AcceptReply) String() string { return proto.CompactTextString(m) }
-func (*AcceptReply) ProtoMessage() {}
-
-func (m *AcceptReply) GetNewSocketDescriptor() []byte {
- if m != nil {
- return m.NewSocketDescriptor
- }
- return nil
-}
-
-func (m *AcceptReply) GetRemoteAddress() *AddressPort {
- if m != nil {
- return m.RemoteAddress
- }
- return nil
-}
-
-type ShutDownRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- How *ShutDownRequest_How `protobuf:"varint,2,req,name=how,enum=appengine.ShutDownRequest_How" json:"how,omitempty"`
- SendOffset *int64 `protobuf:"varint,3,req,name=send_offset" json:"send_offset,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ShutDownRequest) Reset() { *m = ShutDownRequest{} }
-func (m *ShutDownRequest) String() string { return proto.CompactTextString(m) }
-func (*ShutDownRequest) ProtoMessage() {}
-
-func (m *ShutDownRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *ShutDownRequest) GetHow() ShutDownRequest_How {
- if m != nil && m.How != nil {
- return *m.How
- }
- return ShutDownRequest_SOCKET_SHUT_RD
-}
-
-func (m *ShutDownRequest) GetSendOffset() int64 {
- if m != nil && m.SendOffset != nil {
- return *m.SendOffset
- }
- return 0
-}
-
-type ShutDownReply struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ShutDownReply) Reset() { *m = ShutDownReply{} }
-func (m *ShutDownReply) String() string { return proto.CompactTextString(m) }
-func (*ShutDownReply) ProtoMessage() {}
-
-type CloseRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- SendOffset *int64 `protobuf:"varint,2,opt,name=send_offset,def=-1" json:"send_offset,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CloseRequest) Reset() { *m = CloseRequest{} }
-func (m *CloseRequest) String() string { return proto.CompactTextString(m) }
-func (*CloseRequest) ProtoMessage() {}
-
-const Default_CloseRequest_SendOffset int64 = -1
-
-func (m *CloseRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *CloseRequest) GetSendOffset() int64 {
- if m != nil && m.SendOffset != nil {
- return *m.SendOffset
- }
- return Default_CloseRequest_SendOffset
-}
-
-type CloseReply struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CloseReply) Reset() { *m = CloseReply{} }
-func (m *CloseReply) String() string { return proto.CompactTextString(m) }
-func (*CloseReply) ProtoMessage() {}
-
-type SendRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- Data []byte `protobuf:"bytes,2,req,name=data" json:"data,omitempty"`
- StreamOffset *int64 `protobuf:"varint,3,req,name=stream_offset" json:"stream_offset,omitempty"`
- Flags *int32 `protobuf:"varint,4,opt,name=flags,def=0" json:"flags,omitempty"`
- SendTo *AddressPort `protobuf:"bytes,5,opt,name=send_to" json:"send_to,omitempty"`
- TimeoutSeconds *float64 `protobuf:"fixed64,6,opt,name=timeout_seconds,def=-1" json:"timeout_seconds,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SendRequest) Reset() { *m = SendRequest{} }
-func (m *SendRequest) String() string { return proto.CompactTextString(m) }
-func (*SendRequest) ProtoMessage() {}
-
-const Default_SendRequest_Flags int32 = 0
-const Default_SendRequest_TimeoutSeconds float64 = -1
-
-func (m *SendRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *SendRequest) GetData() []byte {
- if m != nil {
- return m.Data
- }
- return nil
-}
-
-func (m *SendRequest) GetStreamOffset() int64 {
- if m != nil && m.StreamOffset != nil {
- return *m.StreamOffset
- }
- return 0
-}
-
-func (m *SendRequest) GetFlags() int32 {
- if m != nil && m.Flags != nil {
- return *m.Flags
- }
- return Default_SendRequest_Flags
-}
-
-func (m *SendRequest) GetSendTo() *AddressPort {
- if m != nil {
- return m.SendTo
- }
- return nil
-}
-
-func (m *SendRequest) GetTimeoutSeconds() float64 {
- if m != nil && m.TimeoutSeconds != nil {
- return *m.TimeoutSeconds
- }
- return Default_SendRequest_TimeoutSeconds
-}
-
-type SendReply struct {
- DataSent *int32 `protobuf:"varint,1,opt,name=data_sent" json:"data_sent,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SendReply) Reset() { *m = SendReply{} }
-func (m *SendReply) String() string { return proto.CompactTextString(m) }
-func (*SendReply) ProtoMessage() {}
-
-func (m *SendReply) GetDataSent() int32 {
- if m != nil && m.DataSent != nil {
- return *m.DataSent
- }
- return 0
-}
-
-type ReceiveRequest struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- DataSize *int32 `protobuf:"varint,2,req,name=data_size" json:"data_size,omitempty"`
- Flags *int32 `protobuf:"varint,3,opt,name=flags,def=0" json:"flags,omitempty"`
- TimeoutSeconds *float64 `protobuf:"fixed64,5,opt,name=timeout_seconds,def=-1" json:"timeout_seconds,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ReceiveRequest) Reset() { *m = ReceiveRequest{} }
-func (m *ReceiveRequest) String() string { return proto.CompactTextString(m) }
-func (*ReceiveRequest) ProtoMessage() {}
-
-const Default_ReceiveRequest_Flags int32 = 0
-const Default_ReceiveRequest_TimeoutSeconds float64 = -1
-
-func (m *ReceiveRequest) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *ReceiveRequest) GetDataSize() int32 {
- if m != nil && m.DataSize != nil {
- return *m.DataSize
- }
- return 0
-}
-
-func (m *ReceiveRequest) GetFlags() int32 {
- if m != nil && m.Flags != nil {
- return *m.Flags
- }
- return Default_ReceiveRequest_Flags
-}
-
-func (m *ReceiveRequest) GetTimeoutSeconds() float64 {
- if m != nil && m.TimeoutSeconds != nil {
- return *m.TimeoutSeconds
- }
- return Default_ReceiveRequest_TimeoutSeconds
-}
-
-type ReceiveReply struct {
- StreamOffset *int64 `protobuf:"varint,2,opt,name=stream_offset" json:"stream_offset,omitempty"`
- Data []byte `protobuf:"bytes,3,opt,name=data" json:"data,omitempty"`
- ReceivedFrom *AddressPort `protobuf:"bytes,4,opt,name=received_from" json:"received_from,omitempty"`
- BufferSize *int32 `protobuf:"varint,5,opt,name=buffer_size" json:"buffer_size,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ReceiveReply) Reset() { *m = ReceiveReply{} }
-func (m *ReceiveReply) String() string { return proto.CompactTextString(m) }
-func (*ReceiveReply) ProtoMessage() {}
-
-func (m *ReceiveReply) GetStreamOffset() int64 {
- if m != nil && m.StreamOffset != nil {
- return *m.StreamOffset
- }
- return 0
-}
-
-func (m *ReceiveReply) GetData() []byte {
- if m != nil {
- return m.Data
- }
- return nil
-}
-
-func (m *ReceiveReply) GetReceivedFrom() *AddressPort {
- if m != nil {
- return m.ReceivedFrom
- }
- return nil
-}
-
-func (m *ReceiveReply) GetBufferSize() int32 {
- if m != nil && m.BufferSize != nil {
- return *m.BufferSize
- }
- return 0
-}
-
-type PollEvent struct {
- SocketDescriptor *string `protobuf:"bytes,1,req,name=socket_descriptor" json:"socket_descriptor,omitempty"`
- RequestedEvents *int32 `protobuf:"varint,2,req,name=requested_events" json:"requested_events,omitempty"`
- ObservedEvents *int32 `protobuf:"varint,3,req,name=observed_events" json:"observed_events,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PollEvent) Reset() { *m = PollEvent{} }
-func (m *PollEvent) String() string { return proto.CompactTextString(m) }
-func (*PollEvent) ProtoMessage() {}
-
-func (m *PollEvent) GetSocketDescriptor() string {
- if m != nil && m.SocketDescriptor != nil {
- return *m.SocketDescriptor
- }
- return ""
-}
-
-func (m *PollEvent) GetRequestedEvents() int32 {
- if m != nil && m.RequestedEvents != nil {
- return *m.RequestedEvents
- }
- return 0
-}
-
-func (m *PollEvent) GetObservedEvents() int32 {
- if m != nil && m.ObservedEvents != nil {
- return *m.ObservedEvents
- }
- return 0
-}
-
-type PollRequest struct {
- Events []*PollEvent `protobuf:"bytes,1,rep,name=events" json:"events,omitempty"`
- TimeoutSeconds *float64 `protobuf:"fixed64,2,opt,name=timeout_seconds,def=-1" json:"timeout_seconds,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PollRequest) Reset() { *m = PollRequest{} }
-func (m *PollRequest) String() string { return proto.CompactTextString(m) }
-func (*PollRequest) ProtoMessage() {}
-
-const Default_PollRequest_TimeoutSeconds float64 = -1
-
-func (m *PollRequest) GetEvents() []*PollEvent {
- if m != nil {
- return m.Events
- }
- return nil
-}
-
-func (m *PollRequest) GetTimeoutSeconds() float64 {
- if m != nil && m.TimeoutSeconds != nil {
- return *m.TimeoutSeconds
- }
- return Default_PollRequest_TimeoutSeconds
-}
-
-type PollReply struct {
- Events []*PollEvent `protobuf:"bytes,2,rep,name=events" json:"events,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PollReply) Reset() { *m = PollReply{} }
-func (m *PollReply) String() string { return proto.CompactTextString(m) }
-func (*PollReply) ProtoMessage() {}
-
-func (m *PollReply) GetEvents() []*PollEvent {
- if m != nil {
- return m.Events
- }
- return nil
-}
-
-type ResolveRequest struct {
- Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
- AddressFamilies []CreateSocketRequest_SocketFamily `protobuf:"varint,2,rep,name=address_families,enum=appengine.CreateSocketRequest_SocketFamily" json:"address_families,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ResolveRequest) Reset() { *m = ResolveRequest{} }
-func (m *ResolveRequest) String() string { return proto.CompactTextString(m) }
-func (*ResolveRequest) ProtoMessage() {}
-
-func (m *ResolveRequest) GetName() string {
- if m != nil && m.Name != nil {
- return *m.Name
- }
- return ""
-}
-
-func (m *ResolveRequest) GetAddressFamilies() []CreateSocketRequest_SocketFamily {
- if m != nil {
- return m.AddressFamilies
- }
- return nil
-}
-
-type ResolveReply struct {
- PackedAddress [][]byte `protobuf:"bytes,2,rep,name=packed_address" json:"packed_address,omitempty"`
- CanonicalName *string `protobuf:"bytes,3,opt,name=canonical_name" json:"canonical_name,omitempty"`
- Aliases []string `protobuf:"bytes,4,rep,name=aliases" json:"aliases,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *ResolveReply) Reset() { *m = ResolveReply{} }
-func (m *ResolveReply) String() string { return proto.CompactTextString(m) }
-func (*ResolveReply) ProtoMessage() {}
-
-func (m *ResolveReply) GetPackedAddress() [][]byte {
- if m != nil {
- return m.PackedAddress
- }
- return nil
-}
-
-func (m *ResolveReply) GetCanonicalName() string {
- if m != nil && m.CanonicalName != nil {
- return *m.CanonicalName
- }
- return ""
-}
-
-func (m *ResolveReply) GetAliases() []string {
- if m != nil {
- return m.Aliases
- }
- return nil
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/socket/socket_service.proto b/vendor/google.golang.org/appengine/internal/socket/socket_service.proto
deleted file mode 100644
index 2fcc795..0000000
--- a/vendor/google.golang.org/appengine/internal/socket/socket_service.proto
+++ /dev/null
@@ -1,460 +0,0 @@
-syntax = "proto2";
-option go_package = "socket";
-
-package appengine;
-
-message RemoteSocketServiceError {
- enum ErrorCode {
- SYSTEM_ERROR = 1;
- GAI_ERROR = 2;
- FAILURE = 4;
- PERMISSION_DENIED = 5;
- INVALID_REQUEST = 6;
- SOCKET_CLOSED = 7;
- }
-
- enum SystemError {
- option allow_alias = true;
-
- SYS_SUCCESS = 0;
- SYS_EPERM = 1;
- SYS_ENOENT = 2;
- SYS_ESRCH = 3;
- SYS_EINTR = 4;
- SYS_EIO = 5;
- SYS_ENXIO = 6;
- SYS_E2BIG = 7;
- SYS_ENOEXEC = 8;
- SYS_EBADF = 9;
- SYS_ECHILD = 10;
- SYS_EAGAIN = 11;
- SYS_EWOULDBLOCK = 11;
- SYS_ENOMEM = 12;
- SYS_EACCES = 13;
- SYS_EFAULT = 14;
- SYS_ENOTBLK = 15;
- SYS_EBUSY = 16;
- SYS_EEXIST = 17;
- SYS_EXDEV = 18;
- SYS_ENODEV = 19;
- SYS_ENOTDIR = 20;
- SYS_EISDIR = 21;
- SYS_EINVAL = 22;
- SYS_ENFILE = 23;
- SYS_EMFILE = 24;
- SYS_ENOTTY = 25;
- SYS_ETXTBSY = 26;
- SYS_EFBIG = 27;
- SYS_ENOSPC = 28;
- SYS_ESPIPE = 29;
- SYS_EROFS = 30;
- SYS_EMLINK = 31;
- SYS_EPIPE = 32;
- SYS_EDOM = 33;
- SYS_ERANGE = 34;
- SYS_EDEADLK = 35;
- SYS_EDEADLOCK = 35;
- SYS_ENAMETOOLONG = 36;
- SYS_ENOLCK = 37;
- SYS_ENOSYS = 38;
- SYS_ENOTEMPTY = 39;
- SYS_ELOOP = 40;
- SYS_ENOMSG = 42;
- SYS_EIDRM = 43;
- SYS_ECHRNG = 44;
- SYS_EL2NSYNC = 45;
- SYS_EL3HLT = 46;
- SYS_EL3RST = 47;
- SYS_ELNRNG = 48;
- SYS_EUNATCH = 49;
- SYS_ENOCSI = 50;
- SYS_EL2HLT = 51;
- SYS_EBADE = 52;
- SYS_EBADR = 53;
- SYS_EXFULL = 54;
- SYS_ENOANO = 55;
- SYS_EBADRQC = 56;
- SYS_EBADSLT = 57;
- SYS_EBFONT = 59;
- SYS_ENOSTR = 60;
- SYS_ENODATA = 61;
- SYS_ETIME = 62;
- SYS_ENOSR = 63;
- SYS_ENONET = 64;
- SYS_ENOPKG = 65;
- SYS_EREMOTE = 66;
- SYS_ENOLINK = 67;
- SYS_EADV = 68;
- SYS_ESRMNT = 69;
- SYS_ECOMM = 70;
- SYS_EPROTO = 71;
- SYS_EMULTIHOP = 72;
- SYS_EDOTDOT = 73;
- SYS_EBADMSG = 74;
- SYS_EOVERFLOW = 75;
- SYS_ENOTUNIQ = 76;
- SYS_EBADFD = 77;
- SYS_EREMCHG = 78;
- SYS_ELIBACC = 79;
- SYS_ELIBBAD = 80;
- SYS_ELIBSCN = 81;
- SYS_ELIBMAX = 82;
- SYS_ELIBEXEC = 83;
- SYS_EILSEQ = 84;
- SYS_ERESTART = 85;
- SYS_ESTRPIPE = 86;
- SYS_EUSERS = 87;
- SYS_ENOTSOCK = 88;
- SYS_EDESTADDRREQ = 89;
- SYS_EMSGSIZE = 90;
- SYS_EPROTOTYPE = 91;
- SYS_ENOPROTOOPT = 92;
- SYS_EPROTONOSUPPORT = 93;
- SYS_ESOCKTNOSUPPORT = 94;
- SYS_EOPNOTSUPP = 95;
- SYS_ENOTSUP = 95;
- SYS_EPFNOSUPPORT = 96;
- SYS_EAFNOSUPPORT = 97;
- SYS_EADDRINUSE = 98;
- SYS_EADDRNOTAVAIL = 99;
- SYS_ENETDOWN = 100;
- SYS_ENETUNREACH = 101;
- SYS_ENETRESET = 102;
- SYS_ECONNABORTED = 103;
- SYS_ECONNRESET = 104;
- SYS_ENOBUFS = 105;
- SYS_EISCONN = 106;
- SYS_ENOTCONN = 107;
- SYS_ESHUTDOWN = 108;
- SYS_ETOOMANYREFS = 109;
- SYS_ETIMEDOUT = 110;
- SYS_ECONNREFUSED = 111;
- SYS_EHOSTDOWN = 112;
- SYS_EHOSTUNREACH = 113;
- SYS_EALREADY = 114;
- SYS_EINPROGRESS = 115;
- SYS_ESTALE = 116;
- SYS_EUCLEAN = 117;
- SYS_ENOTNAM = 118;
- SYS_ENAVAIL = 119;
- SYS_EISNAM = 120;
- SYS_EREMOTEIO = 121;
- SYS_EDQUOT = 122;
- SYS_ENOMEDIUM = 123;
- SYS_EMEDIUMTYPE = 124;
- SYS_ECANCELED = 125;
- SYS_ENOKEY = 126;
- SYS_EKEYEXPIRED = 127;
- SYS_EKEYREVOKED = 128;
- SYS_EKEYREJECTED = 129;
- SYS_EOWNERDEAD = 130;
- SYS_ENOTRECOVERABLE = 131;
- SYS_ERFKILL = 132;
- }
-
- optional int32 system_error = 1 [default=0];
- optional string error_detail = 2;
-}
-
-message AddressPort {
- required int32 port = 1;
- optional bytes packed_address = 2;
-
- optional string hostname_hint = 3;
-}
-
-
-
-message CreateSocketRequest {
- enum SocketFamily {
- IPv4 = 1;
- IPv6 = 2;
- }
-
- enum SocketProtocol {
- TCP = 1;
- UDP = 2;
- }
-
- required SocketFamily family = 1;
- required SocketProtocol protocol = 2;
-
- repeated SocketOption socket_options = 3;
-
- optional AddressPort proxy_external_ip = 4;
-
- optional int32 listen_backlog = 5 [default=0];
-
- optional AddressPort remote_ip = 6;
-
- optional string app_id = 9;
-
- optional int64 project_id = 10;
-}
-
-message CreateSocketReply {
- optional string socket_descriptor = 1;
-
- optional AddressPort server_address = 3;
-
- optional AddressPort proxy_external_ip = 4;
-
- extensions 1000 to max;
-}
-
-
-
-message BindRequest {
- required string socket_descriptor = 1;
- required AddressPort proxy_external_ip = 2;
-}
-
-message BindReply {
- optional AddressPort proxy_external_ip = 1;
-}
-
-
-
-message GetSocketNameRequest {
- required string socket_descriptor = 1;
-}
-
-message GetSocketNameReply {
- optional AddressPort proxy_external_ip = 2;
-}
-
-
-
-message GetPeerNameRequest {
- required string socket_descriptor = 1;
-}
-
-message GetPeerNameReply {
- optional AddressPort peer_ip = 2;
-}
-
-
-message SocketOption {
-
- enum SocketOptionLevel {
- SOCKET_SOL_IP = 0;
- SOCKET_SOL_SOCKET = 1;
- SOCKET_SOL_TCP = 6;
- SOCKET_SOL_UDP = 17;
- }
-
- enum SocketOptionName {
- option allow_alias = true;
-
- SOCKET_SO_DEBUG = 1;
- SOCKET_SO_REUSEADDR = 2;
- SOCKET_SO_TYPE = 3;
- SOCKET_SO_ERROR = 4;
- SOCKET_SO_DONTROUTE = 5;
- SOCKET_SO_BROADCAST = 6;
- SOCKET_SO_SNDBUF = 7;
- SOCKET_SO_RCVBUF = 8;
- SOCKET_SO_KEEPALIVE = 9;
- SOCKET_SO_OOBINLINE = 10;
- SOCKET_SO_LINGER = 13;
- SOCKET_SO_RCVTIMEO = 20;
- SOCKET_SO_SNDTIMEO = 21;
-
- SOCKET_IP_TOS = 1;
- SOCKET_IP_TTL = 2;
- SOCKET_IP_HDRINCL = 3;
- SOCKET_IP_OPTIONS = 4;
-
- SOCKET_TCP_NODELAY = 1;
- SOCKET_TCP_MAXSEG = 2;
- SOCKET_TCP_CORK = 3;
- SOCKET_TCP_KEEPIDLE = 4;
- SOCKET_TCP_KEEPINTVL = 5;
- SOCKET_TCP_KEEPCNT = 6;
- SOCKET_TCP_SYNCNT = 7;
- SOCKET_TCP_LINGER2 = 8;
- SOCKET_TCP_DEFER_ACCEPT = 9;
- SOCKET_TCP_WINDOW_CLAMP = 10;
- SOCKET_TCP_INFO = 11;
- SOCKET_TCP_QUICKACK = 12;
- }
-
- required SocketOptionLevel level = 1;
- required SocketOptionName option = 2;
- required bytes value = 3;
-}
-
-
-message SetSocketOptionsRequest {
- required string socket_descriptor = 1;
- repeated SocketOption options = 2;
-}
-
-message SetSocketOptionsReply {
-}
-
-message GetSocketOptionsRequest {
- required string socket_descriptor = 1;
- repeated SocketOption options = 2;
-}
-
-message GetSocketOptionsReply {
- repeated SocketOption options = 2;
-}
-
-
-message ConnectRequest {
- required string socket_descriptor = 1;
- required AddressPort remote_ip = 2;
- optional double timeout_seconds = 3 [default=-1];
-}
-
-message ConnectReply {
- optional AddressPort proxy_external_ip = 1;
-
- extensions 1000 to max;
-}
-
-
-message ListenRequest {
- required string socket_descriptor = 1;
- required int32 backlog = 2;
-}
-
-message ListenReply {
-}
-
-
-message AcceptRequest {
- required string socket_descriptor = 1;
- optional double timeout_seconds = 2 [default=-1];
-}
-
-message AcceptReply {
- optional bytes new_socket_descriptor = 2;
- optional AddressPort remote_address = 3;
-}
-
-
-
-message ShutDownRequest {
- enum How {
- SOCKET_SHUT_RD = 1;
- SOCKET_SHUT_WR = 2;
- SOCKET_SHUT_RDWR = 3;
- }
- required string socket_descriptor = 1;
- required How how = 2;
- required int64 send_offset = 3;
-}
-
-message ShutDownReply {
-}
-
-
-
-message CloseRequest {
- required string socket_descriptor = 1;
- optional int64 send_offset = 2 [default=-1];
-}
-
-message CloseReply {
-}
-
-
-
-message SendRequest {
- required string socket_descriptor = 1;
- required bytes data = 2 [ctype=CORD];
- required int64 stream_offset = 3;
- optional int32 flags = 4 [default=0];
- optional AddressPort send_to = 5;
- optional double timeout_seconds = 6 [default=-1];
-}
-
-message SendReply {
- optional int32 data_sent = 1;
-}
-
-
-message ReceiveRequest {
- enum Flags {
- MSG_OOB = 1;
- MSG_PEEK = 2;
- }
- required string socket_descriptor = 1;
- required int32 data_size = 2;
- optional int32 flags = 3 [default=0];
- optional double timeout_seconds = 5 [default=-1];
-}
-
-message ReceiveReply {
- optional int64 stream_offset = 2;
- optional bytes data = 3 [ctype=CORD];
- optional AddressPort received_from = 4;
- optional int32 buffer_size = 5;
-}
-
-
-
-message PollEvent {
-
- enum PollEventFlag {
- SOCKET_POLLNONE = 0;
- SOCKET_POLLIN = 1;
- SOCKET_POLLPRI = 2;
- SOCKET_POLLOUT = 4;
- SOCKET_POLLERR = 8;
- SOCKET_POLLHUP = 16;
- SOCKET_POLLNVAL = 32;
- SOCKET_POLLRDNORM = 64;
- SOCKET_POLLRDBAND = 128;
- SOCKET_POLLWRNORM = 256;
- SOCKET_POLLWRBAND = 512;
- SOCKET_POLLMSG = 1024;
- SOCKET_POLLREMOVE = 4096;
- SOCKET_POLLRDHUP = 8192;
- };
-
- required string socket_descriptor = 1;
- required int32 requested_events = 2;
- required int32 observed_events = 3;
-}
-
-message PollRequest {
- repeated PollEvent events = 1;
- optional double timeout_seconds = 2 [default=-1];
-}
-
-message PollReply {
- repeated PollEvent events = 2;
-}
-
-message ResolveRequest {
- required string name = 1;
- repeated CreateSocketRequest.SocketFamily address_families = 2;
-}
-
-message ResolveReply {
- enum ErrorCode {
- SOCKET_EAI_ADDRFAMILY = 1;
- SOCKET_EAI_AGAIN = 2;
- SOCKET_EAI_BADFLAGS = 3;
- SOCKET_EAI_FAIL = 4;
- SOCKET_EAI_FAMILY = 5;
- SOCKET_EAI_MEMORY = 6;
- SOCKET_EAI_NODATA = 7;
- SOCKET_EAI_NONAME = 8;
- SOCKET_EAI_SERVICE = 9;
- SOCKET_EAI_SOCKTYPE = 10;
- SOCKET_EAI_SYSTEM = 11;
- SOCKET_EAI_BADHINTS = 12;
- SOCKET_EAI_PROTOCOL = 13;
- SOCKET_EAI_OVERFLOW = 14;
- SOCKET_EAI_MAX = 15;
- };
-
- repeated bytes packed_address = 2;
- optional string canonical_name = 3;
- repeated string aliases = 4;
-}
diff --git a/vendor/google.golang.org/appengine/internal/system/system_service.pb.go b/vendor/google.golang.org/appengine/internal/system/system_service.pb.go
deleted file mode 100644
index 56cc3f8..0000000
--- a/vendor/google.golang.org/appengine/internal/system/system_service.pb.go
+++ /dev/null
@@ -1,198 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/system/system_service.proto
-// DO NOT EDIT!
-
-/*
-Package system is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/system/system_service.proto
-
-It has these top-level messages:
- SystemServiceError
- SystemStat
- GetSystemStatsRequest
- GetSystemStatsResponse
- StartBackgroundRequestRequest
- StartBackgroundRequestResponse
-*/
-package system
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type SystemServiceError_ErrorCode int32
-
-const (
- SystemServiceError_OK SystemServiceError_ErrorCode = 0
- SystemServiceError_INTERNAL_ERROR SystemServiceError_ErrorCode = 1
- SystemServiceError_BACKEND_REQUIRED SystemServiceError_ErrorCode = 2
- SystemServiceError_LIMIT_REACHED SystemServiceError_ErrorCode = 3
-)
-
-var SystemServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "INTERNAL_ERROR",
- 2: "BACKEND_REQUIRED",
- 3: "LIMIT_REACHED",
-}
-var SystemServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "INTERNAL_ERROR": 1,
- "BACKEND_REQUIRED": 2,
- "LIMIT_REACHED": 3,
-}
-
-func (x SystemServiceError_ErrorCode) Enum() *SystemServiceError_ErrorCode {
- p := new(SystemServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x SystemServiceError_ErrorCode) String() string {
- return proto.EnumName(SystemServiceError_ErrorCode_name, int32(x))
-}
-func (x *SystemServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(SystemServiceError_ErrorCode_value, data, "SystemServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = SystemServiceError_ErrorCode(value)
- return nil
-}
-
-type SystemServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SystemServiceError) Reset() { *m = SystemServiceError{} }
-func (m *SystemServiceError) String() string { return proto.CompactTextString(m) }
-func (*SystemServiceError) ProtoMessage() {}
-
-type SystemStat struct {
-	// Instantaneous value of this stat.
- Current *float64 `protobuf:"fixed64,1,opt,name=current" json:"current,omitempty"`
-	// Average over time, if this stat has an instantaneous value.
- Average1M *float64 `protobuf:"fixed64,3,opt,name=average1m" json:"average1m,omitempty"`
- Average10M *float64 `protobuf:"fixed64,4,opt,name=average10m" json:"average10m,omitempty"`
- // Total value, if the stat accumulates over time.
- Total *float64 `protobuf:"fixed64,2,opt,name=total" json:"total,omitempty"`
- // Rate over time, if this stat accumulates.
- Rate1M *float64 `protobuf:"fixed64,5,opt,name=rate1m" json:"rate1m,omitempty"`
- Rate10M *float64 `protobuf:"fixed64,6,opt,name=rate10m" json:"rate10m,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *SystemStat) Reset() { *m = SystemStat{} }
-func (m *SystemStat) String() string { return proto.CompactTextString(m) }
-func (*SystemStat) ProtoMessage() {}
-
-func (m *SystemStat) GetCurrent() float64 {
- if m != nil && m.Current != nil {
- return *m.Current
- }
- return 0
-}
-
-func (m *SystemStat) GetAverage1M() float64 {
- if m != nil && m.Average1M != nil {
- return *m.Average1M
- }
- return 0
-}
-
-func (m *SystemStat) GetAverage10M() float64 {
- if m != nil && m.Average10M != nil {
- return *m.Average10M
- }
- return 0
-}
-
-func (m *SystemStat) GetTotal() float64 {
- if m != nil && m.Total != nil {
- return *m.Total
- }
- return 0
-}
-
-func (m *SystemStat) GetRate1M() float64 {
- if m != nil && m.Rate1M != nil {
- return *m.Rate1M
- }
- return 0
-}
-
-func (m *SystemStat) GetRate10M() float64 {
- if m != nil && m.Rate10M != nil {
- return *m.Rate10M
- }
- return 0
-}
-
-type GetSystemStatsRequest struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetSystemStatsRequest) Reset() { *m = GetSystemStatsRequest{} }
-func (m *GetSystemStatsRequest) String() string { return proto.CompactTextString(m) }
-func (*GetSystemStatsRequest) ProtoMessage() {}
-
-type GetSystemStatsResponse struct {
- // CPU used by this instance, in mcycles.
- Cpu *SystemStat `protobuf:"bytes,1,opt,name=cpu" json:"cpu,omitempty"`
- // Physical memory (RAM) used by this instance, in megabytes.
- Memory *SystemStat `protobuf:"bytes,2,opt,name=memory" json:"memory,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetSystemStatsResponse) Reset() { *m = GetSystemStatsResponse{} }
-func (m *GetSystemStatsResponse) String() string { return proto.CompactTextString(m) }
-func (*GetSystemStatsResponse) ProtoMessage() {}
-
-func (m *GetSystemStatsResponse) GetCpu() *SystemStat {
- if m != nil {
- return m.Cpu
- }
- return nil
-}
-
-func (m *GetSystemStatsResponse) GetMemory() *SystemStat {
- if m != nil {
- return m.Memory
- }
- return nil
-}
-
-type StartBackgroundRequestRequest struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *StartBackgroundRequestRequest) Reset() { *m = StartBackgroundRequestRequest{} }
-func (m *StartBackgroundRequestRequest) String() string { return proto.CompactTextString(m) }
-func (*StartBackgroundRequestRequest) ProtoMessage() {}
-
-type StartBackgroundRequestResponse struct {
- // Every /_ah/background request will have an X-AppEngine-BackgroundRequest
- // header, whose value will be equal to this parameter, the request_id.
- RequestId *string `protobuf:"bytes,1,opt,name=request_id" json:"request_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *StartBackgroundRequestResponse) Reset() { *m = StartBackgroundRequestResponse{} }
-func (m *StartBackgroundRequestResponse) String() string { return proto.CompactTextString(m) }
-func (*StartBackgroundRequestResponse) ProtoMessage() {}
-
-func (m *StartBackgroundRequestResponse) GetRequestId() string {
- if m != nil && m.RequestId != nil {
- return *m.RequestId
- }
- return ""
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/system/system_service.proto b/vendor/google.golang.org/appengine/internal/system/system_service.proto
deleted file mode 100644
index 32c0bf8..0000000
--- a/vendor/google.golang.org/appengine/internal/system/system_service.proto
+++ /dev/null
@@ -1,49 +0,0 @@
-syntax = "proto2";
-option go_package = "system";
-
-package appengine;
-
-message SystemServiceError {
- enum ErrorCode {
- OK = 0;
- INTERNAL_ERROR = 1;
- BACKEND_REQUIRED = 2;
- LIMIT_REACHED = 3;
- }
-}
-
-message SystemStat {
-  // Instantaneous value of this stat.
- optional double current = 1;
-
-  // Average over time, if this stat has an instantaneous value.
- optional double average1m = 3;
- optional double average10m = 4;
-
- // Total value, if the stat accumulates over time.
- optional double total = 2;
-
- // Rate over time, if this stat accumulates.
- optional double rate1m = 5;
- optional double rate10m = 6;
-}
-
-message GetSystemStatsRequest {
-}
-
-message GetSystemStatsResponse {
- // CPU used by this instance, in mcycles.
- optional SystemStat cpu = 1;
-
- // Physical memory (RAM) used by this instance, in megabytes.
- optional SystemStat memory = 2;
-}
-
-message StartBackgroundRequestRequest {
-}
-
-message StartBackgroundRequestResponse {
- // Every /_ah/background request will have an X-AppEngine-BackgroundRequest
- // header, whose value will be equal to this parameter, the request_id.
- optional string request_id = 1;
-}
diff --git a/vendor/google.golang.org/appengine/internal/taskqueue/taskqueue_service.pb.go b/vendor/google.golang.org/appengine/internal/taskqueue/taskqueue_service.pb.go
deleted file mode 100644
index c3d428e..0000000
--- a/vendor/google.golang.org/appengine/internal/taskqueue/taskqueue_service.pb.go
+++ /dev/null
@@ -1,1888 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/taskqueue/taskqueue_service.proto
-// DO NOT EDIT!
-
-/*
-Package taskqueue is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/taskqueue/taskqueue_service.proto
-
-It has these top-level messages:
- TaskQueueServiceError
- TaskPayload
- TaskQueueRetryParameters
- TaskQueueAcl
- TaskQueueHttpHeader
- TaskQueueMode
- TaskQueueAddRequest
- TaskQueueAddResponse
- TaskQueueBulkAddRequest
- TaskQueueBulkAddResponse
- TaskQueueDeleteRequest
- TaskQueueDeleteResponse
- TaskQueueForceRunRequest
- TaskQueueForceRunResponse
- TaskQueueUpdateQueueRequest
- TaskQueueUpdateQueueResponse
- TaskQueueFetchQueuesRequest
- TaskQueueFetchQueuesResponse
- TaskQueueFetchQueueStatsRequest
- TaskQueueScannerQueueInfo
- TaskQueueFetchQueueStatsResponse
- TaskQueuePauseQueueRequest
- TaskQueuePauseQueueResponse
- TaskQueuePurgeQueueRequest
- TaskQueuePurgeQueueResponse
- TaskQueueDeleteQueueRequest
- TaskQueueDeleteQueueResponse
- TaskQueueDeleteGroupRequest
- TaskQueueDeleteGroupResponse
- TaskQueueQueryTasksRequest
- TaskQueueQueryTasksResponse
- TaskQueueFetchTaskRequest
- TaskQueueFetchTaskResponse
- TaskQueueUpdateStorageLimitRequest
- TaskQueueUpdateStorageLimitResponse
- TaskQueueQueryAndOwnTasksRequest
- TaskQueueQueryAndOwnTasksResponse
- TaskQueueModifyTaskLeaseRequest
- TaskQueueModifyTaskLeaseResponse
-*/
-package taskqueue
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-import appengine "google.golang.org/appengine/internal/datastore"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type TaskQueueServiceError_ErrorCode int32
-
-const (
- TaskQueueServiceError_OK TaskQueueServiceError_ErrorCode = 0
- TaskQueueServiceError_UNKNOWN_QUEUE TaskQueueServiceError_ErrorCode = 1
- TaskQueueServiceError_TRANSIENT_ERROR TaskQueueServiceError_ErrorCode = 2
- TaskQueueServiceError_INTERNAL_ERROR TaskQueueServiceError_ErrorCode = 3
- TaskQueueServiceError_TASK_TOO_LARGE TaskQueueServiceError_ErrorCode = 4
- TaskQueueServiceError_INVALID_TASK_NAME TaskQueueServiceError_ErrorCode = 5
- TaskQueueServiceError_INVALID_QUEUE_NAME TaskQueueServiceError_ErrorCode = 6
- TaskQueueServiceError_INVALID_URL TaskQueueServiceError_ErrorCode = 7
- TaskQueueServiceError_INVALID_QUEUE_RATE TaskQueueServiceError_ErrorCode = 8
- TaskQueueServiceError_PERMISSION_DENIED TaskQueueServiceError_ErrorCode = 9
- TaskQueueServiceError_TASK_ALREADY_EXISTS TaskQueueServiceError_ErrorCode = 10
- TaskQueueServiceError_TOMBSTONED_TASK TaskQueueServiceError_ErrorCode = 11
- TaskQueueServiceError_INVALID_ETA TaskQueueServiceError_ErrorCode = 12
- TaskQueueServiceError_INVALID_REQUEST TaskQueueServiceError_ErrorCode = 13
- TaskQueueServiceError_UNKNOWN_TASK TaskQueueServiceError_ErrorCode = 14
- TaskQueueServiceError_TOMBSTONED_QUEUE TaskQueueServiceError_ErrorCode = 15
- TaskQueueServiceError_DUPLICATE_TASK_NAME TaskQueueServiceError_ErrorCode = 16
- TaskQueueServiceError_SKIPPED TaskQueueServiceError_ErrorCode = 17
- TaskQueueServiceError_TOO_MANY_TASKS TaskQueueServiceError_ErrorCode = 18
- TaskQueueServiceError_INVALID_PAYLOAD TaskQueueServiceError_ErrorCode = 19
- TaskQueueServiceError_INVALID_RETRY_PARAMETERS TaskQueueServiceError_ErrorCode = 20
- TaskQueueServiceError_INVALID_QUEUE_MODE TaskQueueServiceError_ErrorCode = 21
- TaskQueueServiceError_ACL_LOOKUP_ERROR TaskQueueServiceError_ErrorCode = 22
- TaskQueueServiceError_TRANSACTIONAL_REQUEST_TOO_LARGE TaskQueueServiceError_ErrorCode = 23
- TaskQueueServiceError_INCORRECT_CREATOR_NAME TaskQueueServiceError_ErrorCode = 24
- TaskQueueServiceError_TASK_LEASE_EXPIRED TaskQueueServiceError_ErrorCode = 25
- TaskQueueServiceError_QUEUE_PAUSED TaskQueueServiceError_ErrorCode = 26
- TaskQueueServiceError_INVALID_TAG TaskQueueServiceError_ErrorCode = 27
- // Reserved range for the Datastore error codes.
- // Original Datastore error code is shifted by DATASTORE_ERROR offset.
- TaskQueueServiceError_DATASTORE_ERROR TaskQueueServiceError_ErrorCode = 10000
-)
-
-var TaskQueueServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "UNKNOWN_QUEUE",
- 2: "TRANSIENT_ERROR",
- 3: "INTERNAL_ERROR",
- 4: "TASK_TOO_LARGE",
- 5: "INVALID_TASK_NAME",
- 6: "INVALID_QUEUE_NAME",
- 7: "INVALID_URL",
- 8: "INVALID_QUEUE_RATE",
- 9: "PERMISSION_DENIED",
- 10: "TASK_ALREADY_EXISTS",
- 11: "TOMBSTONED_TASK",
- 12: "INVALID_ETA",
- 13: "INVALID_REQUEST",
- 14: "UNKNOWN_TASK",
- 15: "TOMBSTONED_QUEUE",
- 16: "DUPLICATE_TASK_NAME",
- 17: "SKIPPED",
- 18: "TOO_MANY_TASKS",
- 19: "INVALID_PAYLOAD",
- 20: "INVALID_RETRY_PARAMETERS",
- 21: "INVALID_QUEUE_MODE",
- 22: "ACL_LOOKUP_ERROR",
- 23: "TRANSACTIONAL_REQUEST_TOO_LARGE",
- 24: "INCORRECT_CREATOR_NAME",
- 25: "TASK_LEASE_EXPIRED",
- 26: "QUEUE_PAUSED",
- 27: "INVALID_TAG",
- 10000: "DATASTORE_ERROR",
-}
-var TaskQueueServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "UNKNOWN_QUEUE": 1,
- "TRANSIENT_ERROR": 2,
- "INTERNAL_ERROR": 3,
- "TASK_TOO_LARGE": 4,
- "INVALID_TASK_NAME": 5,
- "INVALID_QUEUE_NAME": 6,
- "INVALID_URL": 7,
- "INVALID_QUEUE_RATE": 8,
- "PERMISSION_DENIED": 9,
- "TASK_ALREADY_EXISTS": 10,
- "TOMBSTONED_TASK": 11,
- "INVALID_ETA": 12,
- "INVALID_REQUEST": 13,
- "UNKNOWN_TASK": 14,
- "TOMBSTONED_QUEUE": 15,
- "DUPLICATE_TASK_NAME": 16,
- "SKIPPED": 17,
- "TOO_MANY_TASKS": 18,
- "INVALID_PAYLOAD": 19,
- "INVALID_RETRY_PARAMETERS": 20,
- "INVALID_QUEUE_MODE": 21,
- "ACL_LOOKUP_ERROR": 22,
- "TRANSACTIONAL_REQUEST_TOO_LARGE": 23,
- "INCORRECT_CREATOR_NAME": 24,
- "TASK_LEASE_EXPIRED": 25,
- "QUEUE_PAUSED": 26,
- "INVALID_TAG": 27,
- "DATASTORE_ERROR": 10000,
-}
-
-func (x TaskQueueServiceError_ErrorCode) Enum() *TaskQueueServiceError_ErrorCode {
- p := new(TaskQueueServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x TaskQueueServiceError_ErrorCode) String() string {
- return proto.EnumName(TaskQueueServiceError_ErrorCode_name, int32(x))
-}
-func (x *TaskQueueServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(TaskQueueServiceError_ErrorCode_value, data, "TaskQueueServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = TaskQueueServiceError_ErrorCode(value)
- return nil
-}
-
-type TaskQueueMode_Mode int32
-
-const (
- TaskQueueMode_PUSH TaskQueueMode_Mode = 0
- TaskQueueMode_PULL TaskQueueMode_Mode = 1
-)
-
-var TaskQueueMode_Mode_name = map[int32]string{
- 0: "PUSH",
- 1: "PULL",
-}
-var TaskQueueMode_Mode_value = map[string]int32{
- "PUSH": 0,
- "PULL": 1,
-}
-
-func (x TaskQueueMode_Mode) Enum() *TaskQueueMode_Mode {
- p := new(TaskQueueMode_Mode)
- *p = x
- return p
-}
-func (x TaskQueueMode_Mode) String() string {
- return proto.EnumName(TaskQueueMode_Mode_name, int32(x))
-}
-func (x *TaskQueueMode_Mode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(TaskQueueMode_Mode_value, data, "TaskQueueMode_Mode")
- if err != nil {
- return err
- }
- *x = TaskQueueMode_Mode(value)
- return nil
-}
-
-type TaskQueueAddRequest_RequestMethod int32
-
-const (
- TaskQueueAddRequest_GET TaskQueueAddRequest_RequestMethod = 1
- TaskQueueAddRequest_POST TaskQueueAddRequest_RequestMethod = 2
- TaskQueueAddRequest_HEAD TaskQueueAddRequest_RequestMethod = 3
- TaskQueueAddRequest_PUT TaskQueueAddRequest_RequestMethod = 4
- TaskQueueAddRequest_DELETE TaskQueueAddRequest_RequestMethod = 5
-)
-
-var TaskQueueAddRequest_RequestMethod_name = map[int32]string{
- 1: "GET",
- 2: "POST",
- 3: "HEAD",
- 4: "PUT",
- 5: "DELETE",
-}
-var TaskQueueAddRequest_RequestMethod_value = map[string]int32{
- "GET": 1,
- "POST": 2,
- "HEAD": 3,
- "PUT": 4,
- "DELETE": 5,
-}
-
-func (x TaskQueueAddRequest_RequestMethod) Enum() *TaskQueueAddRequest_RequestMethod {
- p := new(TaskQueueAddRequest_RequestMethod)
- *p = x
- return p
-}
-func (x TaskQueueAddRequest_RequestMethod) String() string {
- return proto.EnumName(TaskQueueAddRequest_RequestMethod_name, int32(x))
-}
-func (x *TaskQueueAddRequest_RequestMethod) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(TaskQueueAddRequest_RequestMethod_value, data, "TaskQueueAddRequest_RequestMethod")
- if err != nil {
- return err
- }
- *x = TaskQueueAddRequest_RequestMethod(value)
- return nil
-}
-
-type TaskQueueQueryTasksResponse_Task_RequestMethod int32
-
-const (
- TaskQueueQueryTasksResponse_Task_GET TaskQueueQueryTasksResponse_Task_RequestMethod = 1
- TaskQueueQueryTasksResponse_Task_POST TaskQueueQueryTasksResponse_Task_RequestMethod = 2
- TaskQueueQueryTasksResponse_Task_HEAD TaskQueueQueryTasksResponse_Task_RequestMethod = 3
- TaskQueueQueryTasksResponse_Task_PUT TaskQueueQueryTasksResponse_Task_RequestMethod = 4
- TaskQueueQueryTasksResponse_Task_DELETE TaskQueueQueryTasksResponse_Task_RequestMethod = 5
-)
-
-var TaskQueueQueryTasksResponse_Task_RequestMethod_name = map[int32]string{
- 1: "GET",
- 2: "POST",
- 3: "HEAD",
- 4: "PUT",
- 5: "DELETE",
-}
-var TaskQueueQueryTasksResponse_Task_RequestMethod_value = map[string]int32{
- "GET": 1,
- "POST": 2,
- "HEAD": 3,
- "PUT": 4,
- "DELETE": 5,
-}
-
-func (x TaskQueueQueryTasksResponse_Task_RequestMethod) Enum() *TaskQueueQueryTasksResponse_Task_RequestMethod {
- p := new(TaskQueueQueryTasksResponse_Task_RequestMethod)
- *p = x
- return p
-}
-func (x TaskQueueQueryTasksResponse_Task_RequestMethod) String() string {
- return proto.EnumName(TaskQueueQueryTasksResponse_Task_RequestMethod_name, int32(x))
-}
-func (x *TaskQueueQueryTasksResponse_Task_RequestMethod) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(TaskQueueQueryTasksResponse_Task_RequestMethod_value, data, "TaskQueueQueryTasksResponse_Task_RequestMethod")
- if err != nil {
- return err
- }
- *x = TaskQueueQueryTasksResponse_Task_RequestMethod(value)
- return nil
-}
-
-type TaskQueueServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueServiceError) Reset() { *m = TaskQueueServiceError{} }
-func (m *TaskQueueServiceError) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueServiceError) ProtoMessage() {}
-
-type TaskPayload struct {
- XXX_extensions map[int32]proto.Extension `json:"-"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskPayload) Reset() { *m = TaskPayload{} }
-func (m *TaskPayload) String() string { return proto.CompactTextString(m) }
-func (*TaskPayload) ProtoMessage() {}
-
-func (m *TaskPayload) Marshal() ([]byte, error) {
- return proto.MarshalMessageSet(m.ExtensionMap())
-}
-func (m *TaskPayload) Unmarshal(buf []byte) error {
- return proto.UnmarshalMessageSet(buf, m.ExtensionMap())
-}
-func (m *TaskPayload) MarshalJSON() ([]byte, error) {
- return proto.MarshalMessageSetJSON(m.XXX_extensions)
-}
-func (m *TaskPayload) UnmarshalJSON(buf []byte) error {
- return proto.UnmarshalMessageSetJSON(buf, m.XXX_extensions)
-}
-
-// ensure TaskPayload satisfies proto.Marshaler and proto.Unmarshaler
-var _ proto.Marshaler = (*TaskPayload)(nil)
-var _ proto.Unmarshaler = (*TaskPayload)(nil)
-
-var extRange_TaskPayload = []proto.ExtensionRange{
- {10, 2147483646},
-}
-
-func (*TaskPayload) ExtensionRangeArray() []proto.ExtensionRange {
- return extRange_TaskPayload
-}
-func (m *TaskPayload) ExtensionMap() map[int32]proto.Extension {
- if m.XXX_extensions == nil {
- m.XXX_extensions = make(map[int32]proto.Extension)
- }
- return m.XXX_extensions
-}
-
-type TaskQueueRetryParameters struct {
- RetryLimit *int32 `protobuf:"varint,1,opt,name=retry_limit" json:"retry_limit,omitempty"`
- AgeLimitSec *int64 `protobuf:"varint,2,opt,name=age_limit_sec" json:"age_limit_sec,omitempty"`
- MinBackoffSec *float64 `protobuf:"fixed64,3,opt,name=min_backoff_sec,def=0.1" json:"min_backoff_sec,omitempty"`
- MaxBackoffSec *float64 `protobuf:"fixed64,4,opt,name=max_backoff_sec,def=3600" json:"max_backoff_sec,omitempty"`
- MaxDoublings *int32 `protobuf:"varint,5,opt,name=max_doublings,def=16" json:"max_doublings,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueRetryParameters) Reset() { *m = TaskQueueRetryParameters{} }
-func (m *TaskQueueRetryParameters) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueRetryParameters) ProtoMessage() {}
-
-const Default_TaskQueueRetryParameters_MinBackoffSec float64 = 0.1
-const Default_TaskQueueRetryParameters_MaxBackoffSec float64 = 3600
-const Default_TaskQueueRetryParameters_MaxDoublings int32 = 16
-
-func (m *TaskQueueRetryParameters) GetRetryLimit() int32 {
- if m != nil && m.RetryLimit != nil {
- return *m.RetryLimit
- }
- return 0
-}
-
-func (m *TaskQueueRetryParameters) GetAgeLimitSec() int64 {
- if m != nil && m.AgeLimitSec != nil {
- return *m.AgeLimitSec
- }
- return 0
-}
-
-func (m *TaskQueueRetryParameters) GetMinBackoffSec() float64 {
- if m != nil && m.MinBackoffSec != nil {
- return *m.MinBackoffSec
- }
- return Default_TaskQueueRetryParameters_MinBackoffSec
-}
-
-func (m *TaskQueueRetryParameters) GetMaxBackoffSec() float64 {
- if m != nil && m.MaxBackoffSec != nil {
- return *m.MaxBackoffSec
- }
- return Default_TaskQueueRetryParameters_MaxBackoffSec
-}
-
-func (m *TaskQueueRetryParameters) GetMaxDoublings() int32 {
- if m != nil && m.MaxDoublings != nil {
- return *m.MaxDoublings
- }
- return Default_TaskQueueRetryParameters_MaxDoublings
-}
-
-type TaskQueueAcl struct {
- UserEmail [][]byte `protobuf:"bytes,1,rep,name=user_email" json:"user_email,omitempty"`
- WriterEmail [][]byte `protobuf:"bytes,2,rep,name=writer_email" json:"writer_email,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueAcl) Reset() { *m = TaskQueueAcl{} }
-func (m *TaskQueueAcl) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueAcl) ProtoMessage() {}
-
-func (m *TaskQueueAcl) GetUserEmail() [][]byte {
- if m != nil {
- return m.UserEmail
- }
- return nil
-}
-
-func (m *TaskQueueAcl) GetWriterEmail() [][]byte {
- if m != nil {
- return m.WriterEmail
- }
- return nil
-}
-
-type TaskQueueHttpHeader struct {
- Key []byte `protobuf:"bytes,1,req,name=key" json:"key,omitempty"`
- Value []byte `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueHttpHeader) Reset() { *m = TaskQueueHttpHeader{} }
-func (m *TaskQueueHttpHeader) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueHttpHeader) ProtoMessage() {}
-
-func (m *TaskQueueHttpHeader) GetKey() []byte {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *TaskQueueHttpHeader) GetValue() []byte {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type TaskQueueMode struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueMode) Reset() { *m = TaskQueueMode{} }
-func (m *TaskQueueMode) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueMode) ProtoMessage() {}
-
-type TaskQueueAddRequest struct {
- QueueName []byte `protobuf:"bytes,1,req,name=queue_name" json:"queue_name,omitempty"`
- TaskName []byte `protobuf:"bytes,2,req,name=task_name" json:"task_name,omitempty"`
- EtaUsec *int64 `protobuf:"varint,3,req,name=eta_usec" json:"eta_usec,omitempty"`
- Method *TaskQueueAddRequest_RequestMethod `protobuf:"varint,5,opt,name=method,enum=appengine.TaskQueueAddRequest_RequestMethod,def=2" json:"method,omitempty"`
- Url []byte `protobuf:"bytes,4,opt,name=url" json:"url,omitempty"`
- Header []*TaskQueueAddRequest_Header `protobuf:"group,6,rep,name=Header" json:"header,omitempty"`
- Body []byte `protobuf:"bytes,9,opt,name=body" json:"body,omitempty"`
- Transaction *appengine.Transaction `protobuf:"bytes,10,opt,name=transaction" json:"transaction,omitempty"`
- AppId []byte `protobuf:"bytes,11,opt,name=app_id" json:"app_id,omitempty"`
- Crontimetable *TaskQueueAddRequest_CronTimetable `protobuf:"group,12,opt,name=CronTimetable" json:"crontimetable,omitempty"`
- Description []byte `protobuf:"bytes,15,opt,name=description" json:"description,omitempty"`
- Payload *TaskPayload `protobuf:"bytes,16,opt,name=payload" json:"payload,omitempty"`
- RetryParameters *TaskQueueRetryParameters `protobuf:"bytes,17,opt,name=retry_parameters" json:"retry_parameters,omitempty"`
- Mode *TaskQueueMode_Mode `protobuf:"varint,18,opt,name=mode,enum=appengine.TaskQueueMode_Mode,def=0" json:"mode,omitempty"`
- Tag []byte `protobuf:"bytes,19,opt,name=tag" json:"tag,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueAddRequest) Reset() { *m = TaskQueueAddRequest{} }
-func (m *TaskQueueAddRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueAddRequest) ProtoMessage() {}
-
-const Default_TaskQueueAddRequest_Method TaskQueueAddRequest_RequestMethod = TaskQueueAddRequest_POST
-const Default_TaskQueueAddRequest_Mode TaskQueueMode_Mode = TaskQueueMode_PUSH
-
-func (m *TaskQueueAddRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetTaskName() []byte {
- if m != nil {
- return m.TaskName
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetEtaUsec() int64 {
- if m != nil && m.EtaUsec != nil {
- return *m.EtaUsec
- }
- return 0
-}
-
-func (m *TaskQueueAddRequest) GetMethod() TaskQueueAddRequest_RequestMethod {
- if m != nil && m.Method != nil {
- return *m.Method
- }
- return Default_TaskQueueAddRequest_Method
-}
-
-func (m *TaskQueueAddRequest) GetUrl() []byte {
- if m != nil {
- return m.Url
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetHeader() []*TaskQueueAddRequest_Header {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetBody() []byte {
- if m != nil {
- return m.Body
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetTransaction() *appengine.Transaction {
- if m != nil {
- return m.Transaction
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetCrontimetable() *TaskQueueAddRequest_CronTimetable {
- if m != nil {
- return m.Crontimetable
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetDescription() []byte {
- if m != nil {
- return m.Description
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetPayload() *TaskPayload {
- if m != nil {
- return m.Payload
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetRetryParameters() *TaskQueueRetryParameters {
- if m != nil {
- return m.RetryParameters
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest) GetMode() TaskQueueMode_Mode {
- if m != nil && m.Mode != nil {
- return *m.Mode
- }
- return Default_TaskQueueAddRequest_Mode
-}
-
-func (m *TaskQueueAddRequest) GetTag() []byte {
- if m != nil {
- return m.Tag
- }
- return nil
-}
-
-type TaskQueueAddRequest_Header struct {
- Key []byte `protobuf:"bytes,7,req,name=key" json:"key,omitempty"`
- Value []byte `protobuf:"bytes,8,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueAddRequest_Header) Reset() { *m = TaskQueueAddRequest_Header{} }
-func (m *TaskQueueAddRequest_Header) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueAddRequest_Header) ProtoMessage() {}
-
-func (m *TaskQueueAddRequest_Header) GetKey() []byte {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest_Header) GetValue() []byte {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type TaskQueueAddRequest_CronTimetable struct {
- Schedule []byte `protobuf:"bytes,13,req,name=schedule" json:"schedule,omitempty"`
- Timezone []byte `protobuf:"bytes,14,req,name=timezone" json:"timezone,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueAddRequest_CronTimetable) Reset() { *m = TaskQueueAddRequest_CronTimetable{} }
-func (m *TaskQueueAddRequest_CronTimetable) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueAddRequest_CronTimetable) ProtoMessage() {}
-
-func (m *TaskQueueAddRequest_CronTimetable) GetSchedule() []byte {
- if m != nil {
- return m.Schedule
- }
- return nil
-}
-
-func (m *TaskQueueAddRequest_CronTimetable) GetTimezone() []byte {
- if m != nil {
- return m.Timezone
- }
- return nil
-}
-
-type TaskQueueAddResponse struct {
- ChosenTaskName []byte `protobuf:"bytes,1,opt,name=chosen_task_name" json:"chosen_task_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueAddResponse) Reset() { *m = TaskQueueAddResponse{} }
-func (m *TaskQueueAddResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueAddResponse) ProtoMessage() {}
-
-func (m *TaskQueueAddResponse) GetChosenTaskName() []byte {
- if m != nil {
- return m.ChosenTaskName
- }
- return nil
-}
-
-type TaskQueueBulkAddRequest struct {
- AddRequest []*TaskQueueAddRequest `protobuf:"bytes,1,rep,name=add_request" json:"add_request,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueBulkAddRequest) Reset() { *m = TaskQueueBulkAddRequest{} }
-func (m *TaskQueueBulkAddRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueBulkAddRequest) ProtoMessage() {}
-
-func (m *TaskQueueBulkAddRequest) GetAddRequest() []*TaskQueueAddRequest {
- if m != nil {
- return m.AddRequest
- }
- return nil
-}
-
-type TaskQueueBulkAddResponse struct {
- Taskresult []*TaskQueueBulkAddResponse_TaskResult `protobuf:"group,1,rep,name=TaskResult" json:"taskresult,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueBulkAddResponse) Reset() { *m = TaskQueueBulkAddResponse{} }
-func (m *TaskQueueBulkAddResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueBulkAddResponse) ProtoMessage() {}
-
-func (m *TaskQueueBulkAddResponse) GetTaskresult() []*TaskQueueBulkAddResponse_TaskResult {
- if m != nil {
- return m.Taskresult
- }
- return nil
-}
-
-type TaskQueueBulkAddResponse_TaskResult struct {
- Result *TaskQueueServiceError_ErrorCode `protobuf:"varint,2,req,name=result,enum=appengine.TaskQueueServiceError_ErrorCode" json:"result,omitempty"`
- ChosenTaskName []byte `protobuf:"bytes,3,opt,name=chosen_task_name" json:"chosen_task_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueBulkAddResponse_TaskResult) Reset() { *m = TaskQueueBulkAddResponse_TaskResult{} }
-func (m *TaskQueueBulkAddResponse_TaskResult) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueBulkAddResponse_TaskResult) ProtoMessage() {}
-
-func (m *TaskQueueBulkAddResponse_TaskResult) GetResult() TaskQueueServiceError_ErrorCode {
- if m != nil && m.Result != nil {
- return *m.Result
- }
- return TaskQueueServiceError_OK
-}
-
-func (m *TaskQueueBulkAddResponse_TaskResult) GetChosenTaskName() []byte {
- if m != nil {
- return m.ChosenTaskName
- }
- return nil
-}
-
-type TaskQueueDeleteRequest struct {
- QueueName []byte `protobuf:"bytes,1,req,name=queue_name" json:"queue_name,omitempty"`
- TaskName [][]byte `protobuf:"bytes,2,rep,name=task_name" json:"task_name,omitempty"`
- AppId []byte `protobuf:"bytes,3,opt,name=app_id" json:"app_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueDeleteRequest) Reset() { *m = TaskQueueDeleteRequest{} }
-func (m *TaskQueueDeleteRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueDeleteRequest) ProtoMessage() {}
-
-func (m *TaskQueueDeleteRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueDeleteRequest) GetTaskName() [][]byte {
- if m != nil {
- return m.TaskName
- }
- return nil
-}
-
-func (m *TaskQueueDeleteRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-type TaskQueueDeleteResponse struct {
- Result []TaskQueueServiceError_ErrorCode `protobuf:"varint,3,rep,name=result,enum=appengine.TaskQueueServiceError_ErrorCode" json:"result,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueDeleteResponse) Reset() { *m = TaskQueueDeleteResponse{} }
-func (m *TaskQueueDeleteResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueDeleteResponse) ProtoMessage() {}
-
-func (m *TaskQueueDeleteResponse) GetResult() []TaskQueueServiceError_ErrorCode {
- if m != nil {
- return m.Result
- }
- return nil
-}
-
-type TaskQueueForceRunRequest struct {
- AppId []byte `protobuf:"bytes,1,opt,name=app_id" json:"app_id,omitempty"`
- QueueName []byte `protobuf:"bytes,2,req,name=queue_name" json:"queue_name,omitempty"`
- TaskName []byte `protobuf:"bytes,3,req,name=task_name" json:"task_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueForceRunRequest) Reset() { *m = TaskQueueForceRunRequest{} }
-func (m *TaskQueueForceRunRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueForceRunRequest) ProtoMessage() {}
-
-func (m *TaskQueueForceRunRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueueForceRunRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueForceRunRequest) GetTaskName() []byte {
- if m != nil {
- return m.TaskName
- }
- return nil
-}
-
-type TaskQueueForceRunResponse struct {
- Result *TaskQueueServiceError_ErrorCode `protobuf:"varint,3,req,name=result,enum=appengine.TaskQueueServiceError_ErrorCode" json:"result,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueForceRunResponse) Reset() { *m = TaskQueueForceRunResponse{} }
-func (m *TaskQueueForceRunResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueForceRunResponse) ProtoMessage() {}
-
-func (m *TaskQueueForceRunResponse) GetResult() TaskQueueServiceError_ErrorCode {
- if m != nil && m.Result != nil {
- return *m.Result
- }
- return TaskQueueServiceError_OK
-}
-
-type TaskQueueUpdateQueueRequest struct {
- AppId []byte `protobuf:"bytes,1,opt,name=app_id" json:"app_id,omitempty"`
- QueueName []byte `protobuf:"bytes,2,req,name=queue_name" json:"queue_name,omitempty"`
- BucketRefillPerSecond *float64 `protobuf:"fixed64,3,req,name=bucket_refill_per_second" json:"bucket_refill_per_second,omitempty"`
- BucketCapacity *int32 `protobuf:"varint,4,req,name=bucket_capacity" json:"bucket_capacity,omitempty"`
- UserSpecifiedRate *string `protobuf:"bytes,5,opt,name=user_specified_rate" json:"user_specified_rate,omitempty"`
- RetryParameters *TaskQueueRetryParameters `protobuf:"bytes,6,opt,name=retry_parameters" json:"retry_parameters,omitempty"`
- MaxConcurrentRequests *int32 `protobuf:"varint,7,opt,name=max_concurrent_requests" json:"max_concurrent_requests,omitempty"`
- Mode *TaskQueueMode_Mode `protobuf:"varint,8,opt,name=mode,enum=appengine.TaskQueueMode_Mode,def=0" json:"mode,omitempty"`
- Acl *TaskQueueAcl `protobuf:"bytes,9,opt,name=acl" json:"acl,omitempty"`
- HeaderOverride []*TaskQueueHttpHeader `protobuf:"bytes,10,rep,name=header_override" json:"header_override,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueUpdateQueueRequest) Reset() { *m = TaskQueueUpdateQueueRequest{} }
-func (m *TaskQueueUpdateQueueRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueUpdateQueueRequest) ProtoMessage() {}
-
-const Default_TaskQueueUpdateQueueRequest_Mode TaskQueueMode_Mode = TaskQueueMode_PUSH
-
-func (m *TaskQueueUpdateQueueRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueueUpdateQueueRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueUpdateQueueRequest) GetBucketRefillPerSecond() float64 {
- if m != nil && m.BucketRefillPerSecond != nil {
- return *m.BucketRefillPerSecond
- }
- return 0
-}
-
-func (m *TaskQueueUpdateQueueRequest) GetBucketCapacity() int32 {
- if m != nil && m.BucketCapacity != nil {
- return *m.BucketCapacity
- }
- return 0
-}
-
-func (m *TaskQueueUpdateQueueRequest) GetUserSpecifiedRate() string {
- if m != nil && m.UserSpecifiedRate != nil {
- return *m.UserSpecifiedRate
- }
- return ""
-}
-
-func (m *TaskQueueUpdateQueueRequest) GetRetryParameters() *TaskQueueRetryParameters {
- if m != nil {
- return m.RetryParameters
- }
- return nil
-}
-
-func (m *TaskQueueUpdateQueueRequest) GetMaxConcurrentRequests() int32 {
- if m != nil && m.MaxConcurrentRequests != nil {
- return *m.MaxConcurrentRequests
- }
- return 0
-}
-
-func (m *TaskQueueUpdateQueueRequest) GetMode() TaskQueueMode_Mode {
- if m != nil && m.Mode != nil {
- return *m.Mode
- }
- return Default_TaskQueueUpdateQueueRequest_Mode
-}
-
-func (m *TaskQueueUpdateQueueRequest) GetAcl() *TaskQueueAcl {
- if m != nil {
- return m.Acl
- }
- return nil
-}
-
-func (m *TaskQueueUpdateQueueRequest) GetHeaderOverride() []*TaskQueueHttpHeader {
- if m != nil {
- return m.HeaderOverride
- }
- return nil
-}
-
-type TaskQueueUpdateQueueResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueUpdateQueueResponse) Reset() { *m = TaskQueueUpdateQueueResponse{} }
-func (m *TaskQueueUpdateQueueResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueUpdateQueueResponse) ProtoMessage() {}
-
-type TaskQueueFetchQueuesRequest struct {
- AppId []byte `protobuf:"bytes,1,opt,name=app_id" json:"app_id,omitempty"`
- MaxRows *int32 `protobuf:"varint,2,req,name=max_rows" json:"max_rows,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueFetchQueuesRequest) Reset() { *m = TaskQueueFetchQueuesRequest{} }
-func (m *TaskQueueFetchQueuesRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueFetchQueuesRequest) ProtoMessage() {}
-
-func (m *TaskQueueFetchQueuesRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueueFetchQueuesRequest) GetMaxRows() int32 {
- if m != nil && m.MaxRows != nil {
- return *m.MaxRows
- }
- return 0
-}
-
-type TaskQueueFetchQueuesResponse struct {
- Queue []*TaskQueueFetchQueuesResponse_Queue `protobuf:"group,1,rep,name=Queue" json:"queue,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueFetchQueuesResponse) Reset() { *m = TaskQueueFetchQueuesResponse{} }
-func (m *TaskQueueFetchQueuesResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueFetchQueuesResponse) ProtoMessage() {}
-
-func (m *TaskQueueFetchQueuesResponse) GetQueue() []*TaskQueueFetchQueuesResponse_Queue {
- if m != nil {
- return m.Queue
- }
- return nil
-}
-
-type TaskQueueFetchQueuesResponse_Queue struct {
- QueueName []byte `protobuf:"bytes,2,req,name=queue_name" json:"queue_name,omitempty"`
- BucketRefillPerSecond *float64 `protobuf:"fixed64,3,req,name=bucket_refill_per_second" json:"bucket_refill_per_second,omitempty"`
- BucketCapacity *float64 `protobuf:"fixed64,4,req,name=bucket_capacity" json:"bucket_capacity,omitempty"`
- UserSpecifiedRate *string `protobuf:"bytes,5,opt,name=user_specified_rate" json:"user_specified_rate,omitempty"`
- Paused *bool `protobuf:"varint,6,req,name=paused,def=0" json:"paused,omitempty"`
- RetryParameters *TaskQueueRetryParameters `protobuf:"bytes,7,opt,name=retry_parameters" json:"retry_parameters,omitempty"`
- MaxConcurrentRequests *int32 `protobuf:"varint,8,opt,name=max_concurrent_requests" json:"max_concurrent_requests,omitempty"`
- Mode *TaskQueueMode_Mode `protobuf:"varint,9,opt,name=mode,enum=appengine.TaskQueueMode_Mode,def=0" json:"mode,omitempty"`
- Acl *TaskQueueAcl `protobuf:"bytes,10,opt,name=acl" json:"acl,omitempty"`
- HeaderOverride []*TaskQueueHttpHeader `protobuf:"bytes,11,rep,name=header_override" json:"header_override,omitempty"`
- CreatorName *string `protobuf:"bytes,12,opt,name=creator_name,def=apphosting" json:"creator_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) Reset() { *m = TaskQueueFetchQueuesResponse_Queue{} }
-func (m *TaskQueueFetchQueuesResponse_Queue) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueFetchQueuesResponse_Queue) ProtoMessage() {}
-
-const Default_TaskQueueFetchQueuesResponse_Queue_Paused bool = false
-const Default_TaskQueueFetchQueuesResponse_Queue_Mode TaskQueueMode_Mode = TaskQueueMode_PUSH
-const Default_TaskQueueFetchQueuesResponse_Queue_CreatorName string = "apphosting"
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetBucketRefillPerSecond() float64 {
- if m != nil && m.BucketRefillPerSecond != nil {
- return *m.BucketRefillPerSecond
- }
- return 0
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetBucketCapacity() float64 {
- if m != nil && m.BucketCapacity != nil {
- return *m.BucketCapacity
- }
- return 0
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetUserSpecifiedRate() string {
- if m != nil && m.UserSpecifiedRate != nil {
- return *m.UserSpecifiedRate
- }
- return ""
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetPaused() bool {
- if m != nil && m.Paused != nil {
- return *m.Paused
- }
- return Default_TaskQueueFetchQueuesResponse_Queue_Paused
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetRetryParameters() *TaskQueueRetryParameters {
- if m != nil {
- return m.RetryParameters
- }
- return nil
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetMaxConcurrentRequests() int32 {
- if m != nil && m.MaxConcurrentRequests != nil {
- return *m.MaxConcurrentRequests
- }
- return 0
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetMode() TaskQueueMode_Mode {
- if m != nil && m.Mode != nil {
- return *m.Mode
- }
- return Default_TaskQueueFetchQueuesResponse_Queue_Mode
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetAcl() *TaskQueueAcl {
- if m != nil {
- return m.Acl
- }
- return nil
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetHeaderOverride() []*TaskQueueHttpHeader {
- if m != nil {
- return m.HeaderOverride
- }
- return nil
-}
-
-func (m *TaskQueueFetchQueuesResponse_Queue) GetCreatorName() string {
- if m != nil && m.CreatorName != nil {
- return *m.CreatorName
- }
- return Default_TaskQueueFetchQueuesResponse_Queue_CreatorName
-}
-
-type TaskQueueFetchQueueStatsRequest struct {
- AppId []byte `protobuf:"bytes,1,opt,name=app_id" json:"app_id,omitempty"`
- QueueName [][]byte `protobuf:"bytes,2,rep,name=queue_name" json:"queue_name,omitempty"`
- MaxNumTasks *int32 `protobuf:"varint,3,opt,name=max_num_tasks,def=0" json:"max_num_tasks,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueFetchQueueStatsRequest) Reset() { *m = TaskQueueFetchQueueStatsRequest{} }
-func (m *TaskQueueFetchQueueStatsRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueFetchQueueStatsRequest) ProtoMessage() {}
-
-const Default_TaskQueueFetchQueueStatsRequest_MaxNumTasks int32 = 0
-
-func (m *TaskQueueFetchQueueStatsRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueueFetchQueueStatsRequest) GetQueueName() [][]byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueFetchQueueStatsRequest) GetMaxNumTasks() int32 {
- if m != nil && m.MaxNumTasks != nil {
- return *m.MaxNumTasks
- }
- return Default_TaskQueueFetchQueueStatsRequest_MaxNumTasks
-}
-
-type TaskQueueScannerQueueInfo struct {
- ExecutedLastMinute *int64 `protobuf:"varint,1,req,name=executed_last_minute" json:"executed_last_minute,omitempty"`
- ExecutedLastHour *int64 `protobuf:"varint,2,req,name=executed_last_hour" json:"executed_last_hour,omitempty"`
- SamplingDurationSeconds *float64 `protobuf:"fixed64,3,req,name=sampling_duration_seconds" json:"sampling_duration_seconds,omitempty"`
- RequestsInFlight *int32 `protobuf:"varint,4,opt,name=requests_in_flight" json:"requests_in_flight,omitempty"`
- EnforcedRate *float64 `protobuf:"fixed64,5,opt,name=enforced_rate" json:"enforced_rate,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueScannerQueueInfo) Reset() { *m = TaskQueueScannerQueueInfo{} }
-func (m *TaskQueueScannerQueueInfo) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueScannerQueueInfo) ProtoMessage() {}
-
-func (m *TaskQueueScannerQueueInfo) GetExecutedLastMinute() int64 {
- if m != nil && m.ExecutedLastMinute != nil {
- return *m.ExecutedLastMinute
- }
- return 0
-}
-
-func (m *TaskQueueScannerQueueInfo) GetExecutedLastHour() int64 {
- if m != nil && m.ExecutedLastHour != nil {
- return *m.ExecutedLastHour
- }
- return 0
-}
-
-func (m *TaskQueueScannerQueueInfo) GetSamplingDurationSeconds() float64 {
- if m != nil && m.SamplingDurationSeconds != nil {
- return *m.SamplingDurationSeconds
- }
- return 0
-}
-
-func (m *TaskQueueScannerQueueInfo) GetRequestsInFlight() int32 {
- if m != nil && m.RequestsInFlight != nil {
- return *m.RequestsInFlight
- }
- return 0
-}
-
-func (m *TaskQueueScannerQueueInfo) GetEnforcedRate() float64 {
- if m != nil && m.EnforcedRate != nil {
- return *m.EnforcedRate
- }
- return 0
-}
-
-type TaskQueueFetchQueueStatsResponse struct {
- Queuestats []*TaskQueueFetchQueueStatsResponse_QueueStats `protobuf:"group,1,rep,name=QueueStats" json:"queuestats,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueFetchQueueStatsResponse) Reset() { *m = TaskQueueFetchQueueStatsResponse{} }
-func (m *TaskQueueFetchQueueStatsResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueFetchQueueStatsResponse) ProtoMessage() {}
-
-func (m *TaskQueueFetchQueueStatsResponse) GetQueuestats() []*TaskQueueFetchQueueStatsResponse_QueueStats {
- if m != nil {
- return m.Queuestats
- }
- return nil
-}
-
-type TaskQueueFetchQueueStatsResponse_QueueStats struct {
- NumTasks *int32 `protobuf:"varint,2,req,name=num_tasks" json:"num_tasks,omitempty"`
- OldestEtaUsec *int64 `protobuf:"varint,3,req,name=oldest_eta_usec" json:"oldest_eta_usec,omitempty"`
- ScannerInfo *TaskQueueScannerQueueInfo `protobuf:"bytes,4,opt,name=scanner_info" json:"scanner_info,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueFetchQueueStatsResponse_QueueStats) Reset() {
- *m = TaskQueueFetchQueueStatsResponse_QueueStats{}
-}
-func (m *TaskQueueFetchQueueStatsResponse_QueueStats) String() string {
- return proto.CompactTextString(m)
-}
-func (*TaskQueueFetchQueueStatsResponse_QueueStats) ProtoMessage() {}
-
-func (m *TaskQueueFetchQueueStatsResponse_QueueStats) GetNumTasks() int32 {
- if m != nil && m.NumTasks != nil {
- return *m.NumTasks
- }
- return 0
-}
-
-func (m *TaskQueueFetchQueueStatsResponse_QueueStats) GetOldestEtaUsec() int64 {
- if m != nil && m.OldestEtaUsec != nil {
- return *m.OldestEtaUsec
- }
- return 0
-}
-
-func (m *TaskQueueFetchQueueStatsResponse_QueueStats) GetScannerInfo() *TaskQueueScannerQueueInfo {
- if m != nil {
- return m.ScannerInfo
- }
- return nil
-}
-
-type TaskQueuePauseQueueRequest struct {
- AppId []byte `protobuf:"bytes,1,req,name=app_id" json:"app_id,omitempty"`
- QueueName []byte `protobuf:"bytes,2,req,name=queue_name" json:"queue_name,omitempty"`
- Pause *bool `protobuf:"varint,3,req,name=pause" json:"pause,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueuePauseQueueRequest) Reset() { *m = TaskQueuePauseQueueRequest{} }
-func (m *TaskQueuePauseQueueRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueuePauseQueueRequest) ProtoMessage() {}
-
-func (m *TaskQueuePauseQueueRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueuePauseQueueRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueuePauseQueueRequest) GetPause() bool {
- if m != nil && m.Pause != nil {
- return *m.Pause
- }
- return false
-}
-
-type TaskQueuePauseQueueResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueuePauseQueueResponse) Reset() { *m = TaskQueuePauseQueueResponse{} }
-func (m *TaskQueuePauseQueueResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueuePauseQueueResponse) ProtoMessage() {}
-
-type TaskQueuePurgeQueueRequest struct {
- AppId []byte `protobuf:"bytes,1,opt,name=app_id" json:"app_id,omitempty"`
- QueueName []byte `protobuf:"bytes,2,req,name=queue_name" json:"queue_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueuePurgeQueueRequest) Reset() { *m = TaskQueuePurgeQueueRequest{} }
-func (m *TaskQueuePurgeQueueRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueuePurgeQueueRequest) ProtoMessage() {}
-
-func (m *TaskQueuePurgeQueueRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueuePurgeQueueRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-type TaskQueuePurgeQueueResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueuePurgeQueueResponse) Reset() { *m = TaskQueuePurgeQueueResponse{} }
-func (m *TaskQueuePurgeQueueResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueuePurgeQueueResponse) ProtoMessage() {}
-
-type TaskQueueDeleteQueueRequest struct {
- AppId []byte `protobuf:"bytes,1,req,name=app_id" json:"app_id,omitempty"`
- QueueName []byte `protobuf:"bytes,2,req,name=queue_name" json:"queue_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueDeleteQueueRequest) Reset() { *m = TaskQueueDeleteQueueRequest{} }
-func (m *TaskQueueDeleteQueueRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueDeleteQueueRequest) ProtoMessage() {}
-
-func (m *TaskQueueDeleteQueueRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueueDeleteQueueRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-type TaskQueueDeleteQueueResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueDeleteQueueResponse) Reset() { *m = TaskQueueDeleteQueueResponse{} }
-func (m *TaskQueueDeleteQueueResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueDeleteQueueResponse) ProtoMessage() {}
-
-type TaskQueueDeleteGroupRequest struct {
- AppId []byte `protobuf:"bytes,1,req,name=app_id" json:"app_id,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueDeleteGroupRequest) Reset() { *m = TaskQueueDeleteGroupRequest{} }
-func (m *TaskQueueDeleteGroupRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueDeleteGroupRequest) ProtoMessage() {}
-
-func (m *TaskQueueDeleteGroupRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-type TaskQueueDeleteGroupResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueDeleteGroupResponse) Reset() { *m = TaskQueueDeleteGroupResponse{} }
-func (m *TaskQueueDeleteGroupResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueDeleteGroupResponse) ProtoMessage() {}
-
-type TaskQueueQueryTasksRequest struct {
- AppId []byte `protobuf:"bytes,1,opt,name=app_id" json:"app_id,omitempty"`
- QueueName []byte `protobuf:"bytes,2,req,name=queue_name" json:"queue_name,omitempty"`
- StartTaskName []byte `protobuf:"bytes,3,opt,name=start_task_name" json:"start_task_name,omitempty"`
- StartEtaUsec *int64 `protobuf:"varint,4,opt,name=start_eta_usec" json:"start_eta_usec,omitempty"`
- StartTag []byte `protobuf:"bytes,6,opt,name=start_tag" json:"start_tag,omitempty"`
- MaxRows *int32 `protobuf:"varint,5,opt,name=max_rows,def=1" json:"max_rows,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueQueryTasksRequest) Reset() { *m = TaskQueueQueryTasksRequest{} }
-func (m *TaskQueueQueryTasksRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueQueryTasksRequest) ProtoMessage() {}
-
-const Default_TaskQueueQueryTasksRequest_MaxRows int32 = 1
-
-func (m *TaskQueueQueryTasksRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksRequest) GetStartTaskName() []byte {
- if m != nil {
- return m.StartTaskName
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksRequest) GetStartEtaUsec() int64 {
- if m != nil && m.StartEtaUsec != nil {
- return *m.StartEtaUsec
- }
- return 0
-}
-
-func (m *TaskQueueQueryTasksRequest) GetStartTag() []byte {
- if m != nil {
- return m.StartTag
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksRequest) GetMaxRows() int32 {
- if m != nil && m.MaxRows != nil {
- return *m.MaxRows
- }
- return Default_TaskQueueQueryTasksRequest_MaxRows
-}
-
-type TaskQueueQueryTasksResponse struct {
- Task []*TaskQueueQueryTasksResponse_Task `protobuf:"group,1,rep,name=Task" json:"task,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueQueryTasksResponse) Reset() { *m = TaskQueueQueryTasksResponse{} }
-func (m *TaskQueueQueryTasksResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueQueryTasksResponse) ProtoMessage() {}
-
-func (m *TaskQueueQueryTasksResponse) GetTask() []*TaskQueueQueryTasksResponse_Task {
- if m != nil {
- return m.Task
- }
- return nil
-}
-
-type TaskQueueQueryTasksResponse_Task struct {
- TaskName []byte `protobuf:"bytes,2,req,name=task_name" json:"task_name,omitempty"`
- EtaUsec *int64 `protobuf:"varint,3,req,name=eta_usec" json:"eta_usec,omitempty"`
- Url []byte `protobuf:"bytes,4,opt,name=url" json:"url,omitempty"`
- Method *TaskQueueQueryTasksResponse_Task_RequestMethod `protobuf:"varint,5,opt,name=method,enum=appengine.TaskQueueQueryTasksResponse_Task_RequestMethod" json:"method,omitempty"`
- RetryCount *int32 `protobuf:"varint,6,opt,name=retry_count,def=0" json:"retry_count,omitempty"`
- Header []*TaskQueueQueryTasksResponse_Task_Header `protobuf:"group,7,rep,name=Header" json:"header,omitempty"`
- BodySize *int32 `protobuf:"varint,10,opt,name=body_size" json:"body_size,omitempty"`
- Body []byte `protobuf:"bytes,11,opt,name=body" json:"body,omitempty"`
- CreationTimeUsec *int64 `protobuf:"varint,12,req,name=creation_time_usec" json:"creation_time_usec,omitempty"`
- Crontimetable *TaskQueueQueryTasksResponse_Task_CronTimetable `protobuf:"group,13,opt,name=CronTimetable" json:"crontimetable,omitempty"`
- Runlog *TaskQueueQueryTasksResponse_Task_RunLog `protobuf:"group,16,opt,name=RunLog" json:"runlog,omitempty"`
- Description []byte `protobuf:"bytes,21,opt,name=description" json:"description,omitempty"`
- Payload *TaskPayload `protobuf:"bytes,22,opt,name=payload" json:"payload,omitempty"`
- RetryParameters *TaskQueueRetryParameters `protobuf:"bytes,23,opt,name=retry_parameters" json:"retry_parameters,omitempty"`
- FirstTryUsec *int64 `protobuf:"varint,24,opt,name=first_try_usec" json:"first_try_usec,omitempty"`
- Tag []byte `protobuf:"bytes,25,opt,name=tag" json:"tag,omitempty"`
- ExecutionCount *int32 `protobuf:"varint,26,opt,name=execution_count,def=0" json:"execution_count,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) Reset() { *m = TaskQueueQueryTasksResponse_Task{} }
-func (m *TaskQueueQueryTasksResponse_Task) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueQueryTasksResponse_Task) ProtoMessage() {}
-
-const Default_TaskQueueQueryTasksResponse_Task_RetryCount int32 = 0
-const Default_TaskQueueQueryTasksResponse_Task_ExecutionCount int32 = 0
-
-func (m *TaskQueueQueryTasksResponse_Task) GetTaskName() []byte {
- if m != nil {
- return m.TaskName
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetEtaUsec() int64 {
- if m != nil && m.EtaUsec != nil {
- return *m.EtaUsec
- }
- return 0
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetUrl() []byte {
- if m != nil {
- return m.Url
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetMethod() TaskQueueQueryTasksResponse_Task_RequestMethod {
- if m != nil && m.Method != nil {
- return *m.Method
- }
- return TaskQueueQueryTasksResponse_Task_GET
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetRetryCount() int32 {
- if m != nil && m.RetryCount != nil {
- return *m.RetryCount
- }
- return Default_TaskQueueQueryTasksResponse_Task_RetryCount
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetHeader() []*TaskQueueQueryTasksResponse_Task_Header {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetBodySize() int32 {
- if m != nil && m.BodySize != nil {
- return *m.BodySize
- }
- return 0
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetBody() []byte {
- if m != nil {
- return m.Body
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetCreationTimeUsec() int64 {
- if m != nil && m.CreationTimeUsec != nil {
- return *m.CreationTimeUsec
- }
- return 0
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetCrontimetable() *TaskQueueQueryTasksResponse_Task_CronTimetable {
- if m != nil {
- return m.Crontimetable
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetRunlog() *TaskQueueQueryTasksResponse_Task_RunLog {
- if m != nil {
- return m.Runlog
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetDescription() []byte {
- if m != nil {
- return m.Description
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetPayload() *TaskPayload {
- if m != nil {
- return m.Payload
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetRetryParameters() *TaskQueueRetryParameters {
- if m != nil {
- return m.RetryParameters
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetFirstTryUsec() int64 {
- if m != nil && m.FirstTryUsec != nil {
- return *m.FirstTryUsec
- }
- return 0
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetTag() []byte {
- if m != nil {
- return m.Tag
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task) GetExecutionCount() int32 {
- if m != nil && m.ExecutionCount != nil {
- return *m.ExecutionCount
- }
- return Default_TaskQueueQueryTasksResponse_Task_ExecutionCount
-}
-
-type TaskQueueQueryTasksResponse_Task_Header struct {
- Key []byte `protobuf:"bytes,8,req,name=key" json:"key,omitempty"`
- Value []byte `protobuf:"bytes,9,req,name=value" json:"value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueQueryTasksResponse_Task_Header) Reset() {
- *m = TaskQueueQueryTasksResponse_Task_Header{}
-}
-func (m *TaskQueueQueryTasksResponse_Task_Header) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueQueryTasksResponse_Task_Header) ProtoMessage() {}
-
-func (m *TaskQueueQueryTasksResponse_Task_Header) GetKey() []byte {
- if m != nil {
- return m.Key
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task_Header) GetValue() []byte {
- if m != nil {
- return m.Value
- }
- return nil
-}
-
-type TaskQueueQueryTasksResponse_Task_CronTimetable struct {
- Schedule []byte `protobuf:"bytes,14,req,name=schedule" json:"schedule,omitempty"`
- Timezone []byte `protobuf:"bytes,15,req,name=timezone" json:"timezone,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) Reset() {
- *m = TaskQueueQueryTasksResponse_Task_CronTimetable{}
-}
-func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) String() string {
- return proto.CompactTextString(m)
-}
-func (*TaskQueueQueryTasksResponse_Task_CronTimetable) ProtoMessage() {}
-
-func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) GetSchedule() []byte {
- if m != nil {
- return m.Schedule
- }
- return nil
-}
-
-func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) GetTimezone() []byte {
- if m != nil {
- return m.Timezone
- }
- return nil
-}
-
-type TaskQueueQueryTasksResponse_Task_RunLog struct {
- DispatchedUsec *int64 `protobuf:"varint,17,req,name=dispatched_usec" json:"dispatched_usec,omitempty"`
- LagUsec *int64 `protobuf:"varint,18,req,name=lag_usec" json:"lag_usec,omitempty"`
- ElapsedUsec *int64 `protobuf:"varint,19,req,name=elapsed_usec" json:"elapsed_usec,omitempty"`
- ResponseCode *int64 `protobuf:"varint,20,opt,name=response_code" json:"response_code,omitempty"`
- RetryReason *string `protobuf:"bytes,27,opt,name=retry_reason" json:"retry_reason,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueQueryTasksResponse_Task_RunLog) Reset() {
- *m = TaskQueueQueryTasksResponse_Task_RunLog{}
-}
-func (m *TaskQueueQueryTasksResponse_Task_RunLog) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueQueryTasksResponse_Task_RunLog) ProtoMessage() {}
-
-func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetDispatchedUsec() int64 {
- if m != nil && m.DispatchedUsec != nil {
- return *m.DispatchedUsec
- }
- return 0
-}
-
-func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetLagUsec() int64 {
- if m != nil && m.LagUsec != nil {
- return *m.LagUsec
- }
- return 0
-}
-
-func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetElapsedUsec() int64 {
- if m != nil && m.ElapsedUsec != nil {
- return *m.ElapsedUsec
- }
- return 0
-}
-
-func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetResponseCode() int64 {
- if m != nil && m.ResponseCode != nil {
- return *m.ResponseCode
- }
- return 0
-}
-
-func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetRetryReason() string {
- if m != nil && m.RetryReason != nil {
- return *m.RetryReason
- }
- return ""
-}
-
-type TaskQueueFetchTaskRequest struct {
- AppId []byte `protobuf:"bytes,1,opt,name=app_id" json:"app_id,omitempty"`
- QueueName []byte `protobuf:"bytes,2,req,name=queue_name" json:"queue_name,omitempty"`
- TaskName []byte `protobuf:"bytes,3,req,name=task_name" json:"task_name,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueFetchTaskRequest) Reset() { *m = TaskQueueFetchTaskRequest{} }
-func (m *TaskQueueFetchTaskRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueFetchTaskRequest) ProtoMessage() {}
-
-func (m *TaskQueueFetchTaskRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueueFetchTaskRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueFetchTaskRequest) GetTaskName() []byte {
- if m != nil {
- return m.TaskName
- }
- return nil
-}
-
-type TaskQueueFetchTaskResponse struct {
- Task *TaskQueueQueryTasksResponse `protobuf:"bytes,1,req,name=task" json:"task,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueFetchTaskResponse) Reset() { *m = TaskQueueFetchTaskResponse{} }
-func (m *TaskQueueFetchTaskResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueFetchTaskResponse) ProtoMessage() {}
-
-func (m *TaskQueueFetchTaskResponse) GetTask() *TaskQueueQueryTasksResponse {
- if m != nil {
- return m.Task
- }
- return nil
-}
-
-type TaskQueueUpdateStorageLimitRequest struct {
- AppId []byte `protobuf:"bytes,1,req,name=app_id" json:"app_id,omitempty"`
- Limit *int64 `protobuf:"varint,2,req,name=limit" json:"limit,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueUpdateStorageLimitRequest) Reset() { *m = TaskQueueUpdateStorageLimitRequest{} }
-func (m *TaskQueueUpdateStorageLimitRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueUpdateStorageLimitRequest) ProtoMessage() {}
-
-func (m *TaskQueueUpdateStorageLimitRequest) GetAppId() []byte {
- if m != nil {
- return m.AppId
- }
- return nil
-}
-
-func (m *TaskQueueUpdateStorageLimitRequest) GetLimit() int64 {
- if m != nil && m.Limit != nil {
- return *m.Limit
- }
- return 0
-}
-
-type TaskQueueUpdateStorageLimitResponse struct {
- NewLimit *int64 `protobuf:"varint,1,req,name=new_limit" json:"new_limit,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueUpdateStorageLimitResponse) Reset() { *m = TaskQueueUpdateStorageLimitResponse{} }
-func (m *TaskQueueUpdateStorageLimitResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueUpdateStorageLimitResponse) ProtoMessage() {}
-
-func (m *TaskQueueUpdateStorageLimitResponse) GetNewLimit() int64 {
- if m != nil && m.NewLimit != nil {
- return *m.NewLimit
- }
- return 0
-}
-
-type TaskQueueQueryAndOwnTasksRequest struct {
- QueueName []byte `protobuf:"bytes,1,req,name=queue_name" json:"queue_name,omitempty"`
- LeaseSeconds *float64 `protobuf:"fixed64,2,req,name=lease_seconds" json:"lease_seconds,omitempty"`
- MaxTasks *int64 `protobuf:"varint,3,req,name=max_tasks" json:"max_tasks,omitempty"`
- GroupByTag *bool `protobuf:"varint,4,opt,name=group_by_tag,def=0" json:"group_by_tag,omitempty"`
- Tag []byte `protobuf:"bytes,5,opt,name=tag" json:"tag,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueQueryAndOwnTasksRequest) Reset() { *m = TaskQueueQueryAndOwnTasksRequest{} }
-func (m *TaskQueueQueryAndOwnTasksRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueQueryAndOwnTasksRequest) ProtoMessage() {}
-
-const Default_TaskQueueQueryAndOwnTasksRequest_GroupByTag bool = false
-
-func (m *TaskQueueQueryAndOwnTasksRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueQueryAndOwnTasksRequest) GetLeaseSeconds() float64 {
- if m != nil && m.LeaseSeconds != nil {
- return *m.LeaseSeconds
- }
- return 0
-}
-
-func (m *TaskQueueQueryAndOwnTasksRequest) GetMaxTasks() int64 {
- if m != nil && m.MaxTasks != nil {
- return *m.MaxTasks
- }
- return 0
-}
-
-func (m *TaskQueueQueryAndOwnTasksRequest) GetGroupByTag() bool {
- if m != nil && m.GroupByTag != nil {
- return *m.GroupByTag
- }
- return Default_TaskQueueQueryAndOwnTasksRequest_GroupByTag
-}
-
-func (m *TaskQueueQueryAndOwnTasksRequest) GetTag() []byte {
- if m != nil {
- return m.Tag
- }
- return nil
-}
-
-type TaskQueueQueryAndOwnTasksResponse struct {
- Task []*TaskQueueQueryAndOwnTasksResponse_Task `protobuf:"group,1,rep,name=Task" json:"task,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueQueryAndOwnTasksResponse) Reset() { *m = TaskQueueQueryAndOwnTasksResponse{} }
-func (m *TaskQueueQueryAndOwnTasksResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueQueryAndOwnTasksResponse) ProtoMessage() {}
-
-func (m *TaskQueueQueryAndOwnTasksResponse) GetTask() []*TaskQueueQueryAndOwnTasksResponse_Task {
- if m != nil {
- return m.Task
- }
- return nil
-}
-
-type TaskQueueQueryAndOwnTasksResponse_Task struct {
- TaskName []byte `protobuf:"bytes,2,req,name=task_name" json:"task_name,omitempty"`
- EtaUsec *int64 `protobuf:"varint,3,req,name=eta_usec" json:"eta_usec,omitempty"`
- RetryCount *int32 `protobuf:"varint,4,opt,name=retry_count,def=0" json:"retry_count,omitempty"`
- Body []byte `protobuf:"bytes,5,opt,name=body" json:"body,omitempty"`
- Tag []byte `protobuf:"bytes,6,opt,name=tag" json:"tag,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueQueryAndOwnTasksResponse_Task) Reset() {
- *m = TaskQueueQueryAndOwnTasksResponse_Task{}
-}
-func (m *TaskQueueQueryAndOwnTasksResponse_Task) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueQueryAndOwnTasksResponse_Task) ProtoMessage() {}
-
-const Default_TaskQueueQueryAndOwnTasksResponse_Task_RetryCount int32 = 0
-
-func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetTaskName() []byte {
- if m != nil {
- return m.TaskName
- }
- return nil
-}
-
-func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetEtaUsec() int64 {
- if m != nil && m.EtaUsec != nil {
- return *m.EtaUsec
- }
- return 0
-}
-
-func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetRetryCount() int32 {
- if m != nil && m.RetryCount != nil {
- return *m.RetryCount
- }
- return Default_TaskQueueQueryAndOwnTasksResponse_Task_RetryCount
-}
-
-func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetBody() []byte {
- if m != nil {
- return m.Body
- }
- return nil
-}
-
-func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetTag() []byte {
- if m != nil {
- return m.Tag
- }
- return nil
-}
-
-type TaskQueueModifyTaskLeaseRequest struct {
- QueueName []byte `protobuf:"bytes,1,req,name=queue_name" json:"queue_name,omitempty"`
- TaskName []byte `protobuf:"bytes,2,req,name=task_name" json:"task_name,omitempty"`
- EtaUsec *int64 `protobuf:"varint,3,req,name=eta_usec" json:"eta_usec,omitempty"`
- LeaseSeconds *float64 `protobuf:"fixed64,4,req,name=lease_seconds" json:"lease_seconds,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueModifyTaskLeaseRequest) Reset() { *m = TaskQueueModifyTaskLeaseRequest{} }
-func (m *TaskQueueModifyTaskLeaseRequest) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueModifyTaskLeaseRequest) ProtoMessage() {}
-
-func (m *TaskQueueModifyTaskLeaseRequest) GetQueueName() []byte {
- if m != nil {
- return m.QueueName
- }
- return nil
-}
-
-func (m *TaskQueueModifyTaskLeaseRequest) GetTaskName() []byte {
- if m != nil {
- return m.TaskName
- }
- return nil
-}
-
-func (m *TaskQueueModifyTaskLeaseRequest) GetEtaUsec() int64 {
- if m != nil && m.EtaUsec != nil {
- return *m.EtaUsec
- }
- return 0
-}
-
-func (m *TaskQueueModifyTaskLeaseRequest) GetLeaseSeconds() float64 {
- if m != nil && m.LeaseSeconds != nil {
- return *m.LeaseSeconds
- }
- return 0
-}
-
-type TaskQueueModifyTaskLeaseResponse struct {
- UpdatedEtaUsec *int64 `protobuf:"varint,1,req,name=updated_eta_usec" json:"updated_eta_usec,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *TaskQueueModifyTaskLeaseResponse) Reset() { *m = TaskQueueModifyTaskLeaseResponse{} }
-func (m *TaskQueueModifyTaskLeaseResponse) String() string { return proto.CompactTextString(m) }
-func (*TaskQueueModifyTaskLeaseResponse) ProtoMessage() {}
-
-func (m *TaskQueueModifyTaskLeaseResponse) GetUpdatedEtaUsec() int64 {
- if m != nil && m.UpdatedEtaUsec != nil {
- return *m.UpdatedEtaUsec
- }
- return 0
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/taskqueue/taskqueue_service.proto b/vendor/google.golang.org/appengine/internal/taskqueue/taskqueue_service.proto
deleted file mode 100644
index 419aaf5..0000000
--- a/vendor/google.golang.org/appengine/internal/taskqueue/taskqueue_service.proto
+++ /dev/null
@@ -1,342 +0,0 @@
-syntax = "proto2";
-option go_package = "taskqueue";
-
-import "google.golang.org/appengine/internal/datastore/datastore_v3.proto";
-
-package appengine;
-
-message TaskQueueServiceError {
- enum ErrorCode {
- OK = 0;
- UNKNOWN_QUEUE = 1;
- TRANSIENT_ERROR = 2;
- INTERNAL_ERROR = 3;
- TASK_TOO_LARGE = 4;
- INVALID_TASK_NAME = 5;
- INVALID_QUEUE_NAME = 6;
- INVALID_URL = 7;
- INVALID_QUEUE_RATE = 8;
- PERMISSION_DENIED = 9;
- TASK_ALREADY_EXISTS = 10;
- TOMBSTONED_TASK = 11;
- INVALID_ETA = 12;
- INVALID_REQUEST = 13;
- UNKNOWN_TASK = 14;
- TOMBSTONED_QUEUE = 15;
- DUPLICATE_TASK_NAME = 16;
- SKIPPED = 17;
- TOO_MANY_TASKS = 18;
- INVALID_PAYLOAD = 19;
- INVALID_RETRY_PARAMETERS = 20;
- INVALID_QUEUE_MODE = 21;
- ACL_LOOKUP_ERROR = 22;
- TRANSACTIONAL_REQUEST_TOO_LARGE = 23;
- INCORRECT_CREATOR_NAME = 24;
- TASK_LEASE_EXPIRED = 25;
- QUEUE_PAUSED = 26;
- INVALID_TAG = 27;
-
- // Reserved range for the Datastore error codes.
- // Original Datastore error code is shifted by DATASTORE_ERROR offset.
- DATASTORE_ERROR = 10000;
- }
-}
-
-message TaskPayload {
- extensions 10 to max;
- option message_set_wire_format = true;
-}
-
-message TaskQueueRetryParameters {
- optional int32 retry_limit = 1;
- optional int64 age_limit_sec = 2;
-
- optional double min_backoff_sec = 3 [default = 0.1];
- optional double max_backoff_sec = 4 [default = 3600];
- optional int32 max_doublings = 5 [default = 16];
-}
-
-message TaskQueueAcl {
- repeated bytes user_email = 1;
- repeated bytes writer_email = 2;
-}
-
-message TaskQueueHttpHeader {
- required bytes key = 1;
- required bytes value = 2;
-}
-
-message TaskQueueMode {
- enum Mode {
- PUSH = 0;
- PULL = 1;
- }
-}
-
-message TaskQueueAddRequest {
- required bytes queue_name = 1;
- required bytes task_name = 2;
- required int64 eta_usec = 3;
-
- enum RequestMethod {
- GET = 1;
- POST = 2;
- HEAD = 3;
- PUT = 4;
- DELETE = 5;
- }
- optional RequestMethod method = 5 [default=POST];
-
- optional bytes url = 4;
-
- repeated group Header = 6 {
- required bytes key = 7;
- required bytes value = 8;
- }
-
- optional bytes body = 9 [ctype=CORD];
- optional Transaction transaction = 10;
- optional bytes app_id = 11;
-
- optional group CronTimetable = 12 {
- required bytes schedule = 13;
- required bytes timezone = 14;
- }
-
- optional bytes description = 15;
- optional TaskPayload payload = 16;
- optional TaskQueueRetryParameters retry_parameters = 17;
- optional TaskQueueMode.Mode mode = 18 [default=PUSH];
- optional bytes tag = 19;
-}
-
-message TaskQueueAddResponse {
- optional bytes chosen_task_name = 1;
-}
-
-message TaskQueueBulkAddRequest {
- repeated TaskQueueAddRequest add_request = 1;
-}
-
-message TaskQueueBulkAddResponse {
- repeated group TaskResult = 1 {
- required TaskQueueServiceError.ErrorCode result = 2;
- optional bytes chosen_task_name = 3;
- }
-}
-
-message TaskQueueDeleteRequest {
- required bytes queue_name = 1;
- repeated bytes task_name = 2;
- optional bytes app_id = 3;
-}
-
-message TaskQueueDeleteResponse {
- repeated TaskQueueServiceError.ErrorCode result = 3;
-}
-
-message TaskQueueForceRunRequest {
- optional bytes app_id = 1;
- required bytes queue_name = 2;
- required bytes task_name = 3;
-}
-
-message TaskQueueForceRunResponse {
- required TaskQueueServiceError.ErrorCode result = 3;
-}
-
-message TaskQueueUpdateQueueRequest {
- optional bytes app_id = 1;
- required bytes queue_name = 2;
- required double bucket_refill_per_second = 3;
- required int32 bucket_capacity = 4;
- optional string user_specified_rate = 5;
- optional TaskQueueRetryParameters retry_parameters = 6;
- optional int32 max_concurrent_requests = 7;
- optional TaskQueueMode.Mode mode = 8 [default = PUSH];
- optional TaskQueueAcl acl = 9;
- repeated TaskQueueHttpHeader header_override = 10;
-}
-
-message TaskQueueUpdateQueueResponse {
-}
-
-message TaskQueueFetchQueuesRequest {
- optional bytes app_id = 1;
- required int32 max_rows = 2;
-}
-
-message TaskQueueFetchQueuesResponse {
- repeated group Queue = 1 {
- required bytes queue_name = 2;
- required double bucket_refill_per_second = 3;
- required double bucket_capacity = 4;
- optional string user_specified_rate = 5;
- required bool paused = 6 [default=false];
- optional TaskQueueRetryParameters retry_parameters = 7;
- optional int32 max_concurrent_requests = 8;
- optional TaskQueueMode.Mode mode = 9 [default = PUSH];
- optional TaskQueueAcl acl = 10;
- repeated TaskQueueHttpHeader header_override = 11;
- optional string creator_name = 12 [ctype=CORD, default="apphosting"];
- }
-}
-
-message TaskQueueFetchQueueStatsRequest {
- optional bytes app_id = 1;
- repeated bytes queue_name = 2;
- optional int32 max_num_tasks = 3 [default = 0];
-}
-
-message TaskQueueScannerQueueInfo {
- required int64 executed_last_minute = 1;
- required int64 executed_last_hour = 2;
- required double sampling_duration_seconds = 3;
- optional int32 requests_in_flight = 4;
- optional double enforced_rate = 5;
-}
-
-message TaskQueueFetchQueueStatsResponse {
- repeated group QueueStats = 1 {
- required int32 num_tasks = 2;
- required int64 oldest_eta_usec = 3;
- optional TaskQueueScannerQueueInfo scanner_info = 4;
- }
-}
-message TaskQueuePauseQueueRequest {
- required bytes app_id = 1;
- required bytes queue_name = 2;
- required bool pause = 3;
-}
-
-message TaskQueuePauseQueueResponse {
-}
-
-message TaskQueuePurgeQueueRequest {
- optional bytes app_id = 1;
- required bytes queue_name = 2;
-}
-
-message TaskQueuePurgeQueueResponse {
-}
-
-message TaskQueueDeleteQueueRequest {
- required bytes app_id = 1;
- required bytes queue_name = 2;
-}
-
-message TaskQueueDeleteQueueResponse {
-}
-
-message TaskQueueDeleteGroupRequest {
- required bytes app_id = 1;
-}
-
-message TaskQueueDeleteGroupResponse {
-}
-
-message TaskQueueQueryTasksRequest {
- optional bytes app_id = 1;
- required bytes queue_name = 2;
-
- optional bytes start_task_name = 3;
- optional int64 start_eta_usec = 4;
- optional bytes start_tag = 6;
- optional int32 max_rows = 5 [default = 1];
-}
-
-message TaskQueueQueryTasksResponse {
- repeated group Task = 1 {
- required bytes task_name = 2;
- required int64 eta_usec = 3;
- optional bytes url = 4;
-
- enum RequestMethod {
- GET = 1;
- POST = 2;
- HEAD = 3;
- PUT = 4;
- DELETE = 5;
- }
- optional RequestMethod method = 5;
-
- optional int32 retry_count = 6 [default=0];
-
- repeated group Header = 7 {
- required bytes key = 8;
- required bytes value = 9;
- }
-
- optional int32 body_size = 10;
- optional bytes body = 11 [ctype=CORD];
- required int64 creation_time_usec = 12;
-
- optional group CronTimetable = 13 {
- required bytes schedule = 14;
- required bytes timezone = 15;
- }
-
- optional group RunLog = 16 {
- required int64 dispatched_usec = 17;
- required int64 lag_usec = 18;
- required int64 elapsed_usec = 19;
- optional int64 response_code = 20;
- optional string retry_reason = 27;
- }
-
- optional bytes description = 21;
- optional TaskPayload payload = 22;
- optional TaskQueueRetryParameters retry_parameters = 23;
- optional int64 first_try_usec = 24;
- optional bytes tag = 25;
- optional int32 execution_count = 26 [default=0];
- }
-}
-
-message TaskQueueFetchTaskRequest {
- optional bytes app_id = 1;
- required bytes queue_name = 2;
- required bytes task_name = 3;
-}
-
-message TaskQueueFetchTaskResponse {
- required TaskQueueQueryTasksResponse task = 1;
-}
-
-message TaskQueueUpdateStorageLimitRequest {
- required bytes app_id = 1;
- required int64 limit = 2;
-}
-
-message TaskQueueUpdateStorageLimitResponse {
- required int64 new_limit = 1;
-}
-
-message TaskQueueQueryAndOwnTasksRequest {
- required bytes queue_name = 1;
- required double lease_seconds = 2;
- required int64 max_tasks = 3;
- optional bool group_by_tag = 4 [default=false];
- optional bytes tag = 5;
-}
-
-message TaskQueueQueryAndOwnTasksResponse {
- repeated group Task = 1 {
- required bytes task_name = 2;
- required int64 eta_usec = 3;
- optional int32 retry_count = 4 [default=0];
- optional bytes body = 5 [ctype=CORD];
- optional bytes tag = 6;
- }
-}
-
-message TaskQueueModifyTaskLeaseRequest {
- required bytes queue_name = 1;
- required bytes task_name = 2;
- required int64 eta_usec = 3;
- required double lease_seconds = 4;
-}
-
-message TaskQueueModifyTaskLeaseResponse {
- required int64 updated_eta_usec = 1;
-}
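
For context, the three messages above encode the pull-queue lease flow removed by this patch: QueryAndOwnTasks leases up to max_tasks tasks for lease_seconds, and ModifyTaskLease extends a lease by echoing back the task's name and current eta_usec, with updated_eta_usec in the response acting as the new lease handle. A minimal sketch of that round trip, assuming the usual internal.Call plumbing and the generated types deleted in taskqueue_service.pb.go above; the "taskqueue" service and method strings, the package name, and the aliases are assumptions, not the exact upstream wiring:

    // Sketch of the pull-queue lease round trip encoded by the messages above.
    package pullqueue // illustrative package; not part of this patch

    import (
    	"github.com/golang/protobuf/proto"
    	netcontext "golang.org/x/net/context"

    	"google.golang.org/appengine/internal"
    	pb "google.golang.org/appengine/internal/taskqueue"
    )

    // leaseAndExtend leases up to n tasks for leaseSec seconds, then extends the
    // lease on the first one and returns its new ETA in microseconds.
    func leaseAndExtend(c netcontext.Context, queue string, n int64, leaseSec float64) (int64, error) {
    	req := &pb.TaskQueueQueryAndOwnTasksRequest{
    		QueueName:    []byte(queue),
    		LeaseSeconds: proto.Float64(leaseSec),
    		MaxTasks:     proto.Int64(n),
    	}
    	res := &pb.TaskQueueQueryAndOwnTasksResponse{}
    	if err := internal.Call(c, "taskqueue", "QueryAndOwnTasks", req, res); err != nil {
    		return 0, err
    	}
    	if len(res.Task) == 0 {
    		return 0, nil // nothing available to lease
    	}
    	t := res.Task[0]
    	mod := &pb.TaskQueueModifyTaskLeaseRequest{
    		QueueName:    []byte(queue),
    		TaskName:     t.TaskName,
    		EtaUsec:      proto.Int64(t.GetEtaUsec()),
    		LeaseSeconds: proto.Float64(leaseSec),
    	}
    	modRes := &pb.TaskQueueModifyTaskLeaseResponse{}
    	if err := internal.Call(c, "taskqueue", "ModifyTaskLease", mod, modRes); err != nil {
    		return 0, err
    	}
    	return modRes.GetUpdatedEtaUsec(), nil
    }
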
diff --git a/vendor/google.golang.org/appengine/internal/transaction.go b/vendor/google.golang.org/appengine/internal/transaction.go
deleted file mode 100644
index 28a6d18..0000000
--- a/vendor/google.golang.org/appengine/internal/transaction.go
+++ /dev/null
@@ -1,107 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package internal
-
-// This file implements hooks for applying datastore transactions.
-
-import (
- "errors"
- "reflect"
-
- "github.com/golang/protobuf/proto"
- netcontext "golang.org/x/net/context"
-
- basepb "google.golang.org/appengine/internal/base"
- pb "google.golang.org/appengine/internal/datastore"
-)
-
-var transactionSetters = make(map[reflect.Type]reflect.Value)
-
-// RegisterTransactionSetter registers a function that sets transaction information
-// in a protocol buffer message. f should be a function with two arguments,
-// the first being a protocol buffer type, and the second being *datastore.Transaction.
-func RegisterTransactionSetter(f interface{}) {
- v := reflect.ValueOf(f)
- transactionSetters[v.Type().In(0)] = v
-}
-
-// applyTransaction applies the transaction t to message pb
-// by using the relevant setter passed to RegisterTransactionSetter.
-func applyTransaction(pb proto.Message, t *pb.Transaction) {
- v := reflect.ValueOf(pb)
- if f, ok := transactionSetters[v.Type()]; ok {
- f.Call([]reflect.Value{v, reflect.ValueOf(t)})
- }
-}
-
-var transactionKey = "used for *Transaction"
-
-func transactionFromContext(ctx netcontext.Context) *transaction {
- t, _ := ctx.Value(&transactionKey).(*transaction)
- return t
-}
-
-func withTransaction(ctx netcontext.Context, t *transaction) netcontext.Context {
- return netcontext.WithValue(ctx, &transactionKey, t)
-}
-
-type transaction struct {
- transaction pb.Transaction
- finished bool
-}
-
-var ErrConcurrentTransaction = errors.New("internal: concurrent transaction")
-
-func RunTransactionOnce(c netcontext.Context, f func(netcontext.Context) error, xg bool) error {
- if transactionFromContext(c) != nil {
- return errors.New("nested transactions are not supported")
- }
-
- // Begin the transaction.
- t := &transaction{}
- req := &pb.BeginTransactionRequest{
- App: proto.String(FullyQualifiedAppID(c)),
- }
- if xg {
- req.AllowMultipleEg = proto.Bool(true)
- }
- if err := Call(c, "datastore_v3", "BeginTransaction", req, &t.transaction); err != nil {
- return err
- }
-
- // Call f, rolling back the transaction if f returns a non-nil error, or panics.
- // The panic is not recovered.
- defer func() {
- if t.finished {
- return
- }
- t.finished = true
- // Ignore the error return value, since we are already returning a non-nil
- // error (or we're panicking).
- Call(c, "datastore_v3", "Rollback", &t.transaction, &basepb.VoidProto{})
- }()
- if err := f(withTransaction(c, t)); err != nil {
- return err
- }
- t.finished = true
-
- // Commit the transaction.
- res := &pb.CommitResponse{}
- err := Call(c, "datastore_v3", "Commit", &t.transaction, res)
- if ae, ok := err.(*APIError); ok {
- /* TODO: restore this conditional
- if appengine.IsDevAppServer() {
- */
- // The Python Dev AppServer raises an ApplicationError with error code 2 (which is
- // Error.CONCURRENT_TRANSACTION) and message "Concurrency exception.".
- if ae.Code == int32(pb.Error_BAD_REQUEST) && ae.Detail == "ApplicationError: 2 Concurrency exception." {
- return ErrConcurrentTransaction
- }
- if ae.Code == int32(pb.Error_CONCURRENT_TRANSACTION) {
- return ErrConcurrentTransaction
- }
- }
- return err
-}
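
For context, the setter registry deleted here is the hook other appengine service packages rely on to have the open datastore transaction injected into their requests: per the RegisterTransactionSetter comment, a package registers a function taking its request type and a *datastore.Transaction, and applyTransaction looks the function up by the request's reflect.Type from inside RunTransactionOnce. A minimal sketch of such a registration, consistent with that contract but not necessarily the exact upstream code; it assumes the TaskQueueAddRequest type removed elsewhere in this patch, whose proto declares an optional Transaction field:

    // Sketch: wiring a request type into the removed transaction hook.
    package taskqueue // assumed layout; illustrative only

    import (
    	"google.golang.org/appengine/internal"
    	dspb "google.golang.org/appengine/internal/datastore"
    	pb "google.golang.org/appengine/internal/taskqueue"
    )

    func init() {
    	// Keyed by the request's type; applyTransaction calls this whenever a
    	// TaskQueueAddRequest is issued inside RunTransactionOnce, filling in
    	// the open datastore transaction handle.
    	internal.RegisterTransactionSetter(func(req *pb.TaskQueueAddRequest, t *dspb.Transaction) {
    		req.Transaction = t
    	})
    }

Keying the registry by reflect.Type lets the internal package stay free of imports on every service package while still letting each one opt in to transactional requests.
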
diff --git a/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go
deleted file mode 100644
index af463fb..0000000
--- a/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go
+++ /dev/null
@@ -1,355 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
-// DO NOT EDIT!
-
-/*
-Package urlfetch is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
-
-It has these top-level messages:
- URLFetchServiceError
- URLFetchRequest
- URLFetchResponse
-*/
-package urlfetch
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type URLFetchServiceError_ErrorCode int32
-
-const (
- URLFetchServiceError_OK URLFetchServiceError_ErrorCode = 0
- URLFetchServiceError_INVALID_URL URLFetchServiceError_ErrorCode = 1
- URLFetchServiceError_FETCH_ERROR URLFetchServiceError_ErrorCode = 2
- URLFetchServiceError_UNSPECIFIED_ERROR URLFetchServiceError_ErrorCode = 3
- URLFetchServiceError_RESPONSE_TOO_LARGE URLFetchServiceError_ErrorCode = 4
- URLFetchServiceError_DEADLINE_EXCEEDED URLFetchServiceError_ErrorCode = 5
- URLFetchServiceError_SSL_CERTIFICATE_ERROR URLFetchServiceError_ErrorCode = 6
- URLFetchServiceError_DNS_ERROR URLFetchServiceError_ErrorCode = 7
- URLFetchServiceError_CLOSED URLFetchServiceError_ErrorCode = 8
- URLFetchServiceError_INTERNAL_TRANSIENT_ERROR URLFetchServiceError_ErrorCode = 9
- URLFetchServiceError_TOO_MANY_REDIRECTS URLFetchServiceError_ErrorCode = 10
- URLFetchServiceError_MALFORMED_REPLY URLFetchServiceError_ErrorCode = 11
- URLFetchServiceError_CONNECTION_ERROR URLFetchServiceError_ErrorCode = 12
-)
-
-var URLFetchServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "INVALID_URL",
- 2: "FETCH_ERROR",
- 3: "UNSPECIFIED_ERROR",
- 4: "RESPONSE_TOO_LARGE",
- 5: "DEADLINE_EXCEEDED",
- 6: "SSL_CERTIFICATE_ERROR",
- 7: "DNS_ERROR",
- 8: "CLOSED",
- 9: "INTERNAL_TRANSIENT_ERROR",
- 10: "TOO_MANY_REDIRECTS",
- 11: "MALFORMED_REPLY",
- 12: "CONNECTION_ERROR",
-}
-var URLFetchServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "INVALID_URL": 1,
- "FETCH_ERROR": 2,
- "UNSPECIFIED_ERROR": 3,
- "RESPONSE_TOO_LARGE": 4,
- "DEADLINE_EXCEEDED": 5,
- "SSL_CERTIFICATE_ERROR": 6,
- "DNS_ERROR": 7,
- "CLOSED": 8,
- "INTERNAL_TRANSIENT_ERROR": 9,
- "TOO_MANY_REDIRECTS": 10,
- "MALFORMED_REPLY": 11,
- "CONNECTION_ERROR": 12,
-}
-
-func (x URLFetchServiceError_ErrorCode) Enum() *URLFetchServiceError_ErrorCode {
- p := new(URLFetchServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x URLFetchServiceError_ErrorCode) String() string {
- return proto.EnumName(URLFetchServiceError_ErrorCode_name, int32(x))
-}
-func (x *URLFetchServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(URLFetchServiceError_ErrorCode_value, data, "URLFetchServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = URLFetchServiceError_ErrorCode(value)
- return nil
-}
-
-type URLFetchRequest_RequestMethod int32
-
-const (
- URLFetchRequest_GET URLFetchRequest_RequestMethod = 1
- URLFetchRequest_POST URLFetchRequest_RequestMethod = 2
- URLFetchRequest_HEAD URLFetchRequest_RequestMethod = 3
- URLFetchRequest_PUT URLFetchRequest_RequestMethod = 4
- URLFetchRequest_DELETE URLFetchRequest_RequestMethod = 5
- URLFetchRequest_PATCH URLFetchRequest_RequestMethod = 6
-)
-
-var URLFetchRequest_RequestMethod_name = map[int32]string{
- 1: "GET",
- 2: "POST",
- 3: "HEAD",
- 4: "PUT",
- 5: "DELETE",
- 6: "PATCH",
-}
-var URLFetchRequest_RequestMethod_value = map[string]int32{
- "GET": 1,
- "POST": 2,
- "HEAD": 3,
- "PUT": 4,
- "DELETE": 5,
- "PATCH": 6,
-}
-
-func (x URLFetchRequest_RequestMethod) Enum() *URLFetchRequest_RequestMethod {
- p := new(URLFetchRequest_RequestMethod)
- *p = x
- return p
-}
-func (x URLFetchRequest_RequestMethod) String() string {
- return proto.EnumName(URLFetchRequest_RequestMethod_name, int32(x))
-}
-func (x *URLFetchRequest_RequestMethod) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(URLFetchRequest_RequestMethod_value, data, "URLFetchRequest_RequestMethod")
- if err != nil {
- return err
- }
- *x = URLFetchRequest_RequestMethod(value)
- return nil
-}
-
-type URLFetchServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *URLFetchServiceError) Reset() { *m = URLFetchServiceError{} }
-func (m *URLFetchServiceError) String() string { return proto.CompactTextString(m) }
-func (*URLFetchServiceError) ProtoMessage() {}
-
-type URLFetchRequest struct {
- Method *URLFetchRequest_RequestMethod `protobuf:"varint,1,req,name=Method,enum=appengine.URLFetchRequest_RequestMethod" json:"Method,omitempty"`
- Url *string `protobuf:"bytes,2,req,name=Url" json:"Url,omitempty"`
- Header []*URLFetchRequest_Header `protobuf:"group,3,rep,name=Header" json:"header,omitempty"`
- Payload []byte `protobuf:"bytes,6,opt,name=Payload" json:"Payload,omitempty"`
- FollowRedirects *bool `protobuf:"varint,7,opt,name=FollowRedirects,def=1" json:"FollowRedirects,omitempty"`
- Deadline *float64 `protobuf:"fixed64,8,opt,name=Deadline" json:"Deadline,omitempty"`
- MustValidateServerCertificate *bool `protobuf:"varint,9,opt,name=MustValidateServerCertificate,def=1" json:"MustValidateServerCertificate,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *URLFetchRequest) Reset() { *m = URLFetchRequest{} }
-func (m *URLFetchRequest) String() string { return proto.CompactTextString(m) }
-func (*URLFetchRequest) ProtoMessage() {}
-
-const Default_URLFetchRequest_FollowRedirects bool = true
-const Default_URLFetchRequest_MustValidateServerCertificate bool = true
-
-func (m *URLFetchRequest) GetMethod() URLFetchRequest_RequestMethod {
- if m != nil && m.Method != nil {
- return *m.Method
- }
- return URLFetchRequest_GET
-}
-
-func (m *URLFetchRequest) GetUrl() string {
- if m != nil && m.Url != nil {
- return *m.Url
- }
- return ""
-}
-
-func (m *URLFetchRequest) GetHeader() []*URLFetchRequest_Header {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *URLFetchRequest) GetPayload() []byte {
- if m != nil {
- return m.Payload
- }
- return nil
-}
-
-func (m *URLFetchRequest) GetFollowRedirects() bool {
- if m != nil && m.FollowRedirects != nil {
- return *m.FollowRedirects
- }
- return Default_URLFetchRequest_FollowRedirects
-}
-
-func (m *URLFetchRequest) GetDeadline() float64 {
- if m != nil && m.Deadline != nil {
- return *m.Deadline
- }
- return 0
-}
-
-func (m *URLFetchRequest) GetMustValidateServerCertificate() bool {
- if m != nil && m.MustValidateServerCertificate != nil {
- return *m.MustValidateServerCertificate
- }
- return Default_URLFetchRequest_MustValidateServerCertificate
-}
-
-type URLFetchRequest_Header struct {
- Key *string `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
- Value *string `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *URLFetchRequest_Header) Reset() { *m = URLFetchRequest_Header{} }
-func (m *URLFetchRequest_Header) String() string { return proto.CompactTextString(m) }
-func (*URLFetchRequest_Header) ProtoMessage() {}
-
-func (m *URLFetchRequest_Header) GetKey() string {
- if m != nil && m.Key != nil {
- return *m.Key
- }
- return ""
-}
-
-func (m *URLFetchRequest_Header) GetValue() string {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return ""
-}
-
-type URLFetchResponse struct {
- Content []byte `protobuf:"bytes,1,opt,name=Content" json:"Content,omitempty"`
- StatusCode *int32 `protobuf:"varint,2,req,name=StatusCode" json:"StatusCode,omitempty"`
- Header []*URLFetchResponse_Header `protobuf:"group,3,rep,name=Header" json:"header,omitempty"`
- ContentWasTruncated *bool `protobuf:"varint,6,opt,name=ContentWasTruncated,def=0" json:"ContentWasTruncated,omitempty"`
- ExternalBytesSent *int64 `protobuf:"varint,7,opt,name=ExternalBytesSent" json:"ExternalBytesSent,omitempty"`
- ExternalBytesReceived *int64 `protobuf:"varint,8,opt,name=ExternalBytesReceived" json:"ExternalBytesReceived,omitempty"`
- FinalUrl *string `protobuf:"bytes,9,opt,name=FinalUrl" json:"FinalUrl,omitempty"`
- ApiCpuMilliseconds *int64 `protobuf:"varint,10,opt,name=ApiCpuMilliseconds,def=0" json:"ApiCpuMilliseconds,omitempty"`
- ApiBytesSent *int64 `protobuf:"varint,11,opt,name=ApiBytesSent,def=0" json:"ApiBytesSent,omitempty"`
- ApiBytesReceived *int64 `protobuf:"varint,12,opt,name=ApiBytesReceived,def=0" json:"ApiBytesReceived,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *URLFetchResponse) Reset() { *m = URLFetchResponse{} }
-func (m *URLFetchResponse) String() string { return proto.CompactTextString(m) }
-func (*URLFetchResponse) ProtoMessage() {}
-
-const Default_URLFetchResponse_ContentWasTruncated bool = false
-const Default_URLFetchResponse_ApiCpuMilliseconds int64 = 0
-const Default_URLFetchResponse_ApiBytesSent int64 = 0
-const Default_URLFetchResponse_ApiBytesReceived int64 = 0
-
-func (m *URLFetchResponse) GetContent() []byte {
- if m != nil {
- return m.Content
- }
- return nil
-}
-
-func (m *URLFetchResponse) GetStatusCode() int32 {
- if m != nil && m.StatusCode != nil {
- return *m.StatusCode
- }
- return 0
-}
-
-func (m *URLFetchResponse) GetHeader() []*URLFetchResponse_Header {
- if m != nil {
- return m.Header
- }
- return nil
-}
-
-func (m *URLFetchResponse) GetContentWasTruncated() bool {
- if m != nil && m.ContentWasTruncated != nil {
- return *m.ContentWasTruncated
- }
- return Default_URLFetchResponse_ContentWasTruncated
-}
-
-func (m *URLFetchResponse) GetExternalBytesSent() int64 {
- if m != nil && m.ExternalBytesSent != nil {
- return *m.ExternalBytesSent
- }
- return 0
-}
-
-func (m *URLFetchResponse) GetExternalBytesReceived() int64 {
- if m != nil && m.ExternalBytesReceived != nil {
- return *m.ExternalBytesReceived
- }
- return 0
-}
-
-func (m *URLFetchResponse) GetFinalUrl() string {
- if m != nil && m.FinalUrl != nil {
- return *m.FinalUrl
- }
- return ""
-}
-
-func (m *URLFetchResponse) GetApiCpuMilliseconds() int64 {
- if m != nil && m.ApiCpuMilliseconds != nil {
- return *m.ApiCpuMilliseconds
- }
- return Default_URLFetchResponse_ApiCpuMilliseconds
-}
-
-func (m *URLFetchResponse) GetApiBytesSent() int64 {
- if m != nil && m.ApiBytesSent != nil {
- return *m.ApiBytesSent
- }
- return Default_URLFetchResponse_ApiBytesSent
-}
-
-func (m *URLFetchResponse) GetApiBytesReceived() int64 {
- if m != nil && m.ApiBytesReceived != nil {
- return *m.ApiBytesReceived
- }
- return Default_URLFetchResponse_ApiBytesReceived
-}
-
-type URLFetchResponse_Header struct {
- Key *string `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
- Value *string `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *URLFetchResponse_Header) Reset() { *m = URLFetchResponse_Header{} }
-func (m *URLFetchResponse_Header) String() string { return proto.CompactTextString(m) }
-func (*URLFetchResponse_Header) ProtoMessage() {}
-
-func (m *URLFetchResponse_Header) GetKey() string {
- if m != nil && m.Key != nil {
- return *m.Key
- }
- return ""
-}
-
-func (m *URLFetchResponse_Header) GetValue() string {
- if m != nil && m.Value != nil {
- return *m.Value
- }
- return ""
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
deleted file mode 100644
index f695edf..0000000
--- a/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
+++ /dev/null
@@ -1,64 +0,0 @@
-syntax = "proto2";
-option go_package = "urlfetch";
-
-package appengine;
-
-message URLFetchServiceError {
- enum ErrorCode {
- OK = 0;
- INVALID_URL = 1;
- FETCH_ERROR = 2;
- UNSPECIFIED_ERROR = 3;
- RESPONSE_TOO_LARGE = 4;
- DEADLINE_EXCEEDED = 5;
- SSL_CERTIFICATE_ERROR = 6;
- DNS_ERROR = 7;
- CLOSED = 8;
- INTERNAL_TRANSIENT_ERROR = 9;
- TOO_MANY_REDIRECTS = 10;
- MALFORMED_REPLY = 11;
- CONNECTION_ERROR = 12;
- }
-}
-
-message URLFetchRequest {
- enum RequestMethod {
- GET = 1;
- POST = 2;
- HEAD = 3;
- PUT = 4;
- DELETE = 5;
- PATCH = 6;
- }
- required RequestMethod Method = 1;
- required string Url = 2;
- repeated group Header = 3 {
- required string Key = 4;
- required string Value = 5;
- }
- optional bytes Payload = 6 [ctype=CORD];
-
- optional bool FollowRedirects = 7 [default=true];
-
- optional double Deadline = 8;
-
- optional bool MustValidateServerCertificate = 9 [default=true];
-}
-
-message URLFetchResponse {
- optional bytes Content = 1;
- required int32 StatusCode = 2;
- repeated group Header = 3 {
- required string Key = 4;
- required string Value = 5;
- }
- optional bool ContentWasTruncated = 6 [default=false];
- optional int64 ExternalBytesSent = 7;
- optional int64 ExternalBytesReceived = 8;
-
- optional string FinalUrl = 9;
-
- optional int64 ApiCpuMilliseconds = 10 [default=0];
- optional int64 ApiBytesSent = 11 [default=0];
- optional int64 ApiBytesReceived = 12 [default=0];
-}
diff --git a/vendor/google.golang.org/appengine/internal/user/user_service.pb.go b/vendor/google.golang.org/appengine/internal/user/user_service.pb.go
deleted file mode 100644
index 6b52ffc..0000000
--- a/vendor/google.golang.org/appengine/internal/user/user_service.pb.go
+++ /dev/null
@@ -1,289 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/user/user_service.proto
-// DO NOT EDIT!
-
-/*
-Package user is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/user/user_service.proto
-
-It has these top-level messages:
- UserServiceError
- CreateLoginURLRequest
- CreateLoginURLResponse
- CreateLogoutURLRequest
- CreateLogoutURLResponse
- GetOAuthUserRequest
- GetOAuthUserResponse
- CheckOAuthSignatureRequest
- CheckOAuthSignatureResponse
-*/
-package user
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type UserServiceError_ErrorCode int32
-
-const (
- UserServiceError_OK UserServiceError_ErrorCode = 0
- UserServiceError_REDIRECT_URL_TOO_LONG UserServiceError_ErrorCode = 1
- UserServiceError_NOT_ALLOWED UserServiceError_ErrorCode = 2
- UserServiceError_OAUTH_INVALID_TOKEN UserServiceError_ErrorCode = 3
- UserServiceError_OAUTH_INVALID_REQUEST UserServiceError_ErrorCode = 4
- UserServiceError_OAUTH_ERROR UserServiceError_ErrorCode = 5
-)
-
-var UserServiceError_ErrorCode_name = map[int32]string{
- 0: "OK",
- 1: "REDIRECT_URL_TOO_LONG",
- 2: "NOT_ALLOWED",
- 3: "OAUTH_INVALID_TOKEN",
- 4: "OAUTH_INVALID_REQUEST",
- 5: "OAUTH_ERROR",
-}
-var UserServiceError_ErrorCode_value = map[string]int32{
- "OK": 0,
- "REDIRECT_URL_TOO_LONG": 1,
- "NOT_ALLOWED": 2,
- "OAUTH_INVALID_TOKEN": 3,
- "OAUTH_INVALID_REQUEST": 4,
- "OAUTH_ERROR": 5,
-}
-
-func (x UserServiceError_ErrorCode) Enum() *UserServiceError_ErrorCode {
- p := new(UserServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x UserServiceError_ErrorCode) String() string {
- return proto.EnumName(UserServiceError_ErrorCode_name, int32(x))
-}
-func (x *UserServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(UserServiceError_ErrorCode_value, data, "UserServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = UserServiceError_ErrorCode(value)
- return nil
-}
-
-type UserServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *UserServiceError) Reset() { *m = UserServiceError{} }
-func (m *UserServiceError) String() string { return proto.CompactTextString(m) }
-func (*UserServiceError) ProtoMessage() {}
-
-type CreateLoginURLRequest struct {
- DestinationUrl *string `protobuf:"bytes,1,req,name=destination_url" json:"destination_url,omitempty"`
- AuthDomain *string `protobuf:"bytes,2,opt,name=auth_domain" json:"auth_domain,omitempty"`
- FederatedIdentity *string `protobuf:"bytes,3,opt,name=federated_identity,def=" json:"federated_identity,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateLoginURLRequest) Reset() { *m = CreateLoginURLRequest{} }
-func (m *CreateLoginURLRequest) String() string { return proto.CompactTextString(m) }
-func (*CreateLoginURLRequest) ProtoMessage() {}
-
-func (m *CreateLoginURLRequest) GetDestinationUrl() string {
- if m != nil && m.DestinationUrl != nil {
- return *m.DestinationUrl
- }
- return ""
-}
-
-func (m *CreateLoginURLRequest) GetAuthDomain() string {
- if m != nil && m.AuthDomain != nil {
- return *m.AuthDomain
- }
- return ""
-}
-
-func (m *CreateLoginURLRequest) GetFederatedIdentity() string {
- if m != nil && m.FederatedIdentity != nil {
- return *m.FederatedIdentity
- }
- return ""
-}
-
-type CreateLoginURLResponse struct {
- LoginUrl *string `protobuf:"bytes,1,req,name=login_url" json:"login_url,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateLoginURLResponse) Reset() { *m = CreateLoginURLResponse{} }
-func (m *CreateLoginURLResponse) String() string { return proto.CompactTextString(m) }
-func (*CreateLoginURLResponse) ProtoMessage() {}
-
-func (m *CreateLoginURLResponse) GetLoginUrl() string {
- if m != nil && m.LoginUrl != nil {
- return *m.LoginUrl
- }
- return ""
-}
-
-type CreateLogoutURLRequest struct {
- DestinationUrl *string `protobuf:"bytes,1,req,name=destination_url" json:"destination_url,omitempty"`
- AuthDomain *string `protobuf:"bytes,2,opt,name=auth_domain" json:"auth_domain,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateLogoutURLRequest) Reset() { *m = CreateLogoutURLRequest{} }
-func (m *CreateLogoutURLRequest) String() string { return proto.CompactTextString(m) }
-func (*CreateLogoutURLRequest) ProtoMessage() {}
-
-func (m *CreateLogoutURLRequest) GetDestinationUrl() string {
- if m != nil && m.DestinationUrl != nil {
- return *m.DestinationUrl
- }
- return ""
-}
-
-func (m *CreateLogoutURLRequest) GetAuthDomain() string {
- if m != nil && m.AuthDomain != nil {
- return *m.AuthDomain
- }
- return ""
-}
-
-type CreateLogoutURLResponse struct {
- LogoutUrl *string `protobuf:"bytes,1,req,name=logout_url" json:"logout_url,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CreateLogoutURLResponse) Reset() { *m = CreateLogoutURLResponse{} }
-func (m *CreateLogoutURLResponse) String() string { return proto.CompactTextString(m) }
-func (*CreateLogoutURLResponse) ProtoMessage() {}
-
-func (m *CreateLogoutURLResponse) GetLogoutUrl() string {
- if m != nil && m.LogoutUrl != nil {
- return *m.LogoutUrl
- }
- return ""
-}
-
-type GetOAuthUserRequest struct {
- Scope *string `protobuf:"bytes,1,opt,name=scope" json:"scope,omitempty"`
- Scopes []string `protobuf:"bytes,2,rep,name=scopes" json:"scopes,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetOAuthUserRequest) Reset() { *m = GetOAuthUserRequest{} }
-func (m *GetOAuthUserRequest) String() string { return proto.CompactTextString(m) }
-func (*GetOAuthUserRequest) ProtoMessage() {}
-
-func (m *GetOAuthUserRequest) GetScope() string {
- if m != nil && m.Scope != nil {
- return *m.Scope
- }
- return ""
-}
-
-func (m *GetOAuthUserRequest) GetScopes() []string {
- if m != nil {
- return m.Scopes
- }
- return nil
-}
-
-type GetOAuthUserResponse struct {
- Email *string `protobuf:"bytes,1,req,name=email" json:"email,omitempty"`
- UserId *string `protobuf:"bytes,2,req,name=user_id" json:"user_id,omitempty"`
- AuthDomain *string `protobuf:"bytes,3,req,name=auth_domain" json:"auth_domain,omitempty"`
- UserOrganization *string `protobuf:"bytes,4,opt,name=user_organization,def=" json:"user_organization,omitempty"`
- IsAdmin *bool `protobuf:"varint,5,opt,name=is_admin,def=0" json:"is_admin,omitempty"`
- ClientId *string `protobuf:"bytes,6,opt,name=client_id,def=" json:"client_id,omitempty"`
- Scopes []string `protobuf:"bytes,7,rep,name=scopes" json:"scopes,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *GetOAuthUserResponse) Reset() { *m = GetOAuthUserResponse{} }
-func (m *GetOAuthUserResponse) String() string { return proto.CompactTextString(m) }
-func (*GetOAuthUserResponse) ProtoMessage() {}
-
-const Default_GetOAuthUserResponse_IsAdmin bool = false
-
-func (m *GetOAuthUserResponse) GetEmail() string {
- if m != nil && m.Email != nil {
- return *m.Email
- }
- return ""
-}
-
-func (m *GetOAuthUserResponse) GetUserId() string {
- if m != nil && m.UserId != nil {
- return *m.UserId
- }
- return ""
-}
-
-func (m *GetOAuthUserResponse) GetAuthDomain() string {
- if m != nil && m.AuthDomain != nil {
- return *m.AuthDomain
- }
- return ""
-}
-
-func (m *GetOAuthUserResponse) GetUserOrganization() string {
- if m != nil && m.UserOrganization != nil {
- return *m.UserOrganization
- }
- return ""
-}
-
-func (m *GetOAuthUserResponse) GetIsAdmin() bool {
- if m != nil && m.IsAdmin != nil {
- return *m.IsAdmin
- }
- return Default_GetOAuthUserResponse_IsAdmin
-}
-
-func (m *GetOAuthUserResponse) GetClientId() string {
- if m != nil && m.ClientId != nil {
- return *m.ClientId
- }
- return ""
-}
-
-func (m *GetOAuthUserResponse) GetScopes() []string {
- if m != nil {
- return m.Scopes
- }
- return nil
-}
-
-type CheckOAuthSignatureRequest struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CheckOAuthSignatureRequest) Reset() { *m = CheckOAuthSignatureRequest{} }
-func (m *CheckOAuthSignatureRequest) String() string { return proto.CompactTextString(m) }
-func (*CheckOAuthSignatureRequest) ProtoMessage() {}
-
-type CheckOAuthSignatureResponse struct {
- OauthConsumerKey *string `protobuf:"bytes,1,req,name=oauth_consumer_key" json:"oauth_consumer_key,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *CheckOAuthSignatureResponse) Reset() { *m = CheckOAuthSignatureResponse{} }
-func (m *CheckOAuthSignatureResponse) String() string { return proto.CompactTextString(m) }
-func (*CheckOAuthSignatureResponse) ProtoMessage() {}
-
-func (m *CheckOAuthSignatureResponse) GetOauthConsumerKey() string {
- if m != nil && m.OauthConsumerKey != nil {
- return *m.OauthConsumerKey
- }
- return ""
-}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/user/user_service.proto b/vendor/google.golang.org/appengine/internal/user/user_service.proto
deleted file mode 100644
index f3e9693..0000000
--- a/vendor/google.golang.org/appengine/internal/user/user_service.proto
+++ /dev/null
@@ -1,58 +0,0 @@
-syntax = "proto2";
-option go_package = "user";
-
-package appengine;
-
-message UserServiceError {
- enum ErrorCode {
- OK = 0;
- REDIRECT_URL_TOO_LONG = 1;
- NOT_ALLOWED = 2;
- OAUTH_INVALID_TOKEN = 3;
- OAUTH_INVALID_REQUEST = 4;
- OAUTH_ERROR = 5;
- }
-}
-
-message CreateLoginURLRequest {
- required string destination_url = 1;
- optional string auth_domain = 2;
- optional string federated_identity = 3 [default = ""];
-}
-
-message CreateLoginURLResponse {
- required string login_url = 1;
-}
-
-message CreateLogoutURLRequest {
- required string destination_url = 1;
- optional string auth_domain = 2;
-}
-
-message CreateLogoutURLResponse {
- required string logout_url = 1;
-}
-
-message GetOAuthUserRequest {
- optional string scope = 1;
-
- repeated string scopes = 2;
-}
-
-message GetOAuthUserResponse {
- required string email = 1;
- required string user_id = 2;
- required string auth_domain = 3;
- optional string user_organization = 4 [default = ""];
- optional bool is_admin = 5 [default = false];
- optional string client_id = 6 [default = ""];
-
- repeated string scopes = 7;
-}
-
-message CheckOAuthSignatureRequest {
-}
-
-message CheckOAuthSignatureResponse {
- required string oauth_consumer_key = 1;
-}
diff --git a/vendor/google.golang.org/appengine/internal/xmpp/xmpp_service.pb.go b/vendor/google.golang.org/appengine/internal/xmpp/xmpp_service.pb.go
deleted file mode 100644
index 6d5b0ae..0000000
--- a/vendor/google.golang.org/appengine/internal/xmpp/xmpp_service.pb.go
+++ /dev/null
@@ -1,427 +0,0 @@
-// Code generated by protoc-gen-go.
-// source: google.golang.org/appengine/internal/xmpp/xmpp_service.proto
-// DO NOT EDIT!
-
-/*
-Package xmpp is a generated protocol buffer package.
-
-It is generated from these files:
- google.golang.org/appengine/internal/xmpp/xmpp_service.proto
-
-It has these top-level messages:
- XmppServiceError
- PresenceRequest
- PresenceResponse
- BulkPresenceRequest
- BulkPresenceResponse
- XmppMessageRequest
- XmppMessageResponse
- XmppSendPresenceRequest
- XmppSendPresenceResponse
- XmppInviteRequest
- XmppInviteResponse
-*/
-package xmpp
-
-import proto "github.com/golang/protobuf/proto"
-import fmt "fmt"
-import math "math"
-
-// Reference imports to suppress errors if they are not otherwise used.
-var _ = proto.Marshal
-var _ = fmt.Errorf
-var _ = math.Inf
-
-type XmppServiceError_ErrorCode int32
-
-const (
- XmppServiceError_UNSPECIFIED_ERROR XmppServiceError_ErrorCode = 1
- XmppServiceError_INVALID_JID XmppServiceError_ErrorCode = 2
- XmppServiceError_NO_BODY XmppServiceError_ErrorCode = 3
- XmppServiceError_INVALID_XML XmppServiceError_ErrorCode = 4
- XmppServiceError_INVALID_TYPE XmppServiceError_ErrorCode = 5
- XmppServiceError_INVALID_SHOW XmppServiceError_ErrorCode = 6
- XmppServiceError_EXCEEDED_MAX_SIZE XmppServiceError_ErrorCode = 7
- XmppServiceError_APPID_ALIAS_REQUIRED XmppServiceError_ErrorCode = 8
- XmppServiceError_NONDEFAULT_MODULE XmppServiceError_ErrorCode = 9
-)
-
-var XmppServiceError_ErrorCode_name = map[int32]string{
- 1: "UNSPECIFIED_ERROR",
- 2: "INVALID_JID",
- 3: "NO_BODY",
- 4: "INVALID_XML",
- 5: "INVALID_TYPE",
- 6: "INVALID_SHOW",
- 7: "EXCEEDED_MAX_SIZE",
- 8: "APPID_ALIAS_REQUIRED",
- 9: "NONDEFAULT_MODULE",
-}
-var XmppServiceError_ErrorCode_value = map[string]int32{
- "UNSPECIFIED_ERROR": 1,
- "INVALID_JID": 2,
- "NO_BODY": 3,
- "INVALID_XML": 4,
- "INVALID_TYPE": 5,
- "INVALID_SHOW": 6,
- "EXCEEDED_MAX_SIZE": 7,
- "APPID_ALIAS_REQUIRED": 8,
- "NONDEFAULT_MODULE": 9,
-}
-
-func (x XmppServiceError_ErrorCode) Enum() *XmppServiceError_ErrorCode {
- p := new(XmppServiceError_ErrorCode)
- *p = x
- return p
-}
-func (x XmppServiceError_ErrorCode) String() string {
- return proto.EnumName(XmppServiceError_ErrorCode_name, int32(x))
-}
-func (x *XmppServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(XmppServiceError_ErrorCode_value, data, "XmppServiceError_ErrorCode")
- if err != nil {
- return err
- }
- *x = XmppServiceError_ErrorCode(value)
- return nil
-}
-
-type PresenceResponse_SHOW int32
-
-const (
- PresenceResponse_NORMAL PresenceResponse_SHOW = 0
- PresenceResponse_AWAY PresenceResponse_SHOW = 1
- PresenceResponse_DO_NOT_DISTURB PresenceResponse_SHOW = 2
- PresenceResponse_CHAT PresenceResponse_SHOW = 3
- PresenceResponse_EXTENDED_AWAY PresenceResponse_SHOW = 4
-)
-
-var PresenceResponse_SHOW_name = map[int32]string{
- 0: "NORMAL",
- 1: "AWAY",
- 2: "DO_NOT_DISTURB",
- 3: "CHAT",
- 4: "EXTENDED_AWAY",
-}
-var PresenceResponse_SHOW_value = map[string]int32{
- "NORMAL": 0,
- "AWAY": 1,
- "DO_NOT_DISTURB": 2,
- "CHAT": 3,
- "EXTENDED_AWAY": 4,
-}
-
-func (x PresenceResponse_SHOW) Enum() *PresenceResponse_SHOW {
- p := new(PresenceResponse_SHOW)
- *p = x
- return p
-}
-func (x PresenceResponse_SHOW) String() string {
- return proto.EnumName(PresenceResponse_SHOW_name, int32(x))
-}
-func (x *PresenceResponse_SHOW) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(PresenceResponse_SHOW_value, data, "PresenceResponse_SHOW")
- if err != nil {
- return err
- }
- *x = PresenceResponse_SHOW(value)
- return nil
-}
-
-type XmppMessageResponse_XmppMessageStatus int32
-
-const (
- XmppMessageResponse_NO_ERROR XmppMessageResponse_XmppMessageStatus = 0
- XmppMessageResponse_INVALID_JID XmppMessageResponse_XmppMessageStatus = 1
- XmppMessageResponse_OTHER_ERROR XmppMessageResponse_XmppMessageStatus = 2
-)
-
-var XmppMessageResponse_XmppMessageStatus_name = map[int32]string{
- 0: "NO_ERROR",
- 1: "INVALID_JID",
- 2: "OTHER_ERROR",
-}
-var XmppMessageResponse_XmppMessageStatus_value = map[string]int32{
- "NO_ERROR": 0,
- "INVALID_JID": 1,
- "OTHER_ERROR": 2,
-}
-
-func (x XmppMessageResponse_XmppMessageStatus) Enum() *XmppMessageResponse_XmppMessageStatus {
- p := new(XmppMessageResponse_XmppMessageStatus)
- *p = x
- return p
-}
-func (x XmppMessageResponse_XmppMessageStatus) String() string {
- return proto.EnumName(XmppMessageResponse_XmppMessageStatus_name, int32(x))
-}
-func (x *XmppMessageResponse_XmppMessageStatus) UnmarshalJSON(data []byte) error {
- value, err := proto.UnmarshalJSONEnum(XmppMessageResponse_XmppMessageStatus_value, data, "XmppMessageResponse_XmppMessageStatus")
- if err != nil {
- return err
- }
- *x = XmppMessageResponse_XmppMessageStatus(value)
- return nil
-}
-
-type XmppServiceError struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *XmppServiceError) Reset() { *m = XmppServiceError{} }
-func (m *XmppServiceError) String() string { return proto.CompactTextString(m) }
-func (*XmppServiceError) ProtoMessage() {}
-
-type PresenceRequest struct {
- Jid *string `protobuf:"bytes,1,req,name=jid" json:"jid,omitempty"`
- FromJid *string `protobuf:"bytes,2,opt,name=from_jid" json:"from_jid,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PresenceRequest) Reset() { *m = PresenceRequest{} }
-func (m *PresenceRequest) String() string { return proto.CompactTextString(m) }
-func (*PresenceRequest) ProtoMessage() {}
-
-func (m *PresenceRequest) GetJid() string {
- if m != nil && m.Jid != nil {
- return *m.Jid
- }
- return ""
-}
-
-func (m *PresenceRequest) GetFromJid() string {
- if m != nil && m.FromJid != nil {
- return *m.FromJid
- }
- return ""
-}
-
-type PresenceResponse struct {
- IsAvailable *bool `protobuf:"varint,1,req,name=is_available" json:"is_available,omitempty"`
- Presence *PresenceResponse_SHOW `protobuf:"varint,2,opt,name=presence,enum=appengine.PresenceResponse_SHOW" json:"presence,omitempty"`
- Valid *bool `protobuf:"varint,3,opt,name=valid" json:"valid,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *PresenceResponse) Reset() { *m = PresenceResponse{} }
-func (m *PresenceResponse) String() string { return proto.CompactTextString(m) }
-func (*PresenceResponse) ProtoMessage() {}
-
-func (m *PresenceResponse) GetIsAvailable() bool {
- if m != nil && m.IsAvailable != nil {
- return *m.IsAvailable
- }
- return false
-}
-
-func (m *PresenceResponse) GetPresence() PresenceResponse_SHOW {
- if m != nil && m.Presence != nil {
- return *m.Presence
- }
- return PresenceResponse_NORMAL
-}
-
-func (m *PresenceResponse) GetValid() bool {
- if m != nil && m.Valid != nil {
- return *m.Valid
- }
- return false
-}
-
-type BulkPresenceRequest struct {
- Jid []string `protobuf:"bytes,1,rep,name=jid" json:"jid,omitempty"`
- FromJid *string `protobuf:"bytes,2,opt,name=from_jid" json:"from_jid,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *BulkPresenceRequest) Reset() { *m = BulkPresenceRequest{} }
-func (m *BulkPresenceRequest) String() string { return proto.CompactTextString(m) }
-func (*BulkPresenceRequest) ProtoMessage() {}
-
-func (m *BulkPresenceRequest) GetJid() []string {
- if m != nil {
- return m.Jid
- }
- return nil
-}
-
-func (m *BulkPresenceRequest) GetFromJid() string {
- if m != nil && m.FromJid != nil {
- return *m.FromJid
- }
- return ""
-}
-
-type BulkPresenceResponse struct {
- PresenceResponse []*PresenceResponse `protobuf:"bytes,1,rep,name=presence_response" json:"presence_response,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *BulkPresenceResponse) Reset() { *m = BulkPresenceResponse{} }
-func (m *BulkPresenceResponse) String() string { return proto.CompactTextString(m) }
-func (*BulkPresenceResponse) ProtoMessage() {}
-
-func (m *BulkPresenceResponse) GetPresenceResponse() []*PresenceResponse {
- if m != nil {
- return m.PresenceResponse
- }
- return nil
-}
-
-type XmppMessageRequest struct {
- Jid []string `protobuf:"bytes,1,rep,name=jid" json:"jid,omitempty"`
- Body *string `protobuf:"bytes,2,req,name=body" json:"body,omitempty"`
- RawXml *bool `protobuf:"varint,3,opt,name=raw_xml,def=0" json:"raw_xml,omitempty"`
- Type *string `protobuf:"bytes,4,opt,name=type,def=chat" json:"type,omitempty"`
- FromJid *string `protobuf:"bytes,5,opt,name=from_jid" json:"from_jid,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *XmppMessageRequest) Reset() { *m = XmppMessageRequest{} }
-func (m *XmppMessageRequest) String() string { return proto.CompactTextString(m) }
-func (*XmppMessageRequest) ProtoMessage() {}
-
-const Default_XmppMessageRequest_RawXml bool = false
-const Default_XmppMessageRequest_Type string = "chat"
-
-func (m *XmppMessageRequest) GetJid() []string {
- if m != nil {
- return m.Jid
- }
- return nil
-}
-
-func (m *XmppMessageRequest) GetBody() string {
- if m != nil && m.Body != nil {
- return *m.Body
- }
- return ""
-}
-
-func (m *XmppMessageRequest) GetRawXml() bool {
- if m != nil && m.RawXml != nil {
- return *m.RawXml
- }
- return Default_XmppMessageRequest_RawXml
-}
-
-func (m *XmppMessageRequest) GetType() string {
- if m != nil && m.Type != nil {
- return *m.Type
- }
- return Default_XmppMessageRequest_Type
-}
-
-func (m *XmppMessageRequest) GetFromJid() string {
- if m != nil && m.FromJid != nil {
- return *m.FromJid
- }
- return ""
-}
-
-type XmppMessageResponse struct {
- Status []XmppMessageResponse_XmppMessageStatus `protobuf:"varint,1,rep,name=status,enum=appengine.XmppMessageResponse_XmppMessageStatus" json:"status,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *XmppMessageResponse) Reset() { *m = XmppMessageResponse{} }
-func (m *XmppMessageResponse) String() string { return proto.CompactTextString(m) }
-func (*XmppMessageResponse) ProtoMessage() {}
-
-func (m *XmppMessageResponse) GetStatus() []XmppMessageResponse_XmppMessageStatus {
- if m != nil {
- return m.Status
- }
- return nil
-}
-
-type XmppSendPresenceRequest struct {
- Jid *string `protobuf:"bytes,1,req,name=jid" json:"jid,omitempty"`
- Type *string `protobuf:"bytes,2,opt,name=type" json:"type,omitempty"`
- Show *string `protobuf:"bytes,3,opt,name=show" json:"show,omitempty"`
- Status *string `protobuf:"bytes,4,opt,name=status" json:"status,omitempty"`
- FromJid *string `protobuf:"bytes,5,opt,name=from_jid" json:"from_jid,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *XmppSendPresenceRequest) Reset() { *m = XmppSendPresenceRequest{} }
-func (m *XmppSendPresenceRequest) String() string { return proto.CompactTextString(m) }
-func (*XmppSendPresenceRequest) ProtoMessage() {}
-
-func (m *XmppSendPresenceRequest) GetJid() string {
- if m != nil && m.Jid != nil {
- return *m.Jid
- }
- return ""
-}
-
-func (m *XmppSendPresenceRequest) GetType() string {
- if m != nil && m.Type != nil {
- return *m.Type
- }
- return ""
-}
-
-func (m *XmppSendPresenceRequest) GetShow() string {
- if m != nil && m.Show != nil {
- return *m.Show
- }
- return ""
-}
-
-func (m *XmppSendPresenceRequest) GetStatus() string {
- if m != nil && m.Status != nil {
- return *m.Status
- }
- return ""
-}
-
-func (m *XmppSendPresenceRequest) GetFromJid() string {
- if m != nil && m.FromJid != nil {
- return *m.FromJid
- }
- return ""
-}
-
-type XmppSendPresenceResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *XmppSendPresenceResponse) Reset() { *m = XmppSendPresenceResponse{} }
-func (m *XmppSendPresenceResponse) String() string { return proto.CompactTextString(m) }
-func (*XmppSendPresenceResponse) ProtoMessage() {}
-
-type XmppInviteRequest struct {
- Jid *string `protobuf:"bytes,1,req,name=jid" json:"jid,omitempty"`
- FromJid *string `protobuf:"bytes,2,opt,name=from_jid" json:"from_jid,omitempty"`
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *XmppInviteRequest) Reset() { *m = XmppInviteRequest{} }
-func (m *XmppInviteRequest) String() string { return proto.CompactTextString(m) }
-func (*XmppInviteRequest) ProtoMessage() {}
-
-func (m *XmppInviteRequest) GetJid() string {
- if m != nil && m.Jid != nil {
- return *m.Jid
- }
- return ""
-}
-
-func (m *XmppInviteRequest) GetFromJid() string {
- if m != nil && m.FromJid != nil {
- return *m.FromJid
- }
- return ""
-}
-
-type XmppInviteResponse struct {
- XXX_unrecognized []byte `json:"-"`
-}
-
-func (m *XmppInviteResponse) Reset() { *m = XmppInviteResponse{} }
-func (m *XmppInviteResponse) String() string { return proto.CompactTextString(m) }
-func (*XmppInviteResponse) ProtoMessage() {}
-
-func init() {
-}
diff --git a/vendor/google.golang.org/appengine/internal/xmpp/xmpp_service.proto b/vendor/google.golang.org/appengine/internal/xmpp/xmpp_service.proto
deleted file mode 100644
index 472d52e..0000000
--- a/vendor/google.golang.org/appengine/internal/xmpp/xmpp_service.proto
+++ /dev/null
@@ -1,83 +0,0 @@
-syntax = "proto2";
-option go_package = "xmpp";
-
-package appengine;
-
-message XmppServiceError {
- enum ErrorCode {
- UNSPECIFIED_ERROR = 1;
- INVALID_JID = 2;
- NO_BODY = 3;
- INVALID_XML = 4;
- INVALID_TYPE = 5;
- INVALID_SHOW = 6;
- EXCEEDED_MAX_SIZE = 7;
- APPID_ALIAS_REQUIRED = 8;
- NONDEFAULT_MODULE = 9;
- }
-}
-
-message PresenceRequest {
- required string jid = 1;
- optional string from_jid = 2;
-}
-
-message PresenceResponse {
- enum SHOW {
- NORMAL = 0;
- AWAY = 1;
- DO_NOT_DISTURB = 2;
- CHAT = 3;
- EXTENDED_AWAY = 4;
- }
-
- required bool is_available = 1;
- optional SHOW presence = 2;
- optional bool valid = 3;
-}
-
-message BulkPresenceRequest {
- repeated string jid = 1;
- optional string from_jid = 2;
-}
-
-message BulkPresenceResponse {
- repeated PresenceResponse presence_response = 1;
-}
-
-message XmppMessageRequest {
- repeated string jid = 1;
- required string body = 2;
- optional bool raw_xml = 3 [ default = false ];
- optional string type = 4 [ default = "chat" ];
- optional string from_jid = 5;
-}
-
-message XmppMessageResponse {
- enum XmppMessageStatus {
- NO_ERROR = 0;
- INVALID_JID = 1;
- OTHER_ERROR = 2;
- }
-
- repeated XmppMessageStatus status = 1;
-}
-
-message XmppSendPresenceRequest {
- required string jid = 1;
- optional string type = 2;
- optional string show = 3;
- optional string status = 4;
- optional string from_jid = 5;
-}
-
-message XmppSendPresenceResponse {
-}
-
-message XmppInviteRequest {
- required string jid = 1;
- optional string from_jid = 2;
-}
-
-message XmppInviteResponse {
-}
diff --git a/vendor/google.golang.org/appengine/log/api.go b/vendor/google.golang.org/appengine/log/api.go
deleted file mode 100644
index 24d5860..0000000
--- a/vendor/google.golang.org/appengine/log/api.go
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package log
-
-// This file implements the logging API.
-
-import (
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
-)
-
-// Debugf formats its arguments according to the format, analogous to fmt.Printf,
-// and records the text as a log message at Debug level. The message will be associated
-// with the request linked with the provided context.
-func Debugf(ctx context.Context, format string, args ...interface{}) {
- internal.Logf(ctx, 0, format, args...)
-}
-
-// Infof is like Debugf, but at Info level.
-func Infof(ctx context.Context, format string, args ...interface{}) {
- internal.Logf(ctx, 1, format, args...)
-}
-
-// Warningf is like Debugf, but at Warning level.
-func Warningf(ctx context.Context, format string, args ...interface{}) {
- internal.Logf(ctx, 2, format, args...)
-}
-
-// Errorf is like Debugf, but at Error level.
-func Errorf(ctx context.Context, format string, args ...interface{}) {
- internal.Logf(ctx, 3, format, args...)
-}
-
-// Criticalf is like Debugf, but at Critical level.
-func Criticalf(ctx context.Context, format string, args ...interface{}) {
- internal.Logf(ctx, 4, format, args...)
-}
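
The five helpers removed here are thin wrappers that map severity onto the level argument of internal.Logf (0 for Debug through 4 for Critical). A minimal usage sketch, assuming a standard handler wired through appengine.NewContext; the package name and doWork are illustrative stand-ins:

    // Sketch: request-scoped logging with the removed helpers.
    package app // illustrative

    import (
    	"net/http"

    	"golang.org/x/net/context"

    	"google.golang.org/appengine"
    	"google.golang.org/appengine/log"
    )

    func doWork(ctx context.Context) error { return nil } // stand-in for real work

    func handle(w http.ResponseWriter, r *http.Request) {
    	ctx := appengine.NewContext(r) // ties log lines to this request
    	log.Debugf(ctx, "handling %s", r.URL.Path)
    	if err := doWork(ctx); err != nil {
    		log.Errorf(ctx, "doWork failed: %v", err)
    		http.Error(w, "internal error", http.StatusInternalServerError)
    		return
    	}
    	log.Infof(ctx, "done")
    }
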
diff --git a/vendor/google.golang.org/appengine/log/log.go b/vendor/google.golang.org/appengine/log/log.go
deleted file mode 100644
index cd89e5c..0000000
--- a/vendor/google.golang.org/appengine/log/log.go
+++ /dev/null
@@ -1,323 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package log provides the means of querying an application's logs from
-within an App Engine application.
-
-Example:
- c := appengine.NewContext(r)
- query := &log.Query{
- AppLogs: true,
- Versions: []string{"1"},
- }
-
- for results := query.Run(c); ; {
- record, err := results.Next()
- if err == log.Done {
- log.Infof(c, "Done processing results")
- break
- }
- if err != nil {
- log.Errorf(c, "Failed to retrieve next log: %v", err)
- break
- }
- log.Infof(c, "Saw record %v", record)
- }
-*/
-package log
-
-import (
- "errors"
- "fmt"
- "strings"
- "time"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/log"
-)
-
-// Query defines a logs query.
-type Query struct {
- // Start time specifies the earliest log to return (inclusive).
- StartTime time.Time
-
- // End time specifies the latest log to return (exclusive).
- EndTime time.Time
-
- // Offset specifies a position within the log stream to resume reading from,
- // and should come from a previously returned Record's field of the same name.
- Offset []byte
-
- // Incomplete controls whether active (incomplete) requests should be included.
- Incomplete bool
-
- // AppLogs indicates if application-level logs should be included.
- AppLogs bool
-
- // ApplyMinLevel indicates if MinLevel should be used to filter results.
- ApplyMinLevel bool
-
- // If ApplyMinLevel is true, only logs for requests with at least one
- // application log of MinLevel or higher will be returned.
- MinLevel int
-
- // Versions is the major version IDs whose logs should be retrieved.
- // Logs for specific modules can be retrieved by the specifying versions
- // in the form "module:version"; the default module is used if no module
- // is specified.
- Versions []string
-
- // A list of requests to search for instead of a time-based scan. Cannot be
- // combined with filtering options such as StartTime, EndTime, Offset,
- // Incomplete, ApplyMinLevel, or Versions.
- RequestIDs []string
-}
-
-// AppLog represents a single application-level log.
-type AppLog struct {
- Time time.Time
- Level int
- Message string
-}
-
-// Record contains all the information for a single web request.
-type Record struct {
- AppID string
- ModuleID string
- VersionID string
- RequestID []byte
- IP string
- Nickname string
- AppEngineRelease string
-
- // The time when this request started.
- StartTime time.Time
-
- // The time when this request finished.
- EndTime time.Time
-
- // Opaque cursor into the result stream.
- Offset []byte
-
- // The time required to process the request.
- Latency time.Duration
- MCycles int64
- Method string
- Resource string
- HTTPVersion string
- Status int32
-
- // The size of the request sent back to the client, in bytes.
- ResponseSize int64
- Referrer string
- UserAgent string
- URLMapEntry string
- Combined string
- Host string
-
- // The estimated cost of this request, in dollars.
- Cost float64
- TaskQueueName string
- TaskName string
- WasLoadingRequest bool
- PendingTime time.Duration
- Finished bool
- AppLogs []AppLog
-
- // Mostly-unique identifier for the instance that handled the request if available.
- InstanceID string
-}
-
-// Result represents the result of a query.
-type Result struct {
- logs []*Record
- context context.Context
- request *pb.LogReadRequest
- resultsSeen bool
- err error
-}
-
-// Next returns the next log record,
-func (qr *Result) Next() (*Record, error) {
- if qr.err != nil {
- return nil, qr.err
- }
- if len(qr.logs) > 0 {
- lr := qr.logs[0]
- qr.logs = qr.logs[1:]
- return lr, nil
- }
-
- if qr.request.Offset == nil && qr.resultsSeen {
- return nil, Done
- }
-
- if err := qr.run(); err != nil {
- // Errors here may be retried, so don't store the error.
- return nil, err
- }
-
- return qr.Next()
-}
-
-// Done is returned when a query iteration has completed.
-var Done = errors.New("log: query has no more results")
-
-// protoToAppLogs takes as input an array of pointers to LogLines, the internal
-// Protocol Buffer representation of a single application-level log,
-// and converts it to an array of AppLogs, the external representation
-// of an application-level log.
-func protoToAppLogs(logLines []*pb.LogLine) []AppLog {
- appLogs := make([]AppLog, len(logLines))
-
- for i, line := range logLines {
- appLogs[i] = AppLog{
- Time: time.Unix(0, *line.Time*1e3),
- Level: int(*line.Level),
- Message: *line.LogMessage,
- }
- }
-
- return appLogs
-}
-
-// protoToRecord converts a RequestLog, the internal Protocol Buffer
-// representation of a single request-level log, to a Record, its
-// corresponding external representation.
-func protoToRecord(rl *pb.RequestLog) *Record {
- offset, err := proto.Marshal(rl.Offset)
- if err != nil {
- offset = nil
- }
- return &Record{
- AppID: *rl.AppId,
- ModuleID: rl.GetModuleId(),
- VersionID: *rl.VersionId,
- RequestID: rl.RequestId,
- Offset: offset,
- IP: *rl.Ip,
- Nickname: rl.GetNickname(),
- AppEngineRelease: string(rl.GetAppEngineRelease()),
- StartTime: time.Unix(0, *rl.StartTime*1e3),
- EndTime: time.Unix(0, *rl.EndTime*1e3),
- Latency: time.Duration(*rl.Latency) * time.Microsecond,
- MCycles: *rl.Mcycles,
- Method: *rl.Method,
- Resource: *rl.Resource,
- HTTPVersion: *rl.HttpVersion,
- Status: *rl.Status,
- ResponseSize: *rl.ResponseSize,
- Referrer: rl.GetReferrer(),
- UserAgent: rl.GetUserAgent(),
- URLMapEntry: *rl.UrlMapEntry,
- Combined: *rl.Combined,
- Host: rl.GetHost(),
- Cost: rl.GetCost(),
- TaskQueueName: rl.GetTaskQueueName(),
- TaskName: rl.GetTaskName(),
- WasLoadingRequest: rl.GetWasLoadingRequest(),
- PendingTime: time.Duration(rl.GetPendingTime()) * time.Microsecond,
- Finished: rl.GetFinished(),
- AppLogs: protoToAppLogs(rl.Line),
- InstanceID: string(rl.GetCloneKey()),
- }
-}
-
-// Run starts a query for log records, which contain request and application
-// level log information.
-func (params *Query) Run(c context.Context) *Result {
- req, err := makeRequest(params, internal.FullyQualifiedAppID(c), appengine.VersionID(c))
- return &Result{
- context: c,
- request: req,
- err: err,
- }
-}
-
-func makeRequest(params *Query, appID, versionID string) (*pb.LogReadRequest, error) {
- req := &pb.LogReadRequest{}
- req.AppId = &appID
- if !params.StartTime.IsZero() {
- req.StartTime = proto.Int64(params.StartTime.UnixNano() / 1e3)
- }
- if !params.EndTime.IsZero() {
- req.EndTime = proto.Int64(params.EndTime.UnixNano() / 1e3)
- }
- if len(params.Offset) > 0 {
- var offset pb.LogOffset
- if err := proto.Unmarshal(params.Offset, &offset); err != nil {
- return nil, fmt.Errorf("bad Offset: %v", err)
- }
- req.Offset = &offset
- }
- if params.Incomplete {
-		req.IncludeIncomplete = &params.Incomplete
- }
- if params.AppLogs {
-		req.IncludeAppLogs = &params.AppLogs
- }
- if params.ApplyMinLevel {
- req.MinimumLogLevel = proto.Int32(int32(params.MinLevel))
- }
- if params.Versions == nil {
- // If no versions were specified, default to the default module at
- // the major version being used by this module.
- if i := strings.Index(versionID, "."); i >= 0 {
- versionID = versionID[:i]
- }
- req.VersionId = []string{versionID}
- } else {
- req.ModuleVersion = make([]*pb.LogModuleVersion, 0, len(params.Versions))
- for _, v := range params.Versions {
- var m *string
- if i := strings.Index(v, ":"); i >= 0 {
- m, v = proto.String(v[:i]), v[i+1:]
- }
- req.ModuleVersion = append(req.ModuleVersion, &pb.LogModuleVersion{
- ModuleId: m,
- VersionId: proto.String(v),
- })
- }
- }
- if params.RequestIDs != nil {
- ids := make([][]byte, len(params.RequestIDs))
- for i, v := range params.RequestIDs {
- ids[i] = []byte(v)
- }
- req.RequestId = ids
- }
-
- return req, nil
-}
-
-// run takes the query Result produced by a call to Run and updates it with
-// more Records. The updated Result contains a new set of logs as well as an
-// offset to where more logs can be found. We also convert the items in the
-// response from their internal representations to external versions of the
-// same structs.
-func (r *Result) run() error {
- res := &pb.LogReadResponse{}
- if err := internal.Call(r.context, "logservice", "Read", r.request, res); err != nil {
- return err
- }
-
- r.logs = make([]*Record, len(res.Log))
- r.request.Offset = res.Offset
- r.resultsSeen = true
-
- for i, log := range res.Log {
- r.logs[i] = protoToRecord(log)
- }
-
- return nil
-}
-
-func init() {
- internal.RegisterErrorCodeMap("logservice", pb.LogServiceError_ErrorCode_name)
-}
diff --git a/vendor/google.golang.org/appengine/mail/mail.go b/vendor/google.golang.org/appengine/mail/mail.go
deleted file mode 100644
index be0eb51..0000000
--- a/vendor/google.golang.org/appengine/mail/mail.go
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package mail provides the means of sending email from an
-App Engine application.
-
-Example:
- msg := &mail.Message{
- Sender: "romeo@montague.com",
-		To:      []string{"Juliet <juliet@capulet.org>"},
- Subject: "See you tonight",
- Body: "Don't forget our plans. Hark, 'til later.",
- }
- if err := mail.Send(c, msg); err != nil {
- log.Errorf(c, "Alas, my user, the email failed to sendeth: %v", err)
- }
-*/
-package mail
-
-import (
- "net/mail"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- bpb "google.golang.org/appengine/internal/base"
- pb "google.golang.org/appengine/internal/mail"
-)
-
-// A Message represents an email message.
-// Addresses may be of any form permitted by RFC 822.
-type Message struct {
- // Sender must be set, and must be either an application admin
- // or the currently signed-in user.
- Sender string
- ReplyTo string // may be empty
-
- // At least one of these slices must have a non-zero length,
- // except when calling SendToAdmins.
- To, Cc, Bcc []string
-
- Subject string
-
- // At least one of Body or HTMLBody must be non-empty.
- Body string
- HTMLBody string
-
- Attachments []Attachment
-
- // Extra mail headers.
- // See https://cloud.google.com/appengine/docs/go/mail/
- // for permissible headers.
- Headers mail.Header
-}
-
-// An Attachment represents an email attachment.
-type Attachment struct {
- // Name must be set to a valid file name.
- Name string
- Data []byte
- ContentID string
-}
-
-// Send sends an email message.
-func Send(c context.Context, msg *Message) error {
- return send(c, "Send", msg)
-}
-
-// SendToAdmins sends an email message to the application's administrators.
-func SendToAdmins(c context.Context, msg *Message) error {
- return send(c, "SendToAdmins", msg)
-}
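-
-// Illustrative sketch, not part of the original vendored source: sending a
-// message with an attachment and an extra header. The addresses, file name
-// and header value are assumptions made for this example.
-func exampleSendReport(c context.Context, report []byte) error {
-	msg := &Message{
-		Sender:  "admin@example-app.appspotmail.com",
-		To:      []string{"user@example.com"},
-		Subject: "Weekly report",
-		Body:    "The report is attached.",
-		Attachments: []Attachment{
-			{Name: "report.csv", Data: report},
-		},
-		Headers: mail.Header{
-			"References": []string{"<weekly-reports@example.com>"},
-		},
-	}
-	return Send(c, msg)
-}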
-
-func send(c context.Context, method string, msg *Message) error {
- req := &pb.MailMessage{
- Sender: &msg.Sender,
- To: msg.To,
- Cc: msg.Cc,
- Bcc: msg.Bcc,
- Subject: &msg.Subject,
- }
- if msg.ReplyTo != "" {
- req.ReplyTo = &msg.ReplyTo
- }
- if msg.Body != "" {
- req.TextBody = &msg.Body
- }
- if msg.HTMLBody != "" {
- req.HtmlBody = &msg.HTMLBody
- }
- if len(msg.Attachments) > 0 {
- req.Attachment = make([]*pb.MailAttachment, len(msg.Attachments))
- for i, att := range msg.Attachments {
- req.Attachment[i] = &pb.MailAttachment{
- FileName: proto.String(att.Name),
- Data: att.Data,
- }
- if att.ContentID != "" {
- req.Attachment[i].ContentID = proto.String(att.ContentID)
- }
- }
- }
- for key, vs := range msg.Headers {
- for _, v := range vs {
- req.Header = append(req.Header, &pb.MailHeader{
- Name: proto.String(key),
- Value: proto.String(v),
- })
- }
- }
- res := &bpb.VoidProto{}
- if err := internal.Call(c, "mail", method, req, res); err != nil {
- return err
- }
- return nil
-}
-
-func init() {
- internal.RegisterErrorCodeMap("mail", pb.MailServiceError_ErrorCode_name)
-}
diff --git a/vendor/google.golang.org/appengine/memcache/memcache.go b/vendor/google.golang.org/appengine/memcache/memcache.go
deleted file mode 100644
index 5155055..0000000
--- a/vendor/google.golang.org/appengine/memcache/memcache.go
+++ /dev/null
@@ -1,526 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package memcache provides a client for App Engine's distributed in-memory
-// key-value store for small chunks of arbitrary data.
-//
-// The fundamental operations get and set items, keyed by a string.
-//
-// item0, err := memcache.Get(c, "key")
-// if err != nil && err != memcache.ErrCacheMiss {
-// return err
-// }
-// if err == nil {
-// fmt.Fprintf(w, "memcache hit: Key=%q Val=[% x]\n", item0.Key, item0.Value)
-// } else {
-// fmt.Fprintf(w, "memcache miss\n")
-// }
-//
-// and
-//
-// item1 := &memcache.Item{
-// Key: "foo",
-// Value: []byte("bar"),
-// }
-// if err := memcache.Set(c, item1); err != nil {
-// return err
-// }
-package memcache
-
-import (
- "bytes"
- "encoding/gob"
- "encoding/json"
- "errors"
- "time"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/memcache"
-)
-
-var (
- // ErrCacheMiss means that an operation failed
- // because the item wasn't present.
- ErrCacheMiss = errors.New("memcache: cache miss")
- // ErrCASConflict means that a CompareAndSwap call failed due to the
- // cached value being modified between the Get and the CompareAndSwap.
- // If the cached value was simply evicted rather than replaced,
- // ErrNotStored will be returned instead.
- ErrCASConflict = errors.New("memcache: compare-and-swap conflict")
- // ErrNoStats means that no statistics were available.
- ErrNoStats = errors.New("memcache: no statistics available")
- // ErrNotStored means that a conditional write operation (i.e. Add or
- // CompareAndSwap) failed because the condition was not satisfied.
- ErrNotStored = errors.New("memcache: item not stored")
- // ErrServerError means that a server error occurred.
- ErrServerError = errors.New("memcache: server error")
-)
-
-// Item is the unit of memcache gets and sets.
-type Item struct {
- // Key is the Item's key (250 bytes maximum).
- Key string
- // Value is the Item's value.
- Value []byte
- // Object is the Item's value for use with a Codec.
- Object interface{}
- // Flags are server-opaque flags whose semantics are entirely up to the
- // App Engine app.
- Flags uint32
- // Expiration is the maximum duration that the item will stay
- // in the cache.
- // The zero value means the Item has no expiration time.
- // Subsecond precision is ignored.
- // This is not set when getting items.
- Expiration time.Duration
- // casID is a client-opaque value used for compare-and-swap operations.
- // Zero means that compare-and-swap is not used.
- casID uint64
-}
-
-const (
- secondsIn30Years = 60 * 60 * 24 * 365 * 30 // from memcache server code
- thirtyYears = time.Duration(secondsIn30Years) * time.Second
-)
-
-// protoToItem converts a protocol buffer item to a Go struct.
-func protoToItem(p *pb.MemcacheGetResponse_Item) *Item {
- return &Item{
- Key: string(p.Key),
- Value: p.Value,
- Flags: p.GetFlags(),
- casID: p.GetCasId(),
- }
-}
-
-// If err is an appengine.MultiError, return its first element. Otherwise, return err.
-func singleError(err error) error {
- if me, ok := err.(appengine.MultiError); ok {
- return me[0]
- }
- return err
-}
-
-// Get gets the item for the given key. ErrCacheMiss is returned for a memcache
-// cache miss. The key must be at most 250 bytes in length.
-func Get(c context.Context, key string) (*Item, error) {
- m, err := GetMulti(c, []string{key})
- if err != nil {
- return nil, err
- }
- if _, ok := m[key]; !ok {
- return nil, ErrCacheMiss
- }
- return m[key], nil
-}
-
-// GetMulti is a batch version of Get. The returned map from keys to items may
-// have fewer elements than the input slice, due to memcache cache misses.
-// Each key must be at most 250 bytes in length.
-func GetMulti(c context.Context, key []string) (map[string]*Item, error) {
- if len(key) == 0 {
- return nil, nil
- }
- keyAsBytes := make([][]byte, len(key))
- for i, k := range key {
- keyAsBytes[i] = []byte(k)
- }
- req := &pb.MemcacheGetRequest{
- Key: keyAsBytes,
- ForCas: proto.Bool(true),
- }
- res := &pb.MemcacheGetResponse{}
- if err := internal.Call(c, "memcache", "Get", req, res); err != nil {
- return nil, err
- }
- m := make(map[string]*Item, len(res.Item))
- for _, p := range res.Item {
- t := protoToItem(p)
- m[t.Key] = t
- }
- return m, nil
-}
-
-// Delete deletes the item for the given key.
-// ErrCacheMiss is returned if the specified item cannot be found.
-// The key must be at most 250 bytes in length.
-func Delete(c context.Context, key string) error {
- return singleError(DeleteMulti(c, []string{key}))
-}
-
-// DeleteMulti is a batch version of Delete.
-// If any keys cannot be found, an appengine.MultiError is returned.
-// Each key must be at most 250 bytes in length.
-func DeleteMulti(c context.Context, key []string) error {
- if len(key) == 0 {
- return nil
- }
- req := &pb.MemcacheDeleteRequest{
- Item: make([]*pb.MemcacheDeleteRequest_Item, len(key)),
- }
- for i, k := range key {
- req.Item[i] = &pb.MemcacheDeleteRequest_Item{Key: []byte(k)}
- }
- res := &pb.MemcacheDeleteResponse{}
- if err := internal.Call(c, "memcache", "Delete", req, res); err != nil {
- return err
- }
- if len(res.DeleteStatus) != len(key) {
- return ErrServerError
- }
- me, any := make(appengine.MultiError, len(key)), false
- for i, s := range res.DeleteStatus {
- switch s {
- case pb.MemcacheDeleteResponse_DELETED:
- // OK
- case pb.MemcacheDeleteResponse_NOT_FOUND:
- me[i] = ErrCacheMiss
- any = true
- default:
- me[i] = ErrServerError
- any = true
- }
- }
- if any {
- return me
- }
- return nil
-}
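-
-// Illustrative sketch, not part of the original vendored source: deleting a
-// batch of keys while treating cache misses as success. The keys are supplied
-// by the caller.
-func exampleDeleteIgnoringMisses(c context.Context, keys []string) error {
-	err := DeleteMulti(c, keys)
-	me, ok := err.(appengine.MultiError)
-	if !ok {
-		return err // nil, or an error affecting the whole batch
-	}
-	for _, e := range me {
-		if e != nil && e != ErrCacheMiss {
-			return err // a key failed for a reason other than a miss
-		}
-	}
-	return nil // only misses, which are ignored here
-}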
-
-// Increment atomically increments the decimal value in the given key
-// by delta and returns the new value. The value must fit in a uint64.
-// Overflow wraps around, and underflow is capped to zero. The
-// provided delta may be negative. If the key doesn't exist in
-// memcache, the provided initial value is used to atomically
-// populate it before the delta is applied.
-// The key must be at most 250 bytes in length.
-func Increment(c context.Context, key string, delta int64, initialValue uint64) (newValue uint64, err error) {
- return incr(c, key, delta, &initialValue)
-}
-
-// IncrementExisting works like Increment but assumes that the key
-// already exists in memcache and doesn't take an initial value.
-// IncrementExisting can save work if calculating the initial value is
-// expensive.
-// An error is returned if the specified item cannot be found.
-func IncrementExisting(c context.Context, key string, delta int64) (newValue uint64, err error) {
- return incr(c, key, delta, nil)
-}
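-
-// Illustrative sketch, not part of the original vendored source: counting page
-// views with an atomic counter. The key prefix is an assumption made for this
-// example.
-func exampleCountView(c context.Context, page string) (uint64, error) {
-	// Start the counter at 0 if the key is absent, then add 1.
-	return Increment(c, "views:"+page, 1, 0)
-}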
-
-func incr(c context.Context, key string, delta int64, initialValue *uint64) (newValue uint64, err error) {
- req := &pb.MemcacheIncrementRequest{
- Key: []byte(key),
- InitialValue: initialValue,
- }
- if delta >= 0 {
- req.Delta = proto.Uint64(uint64(delta))
- } else {
- req.Delta = proto.Uint64(uint64(-delta))
- req.Direction = pb.MemcacheIncrementRequest_DECREMENT.Enum()
- }
- res := &pb.MemcacheIncrementResponse{}
- err = internal.Call(c, "memcache", "Increment", req, res)
- if err != nil {
- return
- }
- if res.NewValue == nil {
- return 0, ErrCacheMiss
- }
- return *res.NewValue, nil
-}
-
-// set sets the given items using the given conflict resolution policy.
-// appengine.MultiError may be returned.
-func set(c context.Context, item []*Item, value [][]byte, policy pb.MemcacheSetRequest_SetPolicy) error {
- if len(item) == 0 {
- return nil
- }
- req := &pb.MemcacheSetRequest{
- Item: make([]*pb.MemcacheSetRequest_Item, len(item)),
- }
- for i, t := range item {
- p := &pb.MemcacheSetRequest_Item{
- Key: []byte(t.Key),
- }
- if value == nil {
- p.Value = t.Value
- } else {
- p.Value = value[i]
- }
- if t.Flags != 0 {
- p.Flags = proto.Uint32(t.Flags)
- }
- if t.Expiration != 0 {
- // In the .proto file, MemcacheSetRequest_Item uses a fixed32 (i.e. unsigned)
- // for expiration time, while MemcacheGetRequest_Item uses int32 (i.e. signed).
- // Throughout this .go file, we use int32.
- // Also, in the proto, the expiration value is either a duration (in seconds)
- // or an absolute Unix timestamp (in seconds), depending on whether the
- // value is less than or greater than or equal to 30 years, respectively.
- if t.Expiration < time.Second {
- // Because an Expiration of 0 means no expiration, we take
- // care here to translate an item with an expiration
- // Duration between 0-1 seconds as immediately expiring
- // (saying it expired a few seconds ago), rather than
- // rounding it down to 0 and making it live forever.
- p.ExpirationTime = proto.Uint32(uint32(time.Now().Unix()) - 5)
- } else if t.Expiration >= thirtyYears {
- p.ExpirationTime = proto.Uint32(uint32(time.Now().Unix()) + uint32(t.Expiration/time.Second))
- } else {
- p.ExpirationTime = proto.Uint32(uint32(t.Expiration / time.Second))
- }
- }
- if t.casID != 0 {
- p.CasId = proto.Uint64(t.casID)
- p.ForCas = proto.Bool(true)
- }
- p.SetPolicy = policy.Enum()
- req.Item[i] = p
- }
- res := &pb.MemcacheSetResponse{}
- if err := internal.Call(c, "memcache", "Set", req, res); err != nil {
- return err
- }
- if len(res.SetStatus) != len(item) {
- return ErrServerError
- }
- me, any := make(appengine.MultiError, len(item)), false
- for i, st := range res.SetStatus {
- var err error
- switch st {
- case pb.MemcacheSetResponse_STORED:
- // OK
- case pb.MemcacheSetResponse_NOT_STORED:
- err = ErrNotStored
- case pb.MemcacheSetResponse_EXISTS:
- err = ErrCASConflict
- default:
- err = ErrServerError
- }
- if err != nil {
- me[i] = err
- any = true
- }
- }
- if any {
- return me
- }
- return nil
-}
-
-// Set writes the given item, unconditionally.
-func Set(c context.Context, item *Item) error {
- return singleError(set(c, []*Item{item}, nil, pb.MemcacheSetRequest_SET))
-}
-
-// SetMulti is a batch version of Set.
-// appengine.MultiError may be returned.
-func SetMulti(c context.Context, item []*Item) error {
- return set(c, item, nil, pb.MemcacheSetRequest_SET)
-}
-
-// Add writes the given item, if no value already exists for its key.
-// ErrNotStored is returned if that condition is not met.
-func Add(c context.Context, item *Item) error {
- return singleError(set(c, []*Item{item}, nil, pb.MemcacheSetRequest_ADD))
-}
-
-// AddMulti is a batch version of Add.
-// appengine.MultiError may be returned.
-func AddMulti(c context.Context, item []*Item) error {
- return set(c, item, nil, pb.MemcacheSetRequest_ADD)
-}
-
-// CompareAndSwap writes the given item that was previously returned by Get,
-// if the value was neither modified nor evicted between the Get and the
-// CompareAndSwap calls. The item's Key should not change between calls but
-// all other item fields may differ.
-// ErrCASConflict is returned if the value was modified in between the calls.
-// ErrNotStored is returned if the value was evicted in between the calls.
-func CompareAndSwap(c context.Context, item *Item) error {
- return singleError(set(c, []*Item{item}, nil, pb.MemcacheSetRequest_CAS))
-}
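-
-// Illustrative sketch, not part of the original vendored source: an
-// append-with-retry loop built from Get, Add and CompareAndSwap. The bound of
-// three attempts is an arbitrary choice for this example.
-func exampleAppend(c context.Context, key string, suffix []byte) error {
-	for attempt := 0; attempt < 3; attempt++ {
-		item, err := Get(c, key)
-		if err == ErrCacheMiss {
-			// Nothing cached yet; try to create the value.
-			if err := Add(c, &Item{Key: key, Value: suffix}); err != ErrNotStored {
-				return err // nil on success, or a non-retriable error
-			}
-			continue // another request added it first; reload and retry
-		}
-		if err != nil {
-			return err
-		}
-		item.Value = append(item.Value, suffix...)
-		if err := CompareAndSwap(c, item); err != ErrCASConflict && err != ErrNotStored {
-			return err // nil on success, or a non-retriable error
-		}
-		// The value changed or was evicted concurrently; retry.
-	}
-	return ErrCASConflict
-}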
-
-// CompareAndSwapMulti is a batch version of CompareAndSwap.
-// appengine.MultiError may be returned.
-func CompareAndSwapMulti(c context.Context, item []*Item) error {
- return set(c, item, nil, pb.MemcacheSetRequest_CAS)
-}
-
-// Codec represents a symmetric pair of functions that implement a codec.
-// Items stored into or retrieved from memcache using a Codec have their
-// values marshaled or unmarshaled.
-//
-// All the methods provided for Codec behave analogously to the package-level
-// function with the same name.
-type Codec struct {
- Marshal func(interface{}) ([]byte, error)
- Unmarshal func([]byte, interface{}) error
-}
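-
-// Illustrative sketch, not part of the original vendored source: caching a
-// small struct with the JSON codec. The profile type and key prefix are
-// assumptions made for this example.
-type exampleProfile struct {
-	Name  string
-	Score int
-}
-
-func exampleStoreProfile(c context.Context, id string, p *exampleProfile) error {
-	return JSON.Set(c, &Item{Key: "profile:" + id, Object: p})
-}
-
-func exampleLoadProfile(c context.Context, id string) (*exampleProfile, error) {
-	p := new(exampleProfile)
-	if _, err := JSON.Get(c, "profile:"+id, p); err != nil {
-		return nil, err
-	}
-	return p, nil
-}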
-
-// Get gets the item for the given key and decodes the obtained value into v.
-// ErrCacheMiss is returned for a memcache cache miss.
-// The key must be at most 250 bytes in length.
-func (cd Codec) Get(c context.Context, key string, v interface{}) (*Item, error) {
- i, err := Get(c, key)
- if err != nil {
- return nil, err
- }
- if err := cd.Unmarshal(i.Value, v); err != nil {
- return nil, err
- }
- return i, nil
-}
-
-func (cd Codec) set(c context.Context, items []*Item, policy pb.MemcacheSetRequest_SetPolicy) error {
- var vs [][]byte
- var me appengine.MultiError
- for i, item := range items {
- v, err := cd.Marshal(item.Object)
- if err != nil {
- if me == nil {
- me = make(appengine.MultiError, len(items))
- }
- me[i] = err
- continue
- }
- if me == nil {
- vs = append(vs, v)
- }
- }
- if me != nil {
- return me
- }
-
- return set(c, items, vs, policy)
-}
-
-// Set writes the given item, unconditionally.
-func (cd Codec) Set(c context.Context, item *Item) error {
- return singleError(cd.set(c, []*Item{item}, pb.MemcacheSetRequest_SET))
-}
-
-// SetMulti is a batch version of Set.
-// appengine.MultiError may be returned.
-func (cd Codec) SetMulti(c context.Context, items []*Item) error {
- return cd.set(c, items, pb.MemcacheSetRequest_SET)
-}
-
-// Add writes the given item, if no value already exists for its key.
-// ErrNotStored is returned if that condition is not met.
-func (cd Codec) Add(c context.Context, item *Item) error {
- return singleError(cd.set(c, []*Item{item}, pb.MemcacheSetRequest_ADD))
-}
-
-// AddMulti is a batch version of Add.
-// appengine.MultiError may be returned.
-func (cd Codec) AddMulti(c context.Context, items []*Item) error {
- return cd.set(c, items, pb.MemcacheSetRequest_ADD)
-}
-
-// CompareAndSwap writes the given item that was previously returned by Get,
-// if the value was neither modified nor evicted between the Get and the
-// CompareAndSwap calls. The item's Key should not change between calls but
-// all other item fields may differ.
-// ErrCASConflict is returned if the value was modified in between the calls.
-// ErrNotStored is returned if the value was evicted in between the calls.
-func (cd Codec) CompareAndSwap(c context.Context, item *Item) error {
- return singleError(cd.set(c, []*Item{item}, pb.MemcacheSetRequest_CAS))
-}
-
-// CompareAndSwapMulti is a batch version of CompareAndSwap.
-// appengine.MultiError may be returned.
-func (cd Codec) CompareAndSwapMulti(c context.Context, items []*Item) error {
- return cd.set(c, items, pb.MemcacheSetRequest_CAS)
-}
-
-var (
- // Gob is a Codec that uses the gob package.
- Gob = Codec{gobMarshal, gobUnmarshal}
- // JSON is a Codec that uses the json package.
- JSON = Codec{json.Marshal, json.Unmarshal}
-)
-
-func gobMarshal(v interface{}) ([]byte, error) {
- var buf bytes.Buffer
- if err := gob.NewEncoder(&buf).Encode(v); err != nil {
- return nil, err
- }
- return buf.Bytes(), nil
-}
-
-func gobUnmarshal(data []byte, v interface{}) error {
- return gob.NewDecoder(bytes.NewBuffer(data)).Decode(v)
-}
-
-// Statistics represents a set of statistics about the memcache cache.
-// This may include items that have expired but have not yet been removed from the cache.
-type Statistics struct {
- Hits uint64 // Counter of cache hits
- Misses uint64 // Counter of cache misses
- ByteHits uint64 // Counter of bytes transferred for gets
-
- Items uint64 // Items currently in the cache
- Bytes uint64 // Size of all items currently in the cache
-
- Oldest int64 // Age of access of the oldest item, in seconds
-}
-
-// Stats retrieves the current memcache statistics.
-func Stats(c context.Context) (*Statistics, error) {
- req := &pb.MemcacheStatsRequest{}
- res := &pb.MemcacheStatsResponse{}
- if err := internal.Call(c, "memcache", "Stats", req, res); err != nil {
- return nil, err
- }
- if res.Stats == nil {
- return nil, ErrNoStats
- }
- return &Statistics{
- Hits: *res.Stats.Hits,
- Misses: *res.Stats.Misses,
- ByteHits: *res.Stats.ByteHits,
- Items: *res.Stats.Items,
- Bytes: *res.Stats.Bytes,
- Oldest: int64(*res.Stats.OldestItemAge),
- }, nil
-}
-
-// Flush flushes all items from memcache.
-func Flush(c context.Context) error {
- req := &pb.MemcacheFlushRequest{}
- res := &pb.MemcacheFlushResponse{}
- return internal.Call(c, "memcache", "FlushAll", req, res)
-}
-
-func namespaceMod(m proto.Message, namespace string) {
- switch m := m.(type) {
- case *pb.MemcacheDeleteRequest:
- if m.NameSpace == nil {
- m.NameSpace = &namespace
- }
- case *pb.MemcacheGetRequest:
- if m.NameSpace == nil {
- m.NameSpace = &namespace
- }
- case *pb.MemcacheIncrementRequest:
- if m.NameSpace == nil {
- m.NameSpace = &namespace
- }
- case *pb.MemcacheSetRequest:
- if m.NameSpace == nil {
- m.NameSpace = &namespace
- }
- // MemcacheFlushRequest, MemcacheStatsRequest do not apply namespace.
- }
-}
-
-func init() {
- internal.RegisterErrorCodeMap("memcache", pb.MemcacheServiceError_ErrorCode_name)
- internal.NamespaceMods["memcache"] = namespaceMod
-}
diff --git a/vendor/google.golang.org/appengine/module/module.go b/vendor/google.golang.org/appengine/module/module.go
deleted file mode 100644
index aaf020c..0000000
--- a/vendor/google.golang.org/appengine/module/module.go
+++ /dev/null
@@ -1,113 +0,0 @@
-// Copyright 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package module provides functions for interacting with modules.
-
-The appengine package contains functions that report the identity of the app,
-including the module name.
-*/
-package module
-
-import (
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/modules"
-)
-
-// List returns the names of modules belonging to this application.
-func List(c context.Context) ([]string, error) {
- req := &pb.GetModulesRequest{}
- res := &pb.GetModulesResponse{}
- err := internal.Call(c, "modules", "GetModules", req, res)
- return res.Module, err
-}
-
-// NumInstances returns the number of instances of the given module/version.
-// If either argument is the empty string it means the default.
-func NumInstances(c context.Context, module, version string) (int, error) {
- req := &pb.GetNumInstancesRequest{}
- if module != "" {
- req.Module = &module
- }
- if version != "" {
- req.Version = &version
- }
- res := &pb.GetNumInstancesResponse{}
-
- if err := internal.Call(c, "modules", "GetNumInstances", req, res); err != nil {
- return 0, err
- }
- return int(*res.Instances), nil
-}
-
-// SetNumInstances sets the number of instances of the given module.version to the
-// specified value. If either module or version are the empty string it means the
-// default.
-func SetNumInstances(c context.Context, module, version string, instances int) error {
- req := &pb.SetNumInstancesRequest{}
- if module != "" {
- req.Module = &module
- }
- if version != "" {
- req.Version = &version
- }
- req.Instances = proto.Int64(int64(instances))
- res := &pb.SetNumInstancesResponse{}
- return internal.Call(c, "modules", "SetNumInstances", req, res)
-}
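-
-// Illustrative sketch, not part of the original vendored source: doubling the
-// instance count of the default version of a manually scaled module. The
-// module name "worker" is an assumption made for this example.
-func exampleDoubleWorkers(c context.Context) error {
-	n, err := NumInstances(c, "worker", "")
-	if err != nil {
-		return err
-	}
-	return SetNumInstances(c, "worker", "", n*2)
-}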
-
-// Versions returns the names of the versions that belong to the specified module.
-// If module is the empty string, it means the default module.
-func Versions(c context.Context, module string) ([]string, error) {
- req := &pb.GetVersionsRequest{}
- if module != "" {
- req.Module = &module
- }
- res := &pb.GetVersionsResponse{}
- err := internal.Call(c, "modules", "GetVersions", req, res)
- return res.GetVersion(), err
-}
-
-// DefaultVersion returns the default version of the specified module.
-// If module is the empty string, it means the default module.
-func DefaultVersion(c context.Context, module string) (string, error) {
- req := &pb.GetDefaultVersionRequest{}
- if module != "" {
- req.Module = &module
- }
- res := &pb.GetDefaultVersionResponse{}
- err := internal.Call(c, "modules", "GetDefaultVersion", req, res)
- return res.GetVersion(), err
-}
-
-// Start starts the specified version of the specified module.
-// If either module or version are the empty string, it means the default.
-func Start(c context.Context, module, version string) error {
- req := &pb.StartModuleRequest{}
- if module != "" {
- req.Module = &module
- }
- if version != "" {
- req.Version = &version
- }
- res := &pb.StartModuleResponse{}
- return internal.Call(c, "modules", "StartModule", req, res)
-}
-
-// Stop stops the specified version of the specified module.
-// If either module or version are the empty string, it means the default.
-func Stop(c context.Context, module, version string) error {
- req := &pb.StopModuleRequest{}
- if module != "" {
- req.Module = &module
- }
- if version != "" {
- req.Version = &version
- }
- res := &pb.StopModuleResponse{}
- return internal.Call(c, "modules", "StopModule", req, res)
-}
diff --git a/vendor/google.golang.org/appengine/namespace.go b/vendor/google.golang.org/appengine/namespace.go
deleted file mode 100644
index 21860ca..0000000
--- a/vendor/google.golang.org/appengine/namespace.go
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package appengine
-
-import (
- "fmt"
- "regexp"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
-)
-
-// Namespace returns a replacement context that operates within the given namespace.
-func Namespace(c context.Context, namespace string) (context.Context, error) {
- if !validNamespace.MatchString(namespace) {
- return nil, fmt.Errorf("appengine: namespace %q does not match /%s/", namespace, validNamespace)
- }
- return internal.NamespacedContext(c, namespace), nil
-}
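-
-// Illustrative sketch, not part of the original vendored source: deriving a
-// per-tenant context, falling back to the default (empty) namespace when the
-// tenant string is not a valid namespace name. The tenant argument is an
-// assumption made for this example.
-func exampleTenantContext(c context.Context, tenant string) context.Context {
-	nc, err := Namespace(c, tenant)
-	if err != nil {
-		return c // invalid name; stay in the default namespace
-	}
-	return nc
-}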
-
-// validNamespace matches valid namespace names.
-var validNamespace = regexp.MustCompile(`^[0-9A-Za-z._-]{0,100}$`)
diff --git a/vendor/google.golang.org/appengine/remote_api/client.go b/vendor/google.golang.org/appengine/remote_api/client.go
deleted file mode 100644
index dbe219d..0000000
--- a/vendor/google.golang.org/appengine/remote_api/client.go
+++ /dev/null
@@ -1,174 +0,0 @@
-// Copyright 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package remote_api
-
-// This file provides the client for connecting remotely to a user's production
-// application.
-
-import (
- "bytes"
- "fmt"
- "io/ioutil"
- "log"
- "math/rand"
- "net/http"
- "net/url"
- "regexp"
- "strconv"
- "strings"
- "time"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/remote_api"
-)
-
-// NewRemoteContext returns a context that gives access to the production
-// APIs for the application at the given host. All communication will be
-// performed over SSL unless the host is localhost.
-func NewRemoteContext(host string, client *http.Client) (context.Context, error) {
- // Add an appcfg header to outgoing requests.
- t := client.Transport
- if t == nil {
- t = http.DefaultTransport
- }
- client.Transport = &headerAddingRoundTripper{t}
-
- url := url.URL{
- Scheme: "https",
- Host: host,
- Path: "/_ah/remote_api",
- }
- if host == "localhost" || strings.HasPrefix(host, "localhost:") {
- url.Scheme = "http"
- }
- u := url.String()
- appID, err := getAppID(client, u)
- if err != nil {
- return nil, fmt.Errorf("unable to contact server: %v", err)
- }
- rc := &remoteContext{
- client: client,
- url: u,
- }
- ctx := internal.WithCallOverride(context.Background(), rc.call)
- ctx = internal.WithLogOverride(ctx, rc.logf)
- ctx = internal.WithAppIDOverride(ctx, appID)
- return ctx, nil
-}
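-
-// Illustrative sketch, not part of the original vendored source: opening a
-// remote context against a production app and logging its ID. The host name is
-// an assumption, and authClient stands in for an *http.Client that attaches
-// admin OAuth credentials (for example one built with
-// golang.org/x/oauth2/google); an unauthenticated client is rejected by the
-// /_ah/remote_api endpoint.
-func exampleOpenRemote(authClient *http.Client) (context.Context, error) {
-	ctx, err := NewRemoteContext("example-app.appspot.com", authClient)
-	if err != nil {
-		return nil, err
-	}
-	log.Printf("connected to %s", internal.FullyQualifiedAppID(ctx))
-	return ctx, nil
-}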
-
-type remoteContext struct {
- client *http.Client
- url string
-}
-
-var logLevels = map[int64]string{
- 0: "DEBUG",
- 1: "INFO",
- 2: "WARNING",
- 3: "ERROR",
- 4: "CRITICAL",
-}
-
-func (c *remoteContext) logf(level int64, format string, args ...interface{}) {
- log.Printf(logLevels[level]+": "+format, args...)
-}
-
-func (c *remoteContext) call(ctx context.Context, service, method string, in, out proto.Message) error {
- req, err := proto.Marshal(in)
- if err != nil {
- return fmt.Errorf("error marshalling request: %v", err)
- }
-
- remReq := &pb.Request{
- ServiceName: proto.String(service),
- Method: proto.String(method),
- Request: req,
- // NOTE(djd): RequestId is unused in the server.
- }
-
- req, err = proto.Marshal(remReq)
- if err != nil {
- return fmt.Errorf("proto.Marshal: %v", err)
- }
-
- // TODO(djd): Respect ctx.Deadline()?
- resp, err := c.client.Post(c.url, "application/octet-stream", bytes.NewReader(req))
- if err != nil {
- return fmt.Errorf("error sending request: %v", err)
- }
- defer resp.Body.Close()
-
- body, err := ioutil.ReadAll(resp.Body)
- if resp.StatusCode != http.StatusOK {
- return fmt.Errorf("bad response %d; body: %q", resp.StatusCode, body)
- }
- if err != nil {
- return fmt.Errorf("failed reading response: %v", err)
- }
- remResp := &pb.Response{}
- if err := proto.Unmarshal(body, remResp); err != nil {
- return fmt.Errorf("error unmarshalling response: %v", err)
- }
-
- if ae := remResp.GetApplicationError(); ae != nil {
- return &internal.APIError{
- Code: ae.GetCode(),
- Detail: ae.GetDetail(),
- Service: service,
- }
- }
-
- if remResp.Response == nil {
- return fmt.Errorf("unexpected response: %s", proto.MarshalTextString(remResp))
- }
-
- return proto.Unmarshal(remResp.Response, out)
-}
-
-// This is a forgiving regexp designed to parse the app ID from YAML.
-var appIDRE = regexp.MustCompile(`app_id["']?\s*:\s*['"]?([-a-z0-9.:~]+)`)
-
-func getAppID(client *http.Client, url string) (string, error) {
- // Generate a pseudo-random token for handshaking.
- token := strconv.Itoa(rand.New(rand.NewSource(time.Now().UnixNano())).Int())
-
- resp, err := client.Get(fmt.Sprintf("%s?rtok=%s", url, token))
- if err != nil {
- return "", err
- }
- defer resp.Body.Close()
-
- body, err := ioutil.ReadAll(resp.Body)
- if resp.StatusCode != http.StatusOK {
- return "", fmt.Errorf("bad response %d; body: %q", resp.StatusCode, body)
- }
- if err != nil {
- return "", fmt.Errorf("failed reading response: %v", err)
- }
-
- // Check the token is present in response.
- if !bytes.Contains(body, []byte(token)) {
- return "", fmt.Errorf("token not found: want %q; body %q", token, body)
- }
-
- match := appIDRE.FindSubmatch(body)
- if match == nil {
- return "", fmt.Errorf("app ID not found: body %q", body)
- }
-
- return string(match[1]), nil
-}
-
-type headerAddingRoundTripper struct {
- Wrapped http.RoundTripper
-}
-
-func (t *headerAddingRoundTripper) RoundTrip(r *http.Request) (*http.Response, error) {
- r.Header.Set("X-Appcfg-Api-Version", "1")
- return t.Wrapped.RoundTrip(r)
-}
diff --git a/vendor/google.golang.org/appengine/remote_api/remote_api.go b/vendor/google.golang.org/appengine/remote_api/remote_api.go
deleted file mode 100644
index 7b55793..0000000
--- a/vendor/google.golang.org/appengine/remote_api/remote_api.go
+++ /dev/null
@@ -1,152 +0,0 @@
-// Copyright 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package remote_api implements the /_ah/remote_api endpoint.
-This endpoint is used by offline tools such as the bulk loader.
-*/
-package remote_api
-
-import (
- "fmt"
- "io"
- "io/ioutil"
- "net/http"
- "strconv"
-
- "github.com/golang/protobuf/proto"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/remote_api"
- "google.golang.org/appengine/log"
- "google.golang.org/appengine/user"
-)
-
-func init() {
- http.HandleFunc("/_ah/remote_api", handle)
-}
-
-func handle(w http.ResponseWriter, req *http.Request) {
- c := appengine.NewContext(req)
-
- u := user.Current(c)
- if u == nil {
- u, _ = user.CurrentOAuth(c,
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/appengine.apis",
- )
- }
-
- if u == nil || !u.Admin {
- w.Header().Set("Content-Type", "text/plain; charset=utf-8")
- w.WriteHeader(http.StatusUnauthorized)
- io.WriteString(w, "You must be logged in as an administrator to access this.\n")
- return
- }
- if req.Header.Get("X-Appcfg-Api-Version") == "" {
- w.Header().Set("Content-Type", "text/plain; charset=utf-8")
- w.WriteHeader(http.StatusForbidden)
- io.WriteString(w, "This request did not contain a necessary header.\n")
- return
- }
-
- if req.Method != "POST" {
- // Response must be YAML.
- rtok := req.FormValue("rtok")
- if rtok == "" {
- rtok = "0"
- }
- w.Header().Set("Content-Type", "text/yaml; charset=utf-8")
- fmt.Fprintf(w, `{app_id: %q, rtok: %q}`, internal.FullyQualifiedAppID(c), rtok)
- return
- }
-
- defer req.Body.Close()
- body, err := ioutil.ReadAll(req.Body)
- if err != nil {
- w.WriteHeader(http.StatusBadRequest)
- log.Errorf(c, "Failed reading body: %v", err)
- return
- }
- remReq := &pb.Request{}
- if err := proto.Unmarshal(body, remReq); err != nil {
- w.WriteHeader(http.StatusBadRequest)
- log.Errorf(c, "Bad body: %v", err)
- return
- }
-
- service, method := *remReq.ServiceName, *remReq.Method
- if !requestSupported(service, method) {
- w.WriteHeader(http.StatusBadRequest)
- log.Errorf(c, "Unsupported RPC /%s.%s", service, method)
- return
- }
-
- rawReq := &rawMessage{remReq.Request}
- rawRes := &rawMessage{}
- err = internal.Call(c, service, method, rawReq, rawRes)
-
- remRes := &pb.Response{}
- if err == nil {
- remRes.Response = rawRes.buf
- } else if ae, ok := err.(*internal.APIError); ok {
- remRes.ApplicationError = &pb.ApplicationError{
- Code: &ae.Code,
- Detail: &ae.Detail,
- }
- } else {
- // This shouldn't normally happen.
- log.Errorf(c, "appengine/remote_api: Unexpected error of type %T: %v", err, err)
- remRes.ApplicationError = &pb.ApplicationError{
- Code: proto.Int32(0),
- Detail: proto.String(err.Error()),
- }
- }
- out, err := proto.Marshal(remRes)
- if err != nil {
- // This should not be possible.
- w.WriteHeader(500)
- log.Errorf(c, "proto.Marshal: %v", err)
- return
- }
-
- log.Infof(c, "Spooling %d bytes of response to /%s.%s", len(out), service, method)
- w.Header().Set("Content-Type", "application/octet-stream")
- w.Header().Set("Content-Length", strconv.Itoa(len(out)))
- w.Write(out)
-}
-
-// rawMessage is a protocol buffer type that is already serialised.
-// This allows the remote_api code here to handle messages
-// without having to know the real type.
-type rawMessage struct {
- buf []byte
-}
-
-func (rm *rawMessage) Marshal() ([]byte, error) {
- return rm.buf, nil
-}
-
-func (rm *rawMessage) Unmarshal(buf []byte) error {
- rm.buf = make([]byte, len(buf))
- copy(rm.buf, buf)
- return nil
-}
-
-func requestSupported(service, method string) bool {
- // This list of supported services is taken from SERVICE_PB_MAP in remote_api_services.py
- switch service {
- case "app_identity_service", "blobstore", "capability_service", "channel", "datastore_v3",
- "datastore_v4", "file", "images", "logservice", "mail", "matcher", "memcache", "remote_datastore",
- "remote_socket", "search", "modules", "system", "taskqueue", "urlfetch", "user", "xmpp":
- return true
- }
- return false
-}
-
-// Methods to satisfy proto.Message.
-func (rm *rawMessage) Reset() { rm.buf = nil }
-func (rm *rawMessage) String() string { return strconv.Quote(string(rm.buf)) }
-func (*rawMessage) ProtoMessage() {}
diff --git a/vendor/google.golang.org/appengine/runtime/runtime.go b/vendor/google.golang.org/appengine/runtime/runtime.go
deleted file mode 100644
index 7aca8f6..0000000
--- a/vendor/google.golang.org/appengine/runtime/runtime.go
+++ /dev/null
@@ -1,148 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package runtime exposes information about the resource usage of the application.
-It also provides a way to run code in a new background context of a module.
-
-This package does not work on Managed VMs.
-*/
-package runtime
-
-import (
- "net/http"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/system"
-)
-
-// Statistics represents the system's statistics.
-type Statistics struct {
- // CPU records the CPU consumed by this instance, in megacycles.
- CPU struct {
- Total float64
- Rate1M float64 // consumption rate over one minute
- Rate10M float64 // consumption rate over ten minutes
- }
- // RAM records the memory used by the instance, in megabytes.
- RAM struct {
- Current float64
- Average1M float64 // average usage over one minute
- Average10M float64 // average usage over ten minutes
- }
-}
-
-func Stats(c context.Context) (*Statistics, error) {
- req := &pb.GetSystemStatsRequest{}
- res := &pb.GetSystemStatsResponse{}
- if err := internal.Call(c, "system", "GetSystemStats", req, res); err != nil {
- return nil, err
- }
- s := &Statistics{}
- if res.Cpu != nil {
- s.CPU.Total = res.Cpu.GetTotal()
- s.CPU.Rate1M = res.Cpu.GetRate1M()
- s.CPU.Rate10M = res.Cpu.GetRate10M()
- }
- if res.Memory != nil {
- s.RAM.Current = res.Memory.GetCurrent()
- s.RAM.Average1M = res.Memory.GetAverage1M()
- s.RAM.Average10M = res.Memory.GetAverage10M()
- }
- return s, nil
-}
-
-/*
-RunInBackground makes an API call that triggers an /_ah/background request.
-
-There are two independent code paths that need to make contact:
-the RunInBackground code, and the /_ah/background handler. The matchmaker
-loop arranges for the two paths to meet. The RunInBackground code passes
-a send to the matchmaker, the /_ah/background handler passes a recv to the matchmaker,
-and the matchmaker hooks them up.
-*/
-
-func init() {
- http.HandleFunc("/_ah/background", handleBackground)
-
- sc := make(chan send)
- rc := make(chan recv)
- sendc, recvc = sc, rc
- go matchmaker(sc, rc)
-}
-
-var (
- sendc chan<- send // RunInBackground sends to this
- recvc chan<- recv // handleBackground sends to this
-)
-
-type send struct {
- id string
- f func(context.Context)
-}
-
-type recv struct {
- id string
- ch chan<- func(context.Context)
-}
-
-func matchmaker(sendc <-chan send, recvc <-chan recv) {
- // When one side of the match arrives before the other
- // it is inserted in the corresponding map.
- waitSend := make(map[string]send)
- waitRecv := make(map[string]recv)
-
- for {
- select {
- case s := <-sendc:
- if r, ok := waitRecv[s.id]; ok {
- // meet!
- delete(waitRecv, s.id)
- r.ch <- s.f
- } else {
- // waiting for r
- waitSend[s.id] = s
- }
- case r := <-recvc:
- if s, ok := waitSend[r.id]; ok {
- // meet!
- delete(waitSend, r.id)
- r.ch <- s.f
- } else {
- // waiting for s
- waitRecv[r.id] = r
- }
- }
- }
-}
-
-var newContext = appengine.NewContext // for testing
-
-func handleBackground(w http.ResponseWriter, req *http.Request) {
- id := req.Header.Get("X-AppEngine-BackgroundRequest")
-
- ch := make(chan func(context.Context))
- recvc <- recv{id, ch}
- (<-ch)(newContext(req))
-}
-
-// RunInBackground runs f in a background goroutine in this process.
-// f is provided a context that may outlast the context provided to RunInBackground.
-// This is only valid to invoke from a manually scaled module.
-func RunInBackground(c context.Context, f func(c context.Context)) error {
- req := &pb.StartBackgroundRequestRequest{}
- res := &pb.StartBackgroundRequestResponse{}
- if err := internal.Call(c, "system", "StartBackgroundRequest", req, res); err != nil {
- return err
- }
- sendc <- send{res.GetRequestId(), f}
- return nil
-}
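-
-// Illustrative sketch, not part of the original vendored source: starting a
-// background request that samples system statistics. What is done with the
-// sample is left open; the RAM field read is only a placeholder.
-func exampleStartStatsSampler(c context.Context) error {
-	return RunInBackground(c, func(bc context.Context) {
-		// bc is the background context and may outlive c.
-		if s, err := Stats(bc); err == nil {
-			_ = s.RAM.Current // e.g. report current memory usage somewhere
-		}
-	})
-}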
-
-func init() {
- internal.RegisterErrorCodeMap("system", pb.SystemServiceError_ErrorCode_name)
-}
diff --git a/vendor/google.golang.org/appengine/search/doc.go b/vendor/google.golang.org/appengine/search/doc.go
deleted file mode 100644
index 2392e7e..0000000
--- a/vendor/google.golang.org/appengine/search/doc.go
+++ /dev/null
@@ -1,205 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package search provides a client for App Engine's search service.
-
-
-Basic Operations
-
-Indexes contain documents. Each index is identified by its name: a
-human-readable ASCII string.
-
-Within an index, documents are associated with an ID, which is also
-a human-readable ASCII string. A document's contents are a mapping from
-case-sensitive field names to values. Valid types for field values are:
- - string,
- - search.Atom,
- - search.HTML,
- - time.Time (stored with millisecond precision),
- - float64 (value between -2,147,483,647 and 2,147,483,647 inclusive),
- - appengine.GeoPoint.
-
-The Get and Put methods on an Index load and save a document.
-A document's contents are typically represented by a struct pointer.
-
-Example code:
-
- type Doc struct {
- Author string
- Comment string
- Creation time.Time
- }
-
- index, err := search.Open("comments")
- if err != nil {
- return err
- }
- newID, err := index.Put(ctx, "", &Doc{
- Author: "gopher",
- Comment: "the truth of the matter",
- Creation: time.Now(),
- })
- if err != nil {
- return err
- }
-
-A single document can be retrieved by its ID. Pass a destination struct
-to Get to hold the resulting document.
-
- var doc Doc
- err := index.Get(ctx, id, &doc)
- if err != nil {
- return err
- }
-
-
-Search and Listing Documents
-
-Indexes have two methods for retrieving multiple documents at once: Search and
-List.
-
-Searching an index for a query will result in an iterator. As with an iterator
-from package datastore, pass a destination struct to Next to decode the next
-result. Next will return Done when the iterator is exhausted.
-
- for t := index.Search(ctx, "Comment:truth", nil); ; {
- var doc Doc
- id, err := t.Next(&doc)
- if err == search.Done {
- break
- }
- if err != nil {
- return err
- }
- fmt.Fprintf(w, "%s -> %#v\n", id, doc)
- }
-
-Search takes a string query to determine which documents to return. The query
-can be simple, such as a single word to match, or complex. The query
-language is described at
-https://cloud.google.com/appengine/docs/go/search/query_strings
-
-Search also takes an optional SearchOptions struct which gives much more
-control over how results are calculated and returned.
-
-Call List to iterate over all documents in an index.
-
- for t := index.List(ctx, nil); ; {
- var doc Doc
- id, err := t.Next(&doc)
- if err == search.Done {
- break
- }
- if err != nil {
- return err
- }
- fmt.Fprintf(w, "%s -> %#v\n", id, doc)
- }
-
-
-Fields and Facets
-
-A document's contents can be represented by a variety of types. These are
-typically struct pointers, but they can also be represented by any type
-implementing the FieldLoadSaver interface. The FieldLoadSaver allows metadata
-to be set for the document with the DocumentMetadata type. Struct pointers are
-more strongly typed and are easier to use; FieldLoadSavers are more flexible.
-
-A document's contents can be expressed in two ways: fields and facets.
-
-Fields are the most common way of providing content for documents. Fields can
-store data in multiple types and can be matched in searches using query
-strings.
-
-Facets provide a way to attach categorical information to a document. The only
-valid types for facets are search.Atom and float64. Facets allow search
-results to contain summaries of the categories matched in a search, and to
-restrict searches to only match against specific categories.
-
-By default, for struct pointers, all of the struct fields are used as document
-fields, and the field name used is the same as on the struct (and hence must
-start with an upper case letter). Struct fields may have a
-`search:"name,options"` tag. The name must start with a letter and be
-composed only of word characters. If options is "facet" then the struct
-field will be used as a document facet. If options is "" then the comma
-may be omitted. There are no other recognized options.
-
-Example code:
-
- // A and B are renamed to a and b.
- // A, C and I are facets.
- // D's tag is equivalent to having no tag at all (E).
- // I has tag information for both the search and json packages.
- type TaggedStruct struct {
- A float64 `search:"a,facet"`
- B float64 `search:"b"`
- C float64 `search:",facet"`
- D float64 `search:""`
- E float64
- I float64 `search:",facet" json:"i"`
- }
-
-
-The FieldLoadSaver Interface
-
-A document's contents can also be represented by any type that implements the
-FieldLoadSaver interface. This type may be a struct pointer, but it
-does not have to be. The search package will call Load when loading the
-document's contents, and Save when saving them. In addition to a slice of
-Fields, the Load and Save methods also use the DocumentMetadata type to
-provide additional information about a document (such as its Rank, or set of
-Facets). Possible uses for this interface include deriving non-stored fields,
-verifying fields or setting specific languages for string and HTML fields.
-
-Example code:
-
- type CustomFieldsExample struct {
- // Item's title and which language it is in.
- Title string
- Lang string
- // Mass, in grams.
- Mass int
- }
-
- func (x *CustomFieldsExample) Load(fields []search.Field, meta *search.DocumentMetadata) error {
- // Load the title field, failing if any other field is found.
- for _, f := range fields {
- if f.Name != "title" {
- return fmt.Errorf("unknown field %q", f.Name)
- }
- s, ok := f.Value.(string)
- if !ok {
- return fmt.Errorf("unsupported type %T for field %q", f.Value, f.Name)
- }
- x.Title = s
- x.Lang = f.Language
- }
- // Load the mass facet, failing if any other facet is found.
- for _, f := range meta.Facets {
- if f.Name != "mass" {
- return fmt.Errorf("unknown facet %q", f.Name)
- }
- m, ok := f.Value.(float64)
- if !ok {
- return fmt.Errorf("unsupported type %T for facet %q", f.Value, f.Name)
- }
- x.Mass = int(m)
- }
- return nil
- }
-
- func (x *CustomFieldsExample) Save() ([]search.Field, *search.DocumentMetadata, error) {
- fields := []search.Field{
- {Name: "title", Value: x.Title, Language: x.Lang},
- }
- meta := &search.DocumentMetadata{
-			Facets: []search.Facet{
- {Name: "mass", Value: float64(x.Mass)},
- },
- }
- return fields, meta, nil
- }
-*/
-package search
diff --git a/vendor/google.golang.org/appengine/search/field.go b/vendor/google.golang.org/appengine/search/field.go
deleted file mode 100644
index 707c2d8..0000000
--- a/vendor/google.golang.org/appengine/search/field.go
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package search
-
-// Field is a name/value pair. A search index's document can be loaded and
-// saved as a sequence of Fields.
-type Field struct {
- // Name is the field name. A valid field name matches /[A-Za-z][A-Za-z0-9_]*/.
- Name string
- // Value is the field value. The valid types are:
- // - string,
- // - search.Atom,
- // - search.HTML,
- // - time.Time (stored with millisecond precision),
- // - float64,
- // - GeoPoint.
- Value interface{}
- // Language is a two-letter ISO 639-1 code for the field's language,
- // defaulting to "en" if nothing is specified. It may only be specified for
- // fields of type string and search.HTML.
- Language string
- // Derived marks fields that were calculated as a result of a
- // FieldExpression provided to Search. This field is ignored when saving a
- // document.
- Derived bool
-}
-
-// Facet is a name/value pair which is used to add categorical information to a
-// document.
-type Facet struct {
- // Name is the facet name. A valid facet name matches /[A-Za-z][A-Za-z0-9_]*/.
- // A facet name cannot be longer than 500 characters.
- Name string
- // Value is the facet value.
- //
- // When being used in documents (for example, in
- // DocumentMetadata.Facets), the valid types are:
- // - search.Atom,
- // - float64.
- //
- // When being used in SearchOptions.Refinements or being returned
- // in FacetResult, the valid types are:
- // - search.Atom,
- // - search.Range.
- Value interface{}
-}
-
-// DocumentMetadata is a struct containing information describing a given document.
-type DocumentMetadata struct {
- // Rank is an integer specifying the order the document will be returned in
- // search results. If zero, the rank will be set to the number of seconds since
- // 2011-01-01 00:00:00 UTC when being Put into an index.
- Rank int
- // Facets is the set of facets for this document.
- Facets []Facet
-}
-
-// FieldLoadSaver can be converted from and to a slice of Fields
-// with additional document metadata.
-type FieldLoadSaver interface {
- Load([]Field, *DocumentMetadata) error
- Save() ([]Field, *DocumentMetadata, error)
-}
-
-// FieldList converts a []Field to implement FieldLoadSaver.
-type FieldList []Field
-
-// Load loads all of the provided fields into l.
-// It does not first reset *l to an empty slice.
-func (l *FieldList) Load(f []Field, _ *DocumentMetadata) error {
- *l = append(*l, f...)
- return nil
-}
-
-// Save returns all of l's fields as a slice of Fields.
-func (l *FieldList) Save() ([]Field, *DocumentMetadata, error) {
- return *l, nil, nil
-}
-
-var _ FieldLoadSaver = (*FieldList)(nil)
diff --git a/vendor/google.golang.org/appengine/search/search.go b/vendor/google.golang.org/appengine/search/search.go
deleted file mode 100644
index 7a26520..0000000
--- a/vendor/google.golang.org/appengine/search/search.go
+++ /dev/null
@@ -1,1109 +0,0 @@
-// Copyright 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package search
-
-// TODO: let Put specify the document language: "en", "fr", etc. Also: order_id?? storage??
-// TODO: Index.GetAll (or Iterator.GetAll)?
-// TODO: struct <-> protobuf tests.
-// TODO: enforce Python's MIN_NUMBER_VALUE and MIN_DATE (which would disallow a zero
-// time.Time)? _MAXIMUM_STRING_LENGTH?
-
-import (
- "errors"
- "fmt"
- "math"
- "reflect"
- "regexp"
- "strconv"
- "strings"
- "time"
- "unicode/utf8"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/search"
-)
-
-var (
- // ErrInvalidDocumentType is returned when methods like Put, Get or Next
- // are passed a dst or src argument of invalid type.
- ErrInvalidDocumentType = errors.New("search: invalid document type")
-
- // ErrNoSuchDocument is returned when no document was found for a given ID.
- ErrNoSuchDocument = errors.New("search: no such document")
-)
-
-// Atom is a document field whose contents are indexed as a single indivisible
-// string.
-type Atom string
-
-// HTML is a document field whose contents are indexed as HTML. Only text nodes
-// are indexed: "foo<b>bar</b>" will be treated as "foobar".
-type HTML string
-
-// validIndexNameOrDocID is the Go equivalent of Python's
-// _ValidateVisiblePrintableAsciiNotReserved.
-func validIndexNameOrDocID(s string) bool {
- if strings.HasPrefix(s, "!") {
- return false
- }
- for _, c := range s {
- if c < 0x21 || 0x7f <= c {
- return false
- }
- }
- return true
-}
-
-var (
- fieldNameRE = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9_]*$`)
- languageRE = regexp.MustCompile(`^[a-z]{2}$`)
-)
-
-// validFieldName is the Go equivalent of Python's _CheckFieldName. It checks
-// the validity of both field and facet names.
-func validFieldName(s string) bool {
- return len(s) <= 500 && fieldNameRE.MatchString(s)
-}
-
-// validDocRank checks that the rank is in the range [0, 2^31).
-func validDocRank(r int) bool {
- return 0 <= r && r <= (1<<31-1)
-}
-
-// validLanguage checks that a language looks like ISO 639-1.
-func validLanguage(s string) bool {
- return languageRE.MatchString(s)
-}
-
-// validFloat checks that f is in the range [-2147483647, 2147483647].
-func validFloat(f float64) bool {
- return -(1<<31-1) <= f && f <= (1<<31-1)
-}
-
-// Index is an index of documents.
-type Index struct {
- spec pb.IndexSpec
-}
-
-// orderIDEpoch forms the basis for populating OrderId on documents.
-var orderIDEpoch = time.Date(2011, 1, 1, 0, 0, 0, 0, time.UTC)
-
-// Open opens the index with the given name. The index is created if it does
-// not already exist.
-//
-// The name is a human-readable ASCII string. It must contain no whitespace
-// characters and not start with "!".
-func Open(name string) (*Index, error) {
- if !validIndexNameOrDocID(name) {
- return nil, fmt.Errorf("search: invalid index name %q", name)
- }
- return &Index{
- spec: pb.IndexSpec{
- Name: &name,
- },
- }, nil
-}
-
-// Put saves src to the index. If id is empty, a new ID is allocated by the
-// service and returned. If id is not empty, any existing index entry for that
-// ID is replaced.
-//
-// The ID is a human-readable ASCII string. It must contain no whitespace
-// characters and not start with "!".
-//
-// src must be a non-nil struct pointer or implement the FieldLoadSaver
-// interface.
-func (x *Index) Put(c context.Context, id string, src interface{}) (string, error) {
- d, err := saveDoc(src)
- if err != nil {
- return "", err
- }
- if id != "" {
- if !validIndexNameOrDocID(id) {
- return "", fmt.Errorf("search: invalid ID %q", id)
- }
- d.Id = proto.String(id)
- }
- req := &pb.IndexDocumentRequest{
- Params: &pb.IndexDocumentParams{
- Document: []*pb.Document{d},
- IndexSpec: &x.spec,
- },
- }
- res := &pb.IndexDocumentResponse{}
- if err := internal.Call(c, "search", "IndexDocument", req, res); err != nil {
- return "", err
- }
- if len(res.Status) > 0 {
- if s := res.Status[0]; s.GetCode() != pb.SearchServiceError_OK {
- return "", fmt.Errorf("search: %s: %s", s.GetCode(), s.GetErrorDetail())
- }
- }
- if len(res.Status) != 1 || len(res.DocId) != 1 {
- return "", fmt.Errorf("search: internal error: wrong number of results (%d Statuses, %d DocIDs)",
- len(res.Status), len(res.DocId))
- }
- return res.DocId[0], nil
-}
-
-// Get loads the document with the given ID into dst.
-//
-// The ID is a human-readable ASCII string. It must be non-empty, contain no
-// whitespace characters and not start with "!".
-//
-// dst must be a non-nil struct pointer or implement the FieldLoadSaver
-// interface.
-//
-// ErrFieldMismatch is returned when a field is to be loaded into a different
-// type than the one it was stored from, or when a field is missing or
-// unexported in the destination struct. ErrFieldMismatch is only returned if
-// dst is a struct pointer. It is up to the caller to decide whether this error
-// is fatal, recoverable, or ignorable.
-func (x *Index) Get(c context.Context, id string, dst interface{}) error {
- if id == "" || !validIndexNameOrDocID(id) {
- return fmt.Errorf("search: invalid ID %q", id)
- }
- req := &pb.ListDocumentsRequest{
- Params: &pb.ListDocumentsParams{
- IndexSpec: &x.spec,
- StartDocId: proto.String(id),
- Limit: proto.Int32(1),
- },
- }
- res := &pb.ListDocumentsResponse{}
- if err := internal.Call(c, "search", "ListDocuments", req, res); err != nil {
- return err
- }
- if res.Status == nil || res.Status.GetCode() != pb.SearchServiceError_OK {
- return fmt.Errorf("search: %s: %s", res.Status.GetCode(), res.Status.GetErrorDetail())
- }
- if len(res.Document) != 1 || res.Document[0].GetId() != id {
- return ErrNoSuchDocument
- }
- return loadDoc(dst, res.Document[0], nil)
-}
-
-// Delete deletes a document from the index.
-func (x *Index) Delete(c context.Context, id string) error {
- req := &pb.DeleteDocumentRequest{
- Params: &pb.DeleteDocumentParams{
- DocId: []string{id},
- IndexSpec: &x.spec,
- },
- }
- res := &pb.DeleteDocumentResponse{}
- if err := internal.Call(c, "search", "DeleteDocument", req, res); err != nil {
- return err
- }
- if len(res.Status) != 1 {
- return fmt.Errorf("search: internal error: wrong number of results (%d)", len(res.Status))
- }
- if s := res.Status[0]; s.GetCode() != pb.SearchServiceError_OK {
- return fmt.Errorf("search: %s: %s", s.GetCode(), s.GetErrorDetail())
- }
- return nil
-}
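
For readers tracing what this removal drops: Open, Put, Get and Delete above form the whole per-document lifecycle. The following is a minimal sketch of how they were typically combined; the Greeting type, its field names, and the "greetings" index name are illustrative assumptions rather than code from this repository, and the context would normally come from an incoming App Engine request.

package searchsketch

import (
	"time"

	"golang.org/x/net/context"

	"google.golang.org/appengine/search"
)

// Greeting is an illustrative document type; its field types (string,
// time.Time) are among those the search service indexes directly.
type Greeting struct {
	Author  string
	Comment string
	Date    time.Time
}

// storeFetchDelete indexes one document, reads it back by ID, and removes it.
func storeFetchDelete(c context.Context) error {
	index, err := search.Open("greetings") // created on first use if absent
	if err != nil {
		return err
	}
	// An empty id asks the service to allocate one.
	id, err := index.Put(c, "", &Greeting{Author: "gopher", Comment: "hello", Date: time.Now()})
	if err != nil {
		return err
	}
	var g Greeting
	if err := index.Get(c, id, &g); err != nil {
		return err
	}
	return index.Delete(c, id)
}
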
-
-// List lists all of the documents in an index. The documents are returned in
-// increasing ID order.
-func (x *Index) List(c context.Context, opts *ListOptions) *Iterator {
- t := &Iterator{
- c: c,
- index: x,
- count: -1,
- listInclusive: true,
- more: moreList,
- }
- if opts != nil {
- t.listStartID = opts.StartID
- t.limit = opts.Limit
- t.idsOnly = opts.IDsOnly
- }
- return t
-}
-
-func moreList(t *Iterator) error {
- req := &pb.ListDocumentsRequest{
- Params: &pb.ListDocumentsParams{
- IndexSpec: &t.index.spec,
- },
- }
- if t.listStartID != "" {
- req.Params.StartDocId = &t.listStartID
- req.Params.IncludeStartDoc = &t.listInclusive
- }
- if t.limit > 0 {
- req.Params.Limit = proto.Int32(int32(t.limit))
- }
- if t.idsOnly {
- req.Params.KeysOnly = &t.idsOnly
- }
-
- res := &pb.ListDocumentsResponse{}
- if err := internal.Call(t.c, "search", "ListDocuments", req, res); err != nil {
- return err
- }
- if res.Status == nil || res.Status.GetCode() != pb.SearchServiceError_OK {
- return fmt.Errorf("search: %s: %s", res.Status.GetCode(), res.Status.GetErrorDetail())
- }
- t.listRes = res.Document
- t.listStartID, t.listInclusive, t.more = "", false, nil
- if len(res.Document) != 0 && t.limit <= 0 {
- if id := res.Document[len(res.Document)-1].GetId(); id != "" {
- t.listStartID, t.more = id, moreList
- }
- }
- return nil
-}
-
-// ListOptions are the options for listing documents in an index. Passing a nil
-// *ListOptions is equivalent to using the default values.
-type ListOptions struct {
- // StartID is the inclusive lower bound for the ID of the returned
- // documents. The zero value means all documents will be returned.
- StartID string
-
- // Limit is the maximum number of documents to return. The zero value
- // indicates no limit.
- Limit int
-
- // IDsOnly indicates that only document IDs should be returned for the list
- // operation; no document fields are populated.
- IDsOnly bool
-}
-
-// Search searches the index for the given query.
-func (x *Index) Search(c context.Context, query string, opts *SearchOptions) *Iterator {
- t := &Iterator{
- c: c,
- index: x,
- searchQuery: query,
- more: moreSearch,
- }
- if opts != nil {
- if opts.Cursor != "" {
- if opts.Offset != 0 {
- return errIter("at most one of Cursor and Offset may be specified")
- }
- t.searchCursor = proto.String(string(opts.Cursor))
- }
- t.limit = opts.Limit
- t.fields = opts.Fields
- t.idsOnly = opts.IDsOnly
- t.sort = opts.Sort
- t.exprs = opts.Expressions
- t.refinements = opts.Refinements
- t.facetOpts = opts.Facets
- t.searchOffset = opts.Offset
- }
- return t
-}
-
-func moreSearch(t *Iterator) error {
- // We use per-result (rather than single/per-page) cursors since this
- // lets us return a Cursor for every iterator document. The two cursor
- // types are largely interchangeable: a page cursor is the same as the
- // last per-result cursor in a given search response.
- req := &pb.SearchRequest{
- Params: &pb.SearchParams{
- IndexSpec: &t.index.spec,
- Query: &t.searchQuery,
- Cursor: t.searchCursor,
- CursorType: pb.SearchParams_PER_RESULT.Enum(),
- FieldSpec: &pb.FieldSpec{
- Name: t.fields,
- },
- },
- }
- if t.limit > 0 {
- req.Params.Limit = proto.Int32(int32(t.limit))
- }
- if t.searchOffset > 0 {
- req.Params.Offset = proto.Int32(int32(t.searchOffset))
- t.searchOffset = 0
- }
- if t.idsOnly {
- req.Params.KeysOnly = &t.idsOnly
- }
- if t.sort != nil {
- if err := sortToProto(t.sort, req.Params); err != nil {
- return err
- }
- }
- if t.refinements != nil {
- if err := refinementsToProto(t.refinements, req.Params); err != nil {
- return err
- }
- }
- for _, e := range t.exprs {
- req.Params.FieldSpec.Expression = append(req.Params.FieldSpec.Expression, &pb.FieldSpec_Expression{
- Name: proto.String(e.Name),
- Expression: proto.String(e.Expr),
- })
- }
- for _, f := range t.facetOpts {
- if err := f.setParams(req.Params); err != nil {
- return fmt.Errorf("bad FacetSearchOption: %v", err)
- }
- }
- // Don't repeat facet search.
- t.facetOpts = nil
-
- res := &pb.SearchResponse{}
- if err := internal.Call(t.c, "search", "Search", req, res); err != nil {
- return err
- }
- if res.Status == nil || res.Status.GetCode() != pb.SearchServiceError_OK {
- return fmt.Errorf("search: %s: %s", res.Status.GetCode(), res.Status.GetErrorDetail())
- }
- t.searchRes = res.Result
- if len(res.FacetResult) > 0 {
- t.facetRes = res.FacetResult
- }
- t.count = int(*res.MatchedCount)
- if t.limit > 0 {
- t.more = nil
- } else {
- t.more = moreSearch
- }
- return nil
-}
-
-// SearchOptions are the options for searching an index. Passing a nil
-// *SearchOptions is equivalent to using the default values.
-type SearchOptions struct {
- // Limit is the maximum number of documents to return. The zero value
- // indicates no limit.
- Limit int
-
- // IDsOnly indicates that only document IDs should be returned for the search
- // operation; no document fields are populated.
- IDsOnly bool
-
- // Sort controls the ordering of search results.
- Sort *SortOptions
-
- // Fields specifies which document fields to include in the results. If omitted,
- // all document fields are returned. No more than 100 fields may be specified.
- Fields []string
-
- // Expressions specifies additional computed fields to add to each returned
- // document.
- Expressions []FieldExpression
-
- // Facets controls what facet information is returned for these search results.
- // If no options are specified, no facet results will be returned.
- Facets []FacetSearchOption
-
- // Refinements filters the returned documents by requiring them to contain facets
- // with specific values. Refinements are applied in conjunction for facets with
- // different names, and in disjunction otherwise.
- Refinements []Facet
-
- // Cursor causes the results to commence with the first document after
- // the document associated with the cursor.
- Cursor Cursor
-
- // Offset specifies the number of documents to skip over before returning results.
- // When specified, Cursor must be empty.
- Offset int
-}
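
The options above feed Index.Search; results are then drained through the Iterator defined further down (Next until Done, Cursor for resumption). A short sketch follows, again assuming an illustrative "greetings" index with Author/Comment fields rather than anything defined in this repository.

package searchsketch

import (
	"golang.org/x/net/context"

	"google.golang.org/appengine/search"
)

// Greeting mirrors the illustrative document type from the earlier sketch.
type Greeting struct {
	Author  string
	Comment string
}

// listMatches runs a query, walks the iterator until search.Done, and returns
// the matching IDs plus the last per-result cursor for later resumption.
func listMatches(c context.Context, query string) ([]string, search.Cursor, error) {
	index, err := search.Open("greetings")
	if err != nil {
		return nil, "", err
	}
	it := index.Search(c, query, &search.SearchOptions{
		Limit:  20,                 // stop after 20 documents
		Fields: []string{"Author"}, // only return the Author field
		Sort: &search.SortOptions{
			Expressions: []search.SortExpression{{Expr: "Author"}},
		},
	})
	var ids []string
	for {
		var g Greeting
		id, err := it.Next(&g)
		if err == search.Done {
			break
		}
		if err != nil {
			return nil, "", err
		}
		ids = append(ids, id)
	}
	return ids, it.Cursor(), nil
}
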
-
-// Cursor represents an iterator's position.
-//
-// The string value of a cursor is web-safe. It can be saved and restored
-// for later use.
-type Cursor string
-
-// FieldExpression defines a custom expression to evaluate for each result.
-type FieldExpression struct {
- // Name is the name to use for the computed field.
- Name string
-
- // Expr is evaluated to provide a custom content snippet for each document.
- // See https://cloud.google.com/appengine/docs/go/search/options for
- // the supported expression syntax.
- Expr string
-}
-
-// FacetSearchOption controls what facet information is returned in search results.
-type FacetSearchOption interface {
- setParams(*pb.SearchParams) error
-}
-
-// AutoFacetDiscovery returns a FacetSearchOption which enables automatic facet
-// discovery for the search. Automatic facet discovery looks for the facets
-// which appear the most often in the aggregate in the matched documents.
-//
-// The maximum number of facets returned is controlled by facetLimit, and the
-// maximum number of values per facet by valueLimit. A limit of zero indicates
-// a default limit should be used.
-func AutoFacetDiscovery(facetLimit, valueLimit int) FacetSearchOption {
- return &autoFacetOpt{facetLimit, valueLimit}
-}
-
-type autoFacetOpt struct {
- facetLimit, valueLimit int
-}
-
-const defaultAutoFacetLimit = 10 // As per python runtime search.py.
-
-func (o *autoFacetOpt) setParams(params *pb.SearchParams) error {
- lim := int32(o.facetLimit)
- if lim == 0 {
- lim = defaultAutoFacetLimit
- }
- params.AutoDiscoverFacetCount = &lim
- if o.valueLimit > 0 {
- params.FacetAutoDetectParam = &pb.FacetAutoDetectParam{
- ValueLimit: proto.Int32(int32(o.valueLimit)),
- }
- }
- return nil
-}
-
-// FacetDiscovery returns a FacetSearchOption which selects a facet to be
-// returned with the search results. By default, the most frequently
-// occurring values for that facet will be returned. However, you can also
-// specify a list of particular Atoms or specific Ranges to return.
-func FacetDiscovery(name string, value ...interface{}) FacetSearchOption {
- return &facetOpt{name, value}
-}
-
-type facetOpt struct {
- name string
- values []interface{}
-}
-
-func (o *facetOpt) setParams(params *pb.SearchParams) error {
- req := &pb.FacetRequest{Name: &o.name}
- params.IncludeFacet = append(params.IncludeFacet, req)
- if len(o.values) == 0 {
- return nil
- }
- vtype := reflect.TypeOf(o.values[0])
- reqParam := &pb.FacetRequestParam{}
- for _, v := range o.values {
- if reflect.TypeOf(v) != vtype {
- return errors.New("values must all be Atom, or must all be Range")
- }
- switch v := v.(type) {
- case Atom:
- reqParam.ValueConstraint = append(reqParam.ValueConstraint, string(v))
- case Range:
- rng, err := rangeToProto(v)
- if err != nil {
- return fmt.Errorf("invalid range: %v", err)
- }
- reqParam.Range = append(reqParam.Range, rng)
- default:
- return fmt.Errorf("unsupported value type %T", v)
- }
- }
- req.Params = reqParam
- return nil
-}
-
-// FacetDocumentDepth returns a FacetSearchOption which controls the number of
-// documents to be evaluated in preparing facet results.
-func FacetDocumentDepth(depth int) FacetSearchOption {
- return facetDepthOpt(depth)
-}
-
-type facetDepthOpt int
-
-func (o facetDepthOpt) setParams(params *pb.SearchParams) error {
- params.FacetDepth = proto.Int32(int32(o))
- return nil
-}
-
-// FacetResult represents the number of times a particular facet and value
-// appeared in the documents matching a search request.
-type FacetResult struct {
- Facet
-
- // Count is the number of times this specific facet and value appeared in the
- // matching documents.
- Count int
-}
-
-// Range represents a numeric range with inclusive start and exclusive end.
-// Start may be specified as math.Inf(-1) to indicate there is no minimum
-// value, and End may similarly be specified as math.Inf(1); at least one of
-// Start or End must be a finite number.
-type Range struct {
- Start, End float64
-}
-
-var (
- negInf = math.Inf(-1)
- posInf = math.Inf(1)
-)
-
-// AtLeast returns a Range matching any value greater than, or equal to, min.
-func AtLeast(min float64) Range {
- return Range{Start: min, End: posInf}
-}
-
-// LessThan returns a Range matching any value less than max.
-func LessThan(max float64) Range {
- return Range{Start: negInf, End: max}
-}
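
AtLeast, LessThan and Range plug into FacetDiscovery above to request per-bucket counts, which are read back with Iterator.Facets further down. A sketch, assuming a hypothetical numeric facet named "price" with illustrative bucket boundaries:

package searchsketch

import (
	"golang.org/x/net/context"

	"google.golang.org/appengine/search"
)

// priceBuckets asks the service to count matches per price range and reads
// the counts back from the iterator.
func priceBuckets(c context.Context, index *search.Index, query string) ([][]search.FacetResult, error) {
	it := index.Search(c, query, &search.SearchOptions{
		IDsOnly: true, // only facet counts are of interest here
		Facets: []search.FacetSearchOption{
			search.FacetDiscovery("price",
				search.LessThan(10),               // (-inf, 10)
				search.Range{Start: 10, End: 100}, // [10, 100)
				search.AtLeast(100),               // [100, +inf)
			),
		},
	})
	return it.Facets()
}
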
-
-// SortOptions control the ordering and scoring of search results.
-type SortOptions struct {
- // Expressions is a slice of expressions representing a multi-dimensional
- // sort.
- Expressions []SortExpression
-
- // Scorer, when specified, will cause the documents to be scored according to
- // search term frequency.
- Scorer Scorer
-
- // Limit is the maximum number of objects to score and/or sort. Limit cannot
- // be more than 10,000. The zero value indicates a default limit.
- Limit int
-}
-
-// SortExpression defines a single dimension for sorting a document.
-type SortExpression struct {
- // Expr is evaluated to provide a sorting value for each document.
- // See https://cloud.google.com/appengine/docs/go/search/options for
- // the supported expression syntax.
- Expr string
-
- // Reverse causes the documents to be sorted in ascending order.
- Reverse bool
-
- // The default value to use when no field is present or the expression
- // cannot be calculated for a document. For text sorts, Default must
- // be of type string; for numeric sorts, float64.
- Default interface{}
-}
-
-// A Scorer defines how a document is scored.
-type Scorer interface {
- toProto(*pb.ScorerSpec)
-}
-
-type enumScorer struct {
- enum pb.ScorerSpec_Scorer
-}
-
-func (e enumScorer) toProto(spec *pb.ScorerSpec) {
- spec.Scorer = e.enum.Enum()
-}
-
-var (
- // MatchScorer assigns a score based on term frequency in a document.
- MatchScorer Scorer = enumScorer{pb.ScorerSpec_MATCH_SCORER}
-
- // RescoringMatchScorer assigns a score based on the quality of the query
- // match. It is similar to a MatchScorer but uses a more complex scoring
- // algorithm based on match term frequency and other factors like field type.
- // Please be aware that this algorithm is continually refined and can change
- // over time without notice. This means that the ordering of search results
- // that use this scorer can also change without notice.
- RescoringMatchScorer Scorer = enumScorer{pb.ScorerSpec_RESCORING_MATCH_SCORER}
-)
-
-func sortToProto(sort *SortOptions, params *pb.SearchParams) error {
- for _, e := range sort.Expressions {
- spec := &pb.SortSpec{
- SortExpression: proto.String(e.Expr),
- }
- if e.Reverse {
- spec.SortDescending = proto.Bool(false)
- }
- if e.Default != nil {
- switch d := e.Default.(type) {
- case float64:
- spec.DefaultValueNumeric = &d
- case string:
- spec.DefaultValueText = &d
- default:
- return fmt.Errorf("search: invalid Default type %T for expression %q", d, e.Expr)
- }
- }
- params.SortSpec = append(params.SortSpec, spec)
- }
-
- spec := &pb.ScorerSpec{}
- if sort.Limit > 0 {
- spec.Limit = proto.Int32(int32(sort.Limit))
- params.ScorerSpec = spec
- }
- if sort.Scorer != nil {
- sort.Scorer.toProto(spec)
- params.ScorerSpec = spec
- }
-
- return nil
-}
-
-func refinementsToProto(refinements []Facet, params *pb.SearchParams) error {
- for _, r := range refinements {
- ref := &pb.FacetRefinement{
- Name: proto.String(r.Name),
- }
- switch v := r.Value.(type) {
- case Atom:
- ref.Value = proto.String(string(v))
- case Range:
- rng, err := rangeToProto(v)
- if err != nil {
- return fmt.Errorf("search: refinement for facet %q: %v", r.Name, err)
- }
- // Unfortunately there are two identical messages for identifying Facet ranges.
- ref.Range = &pb.FacetRefinement_Range{Start: rng.Start, End: rng.End}
- default:
- return fmt.Errorf("search: unsupported refinement for facet %q of type %T", r.Name, v)
- }
- params.FacetRefinement = append(params.FacetRefinement, ref)
- }
- return nil
-}
-
-func rangeToProto(r Range) (*pb.FacetRange, error) {
- rng := &pb.FacetRange{}
- if r.Start != negInf {
- if !validFloat(r.Start) {
- return nil, errors.New("invalid value for Start")
- }
- rng.Start = proto.String(strconv.FormatFloat(r.Start, 'e', -1, 64))
- } else if r.End == posInf {
- return nil, errors.New("either Start or End must be finite")
- }
- if r.End != posInf {
- if !validFloat(r.End) {
- return nil, errors.New("invalid value for End")
- }
- rng.End = proto.String(strconv.FormatFloat(r.End, 'e', -1, 64))
- }
- return rng, nil
-}
-
-func protoToRange(rng *pb.FacetRefinement_Range) Range {
- r := Range{Start: negInf, End: posInf}
- if x, err := strconv.ParseFloat(rng.GetStart(), 64); err == nil {
- r.Start = x
- }
- if x, err := strconv.ParseFloat(rng.GetEnd(), 64); err == nil {
- r.End = x
- }
- return r
-}
-
-// Iterator is the result of searching an index for a query or listing an
-// index.
-type Iterator struct {
- c context.Context
- index *Index
- err error
-
- listRes []*pb.Document
- listStartID string
- listInclusive bool
-
- searchRes []*pb.SearchResult
- facetRes []*pb.FacetResult
- searchQuery string
- searchCursor *string
- searchOffset int
- sort *SortOptions
-
- fields []string
- exprs []FieldExpression
- refinements []Facet
- facetOpts []FacetSearchOption
-
- more func(*Iterator) error
-
- count int
- limit int // items left to return; 0 for unlimited.
- idsOnly bool
-}
-
-// errIter returns an iterator that only returns the given error.
-func errIter(err string) *Iterator {
- return &Iterator{
- err: errors.New(err),
- }
-}
-
-// Done is returned when a query iteration has completed.
-var Done = errors.New("search: query has no more results")
-
-// Count returns an approximation of the number of documents matched by the
-// query. It is only valid to call for iterators returned by Search.
-func (t *Iterator) Count() int { return t.count }
-
-// fetchMore retrieves more results, if there are no errors or pending results.
-func (t *Iterator) fetchMore() {
- if t.err == nil && len(t.listRes)+len(t.searchRes) == 0 && t.more != nil {
- t.err = t.more(t)
- }
-}
-
-// Next returns the ID of the next result. When there are no more results,
-// Done is returned as the error.
-//
-// dst must be a non-nil struct pointer, implement the FieldLoadSaver
-// interface, or be a nil interface value. If a non-nil dst is provided, it
-// will be filled with the indexed fields. dst is ignored if this iterator was
-// created with an IDsOnly option.
-func (t *Iterator) Next(dst interface{}) (string, error) {
- t.fetchMore()
- if t.err != nil {
- return "", t.err
- }
-
- var doc *pb.Document
- var exprs []*pb.Field
- switch {
- case len(t.listRes) != 0:
- doc = t.listRes[0]
- t.listRes = t.listRes[1:]
- case len(t.searchRes) != 0:
- doc = t.searchRes[0].Document
- exprs = t.searchRes[0].Expression
- t.searchCursor = t.searchRes[0].Cursor
- t.searchRes = t.searchRes[1:]
- default:
- return "", Done
- }
- if doc == nil {
- return "", errors.New("search: internal error: no document returned")
- }
- if !t.idsOnly && dst != nil {
- if err := loadDoc(dst, doc, exprs); err != nil {
- return "", err
- }
- }
- return doc.GetId(), nil
-}
-
-// Cursor returns the cursor associated with the current document (that is,
-// the document most recently returned by a call to Next).
-//
-// Passing this cursor in a future call to Search will cause those results
-// to commence with the first document after the current document.
-func (t *Iterator) Cursor() Cursor {
- if t.searchCursor == nil {
- return ""
- }
- return Cursor(*t.searchCursor)
-}
-
-// Facets returns the facets found within the search results, if any facets
-// were requested in the SearchOptions.
-func (t *Iterator) Facets() ([][]FacetResult, error) {
- t.fetchMore()
- if t.err != nil && t.err != Done {
- return nil, t.err
- }
-
- var facets [][]FacetResult
- for _, f := range t.facetRes {
- fres := make([]FacetResult, 0, len(f.Value))
- for _, v := range f.Value {
- ref := v.Refinement
- facet := FacetResult{
- Facet: Facet{Name: ref.GetName()},
- Count: int(v.GetCount()),
- }
- if ref.Value != nil {
- facet.Value = Atom(*ref.Value)
- } else {
- facet.Value = protoToRange(ref.Range)
- }
- fres = append(fres, facet)
- }
- facets = append(facets, fres)
- }
- return facets, nil
-}
-
-// saveDoc converts from a struct pointer or
-// FieldLoadSaver/FieldMetadataLoadSaver to the Document protobuf.
-func saveDoc(src interface{}) (*pb.Document, error) {
- var err error
- var fields []Field
- var meta *DocumentMetadata
- switch x := src.(type) {
- case FieldLoadSaver:
- fields, meta, err = x.Save()
- default:
- fields, err = SaveStruct(src)
- }
- if err != nil {
- return nil, err
- }
-
- fieldsProto, err := fieldsToProto(fields)
- if err != nil {
- return nil, err
- }
- d := &pb.Document{
- Field: fieldsProto,
- OrderId: proto.Int32(int32(time.Since(orderIDEpoch).Seconds())),
- }
- if meta != nil {
- if meta.Rank != 0 {
- if !validDocRank(meta.Rank) {
- return nil, fmt.Errorf("search: invalid rank %d, must be [0, 2^31)", meta.Rank)
- }
- *d.OrderId = int32(meta.Rank)
- }
- if len(meta.Facets) > 0 {
- facets, err := facetsToProto(meta.Facets)
- if err != nil {
- return nil, err
- }
- d.Facet = facets
- }
- }
- return d, nil
-}
-
-func fieldsToProto(src []Field) ([]*pb.Field, error) {
- // Maps to catch duplicate time or numeric fields.
- timeFields, numericFields := make(map[string]bool), make(map[string]bool)
- dst := make([]*pb.Field, 0, len(src))
- for _, f := range src {
- if !validFieldName(f.Name) {
- return nil, fmt.Errorf("search: invalid field name %q", f.Name)
- }
- fieldValue := &pb.FieldValue{}
- switch x := f.Value.(type) {
- case string:
- fieldValue.Type = pb.FieldValue_TEXT.Enum()
- fieldValue.StringValue = proto.String(x)
- case Atom:
- fieldValue.Type = pb.FieldValue_ATOM.Enum()
- fieldValue.StringValue = proto.String(string(x))
- case HTML:
- fieldValue.Type = pb.FieldValue_HTML.Enum()
- fieldValue.StringValue = proto.String(string(x))
- case time.Time:
- if timeFields[f.Name] {
- return nil, fmt.Errorf("search: duplicate time field %q", f.Name)
- }
- timeFields[f.Name] = true
- fieldValue.Type = pb.FieldValue_DATE.Enum()
- fieldValue.StringValue = proto.String(strconv.FormatInt(x.UnixNano()/1e6, 10))
- case float64:
- if numericFields[f.Name] {
- return nil, fmt.Errorf("search: duplicate numeric field %q", f.Name)
- }
- if !validFloat(x) {
- return nil, fmt.Errorf("search: numeric field %q with invalid value %f", f.Name, x)
- }
- numericFields[f.Name] = true
- fieldValue.Type = pb.FieldValue_NUMBER.Enum()
- fieldValue.StringValue = proto.String(strconv.FormatFloat(x, 'e', -1, 64))
- case appengine.GeoPoint:
- if !x.Valid() {
- return nil, fmt.Errorf(
- "search: GeoPoint field %q with invalid value %v",
- f.Name, x)
- }
- fieldValue.Type = pb.FieldValue_GEO.Enum()
- fieldValue.Geo = &pb.FieldValue_Geo{
- Lat: proto.Float64(x.Lat),
- Lng: proto.Float64(x.Lng),
- }
- default:
- return nil, fmt.Errorf("search: unsupported field type: %v", reflect.TypeOf(f.Value))
- }
- if f.Language != "" {
- switch f.Value.(type) {
- case string, HTML:
- if !validLanguage(f.Language) {
- return nil, fmt.Errorf("search: invalid language for field %q: %q", f.Name, f.Language)
- }
- fieldValue.Language = proto.String(f.Language)
- default:
- return nil, fmt.Errorf("search: setting language not supported for field %q of type %T", f.Name, f.Value)
- }
- }
- if p := fieldValue.StringValue; p != nil && !utf8.ValidString(*p) {
- return nil, fmt.Errorf("search: %q field is invalid UTF-8: %q", f.Name, *p)
- }
- dst = append(dst, &pb.Field{
- Name: proto.String(f.Name),
- Value: fieldValue,
- })
- }
- return dst, nil
-}
-
-func facetsToProto(src []Facet) ([]*pb.Facet, error) {
- dst := make([]*pb.Facet, 0, len(src))
- for _, f := range src {
- if !validFieldName(f.Name) {
- return nil, fmt.Errorf("search: invalid facet name %q", f.Name)
- }
- facetValue := &pb.FacetValue{}
- switch x := f.Value.(type) {
- case Atom:
- if !utf8.ValidString(string(x)) {
- return nil, fmt.Errorf("search: %q facet is invalid UTF-8: %q", f.Name, x)
- }
- facetValue.Type = pb.FacetValue_ATOM.Enum()
- facetValue.StringValue = proto.String(string(x))
- case float64:
- if !validFloat(x) {
- return nil, fmt.Errorf("search: numeric facet %q with invalid value %f", f.Name, x)
- }
- facetValue.Type = pb.FacetValue_NUMBER.Enum()
- facetValue.StringValue = proto.String(strconv.FormatFloat(x, 'e', -1, 64))
- default:
- return nil, fmt.Errorf("search: unsupported facet type: %v", reflect.TypeOf(f.Value))
- }
- dst = append(dst, &pb.Facet{
- Name: proto.String(f.Name),
- Value: facetValue,
- })
- }
- return dst, nil
-}
-
-// loadDoc converts from protobufs to a struct pointer or
-// FieldLoadSaver/FieldMetadataLoadSaver. The src param provides the document's
-// stored fields and facets, and any document metadata. An additional slice of
-// fields, exprs, may optionally be provided to contain any derived expressions
-// requested by the developer.
-func loadDoc(dst interface{}, src *pb.Document, exprs []*pb.Field) (err error) {
- fields, err := protoToFields(src.Field)
- if err != nil {
- return err
- }
- facets, err := protoToFacets(src.Facet)
- if err != nil {
- return err
- }
- if len(exprs) > 0 {
- exprFields, err := protoToFields(exprs)
- if err != nil {
- return err
- }
- // Mark each field as derived.
- for i := range exprFields {
- exprFields[i].Derived = true
- }
- fields = append(fields, exprFields...)
- }
- meta := &DocumentMetadata{
- Rank: int(src.GetOrderId()),
- Facets: facets,
- }
- switch x := dst.(type) {
- case FieldLoadSaver:
- return x.Load(fields, meta)
- default:
- return loadStructWithMeta(dst, fields, meta)
- }
-}
-
-func protoToFields(fields []*pb.Field) ([]Field, error) {
- dst := make([]Field, 0, len(fields))
- for _, field := range fields {
- fieldValue := field.GetValue()
- f := Field{
- Name: field.GetName(),
- }
- switch fieldValue.GetType() {
- case pb.FieldValue_TEXT:
- f.Value = fieldValue.GetStringValue()
- f.Language = fieldValue.GetLanguage()
- case pb.FieldValue_ATOM:
- f.Value = Atom(fieldValue.GetStringValue())
- case pb.FieldValue_HTML:
- f.Value = HTML(fieldValue.GetStringValue())
- f.Language = fieldValue.GetLanguage()
- case pb.FieldValue_DATE:
- sv := fieldValue.GetStringValue()
- millis, err := strconv.ParseInt(sv, 10, 64)
- if err != nil {
- return nil, fmt.Errorf("search: internal error: bad time.Time encoding %q: %v", sv, err)
- }
- f.Value = time.Unix(0, millis*1e6)
- case pb.FieldValue_NUMBER:
- sv := fieldValue.GetStringValue()
- x, err := strconv.ParseFloat(sv, 64)
- if err != nil {
- return nil, err
- }
- f.Value = x
- case pb.FieldValue_GEO:
- geoValue := fieldValue.GetGeo()
- geoPoint := appengine.GeoPoint{geoValue.GetLat(), geoValue.GetLng()}
- if !geoPoint.Valid() {
- return nil, fmt.Errorf("search: internal error: invalid GeoPoint encoding: %v", geoPoint)
- }
- f.Value = geoPoint
- default:
- return nil, fmt.Errorf("search: internal error: unknown data type %s", fieldValue.GetType())
- }
- dst = append(dst, f)
- }
- return dst, nil
-}
-
-func protoToFacets(facets []*pb.Facet) ([]Facet, error) {
- if len(facets) == 0 {
- return nil, nil
- }
- dst := make([]Facet, 0, len(facets))
- for _, facet := range facets {
- facetValue := facet.GetValue()
- f := Facet{
- Name: facet.GetName(),
- }
- switch facetValue.GetType() {
- case pb.FacetValue_ATOM:
- f.Value = Atom(facetValue.GetStringValue())
- case pb.FacetValue_NUMBER:
- sv := facetValue.GetStringValue()
- x, err := strconv.ParseFloat(sv, 64)
- if err != nil {
- return nil, err
- }
- f.Value = x
- default:
- return nil, fmt.Errorf("search: internal error: unknown data type %s", facetValue.GetType())
- }
- dst = append(dst, f)
- }
- return dst, nil
-}
-
-func namespaceMod(m proto.Message, namespace string) {
- set := func(s **string) {
- if *s == nil {
- *s = &namespace
- }
- }
- switch m := m.(type) {
- case *pb.IndexDocumentRequest:
- set(&m.Params.IndexSpec.Namespace)
- case *pb.ListDocumentsRequest:
- set(&m.Params.IndexSpec.Namespace)
- case *pb.DeleteDocumentRequest:
- set(&m.Params.IndexSpec.Namespace)
- case *pb.SearchRequest:
- set(&m.Params.IndexSpec.Namespace)
- }
-}
-
-func init() {
- internal.RegisterErrorCodeMap("search", pb.SearchServiceError_ErrorCode_name)
- internal.NamespaceMods["search"] = namespaceMod
-}
diff --git a/vendor/google.golang.org/appengine/search/struct.go b/vendor/google.golang.org/appengine/search/struct.go
deleted file mode 100644
index 083c97f..0000000
--- a/vendor/google.golang.org/appengine/search/struct.go
+++ /dev/null
@@ -1,245 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package search
-
-import (
- "fmt"
- "reflect"
- "strings"
- "sync"
-)
-
-// ErrFieldMismatch is returned when a field is to be loaded into a different type
-// than the one it was stored from, or when a field is missing or unexported in
-// the destination struct.
-type ErrFieldMismatch struct {
- FieldName string
- Reason string
-}
-
-func (e *ErrFieldMismatch) Error() string {
- return fmt.Sprintf("search: cannot load field %q: %s", e.FieldName, e.Reason)
-}
-
-// ErrFacetMismatch is returned when a facet is to be loaded into a different
-// type than the one it was stored from, or when a field is missing or
-// unexported in the destination struct. StructType is the type of the struct
-// pointed to by the destination argument passed to Iterator.Next.
-type ErrFacetMismatch struct {
- StructType reflect.Type
- FacetName string
- Reason string
-}
-
-func (e *ErrFacetMismatch) Error() string {
- return fmt.Sprintf("search: cannot load facet %q into a %q: %s", e.FacetName, e.StructType, e.Reason)
-}
-
-// structCodec defines how to convert a given struct to/from a search document.
-type structCodec struct {
- // byIndex returns the struct tag for the i'th struct field.
- byIndex []structTag
-
- // fieldByName returns the index of the struct field for the given field name.
- fieldByName map[string]int
-
- // facetByName returns the index of the struct field for the given facet name.
- facetByName map[string]int
-}
-
-// structTag holds a structured version of each struct field's parsed tag.
-type structTag struct {
- name string
- facet bool
-}
-
-var (
- codecsMu sync.RWMutex
- codecs = map[reflect.Type]*structCodec{}
-)
-
-func loadCodec(t reflect.Type) (*structCodec, error) {
- codecsMu.RLock()
- codec, ok := codecs[t]
- codecsMu.RUnlock()
- if ok {
- return codec, nil
- }
-
- codecsMu.Lock()
- defer codecsMu.Unlock()
- if codec, ok := codecs[t]; ok {
- return codec, nil
- }
-
- codec = &structCodec{
- fieldByName: make(map[string]int),
- facetByName: make(map[string]int),
- }
-
- for i, I := 0, t.NumField(); i < I; i++ {
- f := t.Field(i)
- name, opts := f.Tag.Get("search"), ""
- if i := strings.Index(name, ","); i != -1 {
- name, opts = name[:i], name[i+1:]
- }
- // TODO(davidday): Support name=="-" as per datastore.
- if name == "" {
- name = f.Name
- } else if !validFieldName(name) {
- return nil, fmt.Errorf("search: struct tag has invalid field name: %q", name)
- }
- facet := opts == "facet"
- codec.byIndex = append(codec.byIndex, structTag{name: name, facet: facet})
- if facet {
- codec.facetByName[name] = i
- } else {
- codec.fieldByName[name] = i
- }
- }
-
- codecs[t] = codec
- return codec, nil
-}
-
-// structFLS adapts a struct to be a FieldLoadSaver.
-type structFLS struct {
- v reflect.Value
- codec *structCodec
-}
-
-func (s structFLS) Load(fields []Field, meta *DocumentMetadata) error {
- var err error
- for _, field := range fields {
- i, ok := s.codec.fieldByName[field.Name]
- if !ok {
- // Note the error, but keep going.
- err = &ErrFieldMismatch{
- FieldName: field.Name,
- Reason: "no such struct field",
- }
- continue
-
- }
- f := s.v.Field(i)
- if !f.CanSet() {
- // Note the error, but keep going.
- err = &ErrFieldMismatch{
- FieldName: field.Name,
- Reason: "cannot set struct field",
- }
- continue
- }
- v := reflect.ValueOf(field.Value)
- if ft, vt := f.Type(), v.Type(); ft != vt {
- err = &ErrFieldMismatch{
- FieldName: field.Name,
- Reason: fmt.Sprintf("type mismatch: %v for %v data", ft, vt),
- }
- continue
- }
- f.Set(v)
- }
- if meta == nil {
- return nil
- }
- for _, facet := range meta.Facets {
- i, ok := s.codec.facetByName[facet.Name]
- if !ok {
- // Note the error, but keep going.
- if err == nil {
- err = &ErrFacetMismatch{
- StructType: s.v.Type(),
- FacetName: facet.Name,
- Reason: "no matching field found",
- }
- }
- continue
- }
- f := s.v.Field(i)
- if !f.CanSet() {
- // Note the error, but keep going.
- if err == nil {
- err = &ErrFacetMismatch{
- StructType: s.v.Type(),
- FacetName: facet.Name,
- Reason: "unable to set unexported field of struct",
- }
- }
- continue
- }
- v := reflect.ValueOf(facet.Value)
- if ft, vt := f.Type(), v.Type(); ft != vt {
- if err == nil {
- err = &ErrFacetMismatch{
- StructType: s.v.Type(),
- FacetName: facet.Name,
- Reason: fmt.Sprintf("type mismatch: %v for %v data", ft, vt),
- }
- }
- continue
- }
- f.Set(v)
- }
- return err
-}
-
-func (s structFLS) Save() ([]Field, *DocumentMetadata, error) {
- fields := make([]Field, 0, len(s.codec.fieldByName))
- var facets []Facet
- for i, tag := range s.codec.byIndex {
- f := s.v.Field(i)
- if !f.CanSet() {
- continue
- }
- if tag.facet {
- facets = append(facets, Facet{Name: tag.name, Value: f.Interface()})
- } else {
- fields = append(fields, Field{Name: tag.name, Value: f.Interface()})
- }
- }
- return fields, &DocumentMetadata{Facets: facets}, nil
-}
-
-// newStructFLS returns a FieldLoadSaver for the struct pointer p.
-func newStructFLS(p interface{}) (FieldLoadSaver, error) {
- v := reflect.ValueOf(p)
- if v.Kind() != reflect.Ptr || v.IsNil() || v.Elem().Kind() != reflect.Struct {
- return nil, ErrInvalidDocumentType
- }
- codec, err := loadCodec(v.Elem().Type())
- if err != nil {
- return nil, err
- }
- return structFLS{v.Elem(), codec}, nil
-}
-
-func loadStructWithMeta(dst interface{}, f []Field, meta *DocumentMetadata) error {
- x, err := newStructFLS(dst)
- if err != nil {
- return err
- }
- return x.Load(f, meta)
-}
-
-func saveStructWithMeta(src interface{}) ([]Field, *DocumentMetadata, error) {
- x, err := newStructFLS(src)
- if err != nil {
- return nil, nil, err
- }
- return x.Save()
-}
-
-// LoadStruct loads the fields from f to dst. dst must be a struct pointer.
-func LoadStruct(dst interface{}, f []Field) error {
- return loadStructWithMeta(dst, f, nil)
-}
-
-// SaveStruct returns the fields from src as a slice of Field.
-// src must be a struct pointer.
-func SaveStruct(src interface{}) ([]Field, error) {
- f, _, err := saveStructWithMeta(src)
- return f, err
-}
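
The codec above is driven entirely by the search:"name[,facet]" struct tag: an optional alternative name, plus a facet option that routes the value into document facets instead of plain fields. A sketch of the convention, with illustrative type and tag names:

package searchsketch

import "google.golang.org/appengine/search"

// Product shows the tag convention handled by this codec. The names are
// illustrative; Price and Brand are routed into facets, Name stays a field.
type Product struct {
	Name  string      `search:"name"`
	Price float64     `search:"price,facet"`
	Brand search.Atom `search:"brand,facet"`
}

// roundTrip demonstrates the plain-field helpers. SaveStruct returns only the
// non-facet fields; facet-tagged values travel in DocumentMetadata, which is
// handled by the FieldLoadSaver path rather than by these helpers.
func roundTrip(p *Product) (*Product, error) {
	fields, err := search.SaveStruct(p)
	if err != nil {
		return nil, err
	}
	var out Product
	if err := search.LoadStruct(&out, fields); err != nil {
		return nil, err
	}
	return &out, nil
}
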
diff --git a/vendor/google.golang.org/appengine/socket/doc.go b/vendor/google.golang.org/appengine/socket/doc.go
deleted file mode 100644
index 1e23553..0000000
--- a/vendor/google.golang.org/appengine/socket/doc.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package socket provides outbound network sockets.
-//
-// This package is only required in the classic App Engine environment.
-// Applications running only in the Managed VM hosting environment should
-// use the standard library's net package.
-package socket
diff --git a/vendor/google.golang.org/appengine/socket/socket_classic.go b/vendor/google.golang.org/appengine/socket/socket_classic.go
deleted file mode 100644
index 0ad50e2..0000000
--- a/vendor/google.golang.org/appengine/socket/socket_classic.go
+++ /dev/null
@@ -1,290 +0,0 @@
-// Copyright 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build appengine
-
-package socket
-
-import (
- "fmt"
- "io"
- "net"
- "strconv"
- "time"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
- "google.golang.org/appengine/internal"
-
- pb "google.golang.org/appengine/internal/socket"
-)
-
-// Dial connects to the address addr on the network protocol.
-// The address format is host:port, where host may be a hostname or an IP address.
-// Known protocols are "tcp" and "udp".
-// The returned connection satisfies net.Conn, and is valid while ctx is valid;
-// if the connection is to be used after ctx becomes invalid, invoke SetContext
-// with the new context.
-func Dial(ctx context.Context, protocol, addr string) (*Conn, error) {
- return DialTimeout(ctx, protocol, addr, 0)
-}
-
-var ipFamilies = []pb.CreateSocketRequest_SocketFamily{
- pb.CreateSocketRequest_IPv4,
- pb.CreateSocketRequest_IPv6,
-}
-
-// DialTimeout is like Dial but takes a timeout.
-// The timeout includes name resolution, if required.
-func DialTimeout(ctx context.Context, protocol, addr string, timeout time.Duration) (*Conn, error) {
- dialCtx := ctx // Used for dialing and name resolution, but not stored in the *Conn.
- if timeout > 0 {
- var cancel context.CancelFunc
- dialCtx, cancel = context.WithTimeout(ctx, timeout)
- defer cancel()
- }
-
- host, portStr, err := net.SplitHostPort(addr)
- if err != nil {
- return nil, err
- }
- port, err := strconv.Atoi(portStr)
- if err != nil {
- return nil, fmt.Errorf("socket: bad port %q: %v", portStr, err)
- }
-
- var prot pb.CreateSocketRequest_SocketProtocol
- switch protocol {
- case "tcp":
- prot = pb.CreateSocketRequest_TCP
- case "udp":
- prot = pb.CreateSocketRequest_UDP
- default:
- return nil, fmt.Errorf("socket: unknown protocol %q", protocol)
- }
-
- packedAddrs, resolved, err := resolve(dialCtx, ipFamilies, host)
- if err != nil {
- return nil, fmt.Errorf("socket: failed resolving %q: %v", host, err)
- }
- if len(packedAddrs) == 0 {
- return nil, fmt.Errorf("no addresses for %q", host)
- }
-
- packedAddr := packedAddrs[0] // use first address
- fam := pb.CreateSocketRequest_IPv4
- if len(packedAddr) == net.IPv6len {
- fam = pb.CreateSocketRequest_IPv6
- }
-
- req := &pb.CreateSocketRequest{
- Family: fam.Enum(),
- Protocol: prot.Enum(),
- RemoteIp: &pb.AddressPort{
- Port: proto.Int32(int32(port)),
- PackedAddress: packedAddr,
- },
- }
- if resolved {
- req.RemoteIp.HostnameHint = &host
- }
- res := &pb.CreateSocketReply{}
- if err := internal.Call(dialCtx, "remote_socket", "CreateSocket", req, res); err != nil {
- return nil, err
- }
-
- return &Conn{
- ctx: ctx,
- desc: res.GetSocketDescriptor(),
- prot: prot,
- local: res.ProxyExternalIp,
- remote: req.RemoteIp,
- }, nil
-}
-
-// LookupIP returns the given host's IP addresses.
-func LookupIP(ctx context.Context, host string) (addrs []net.IP, err error) {
- packedAddrs, _, err := resolve(ctx, ipFamilies, host)
- if err != nil {
- return nil, fmt.Errorf("socket: failed resolving %q: %v", host, err)
- }
- addrs = make([]net.IP, len(packedAddrs))
- for i, pa := range packedAddrs {
- addrs[i] = net.IP(pa)
- }
- return addrs, nil
-}
-
-func resolve(ctx context.Context, fams []pb.CreateSocketRequest_SocketFamily, host string) ([][]byte, bool, error) {
- // Check if it's an IP address.
- if ip := net.ParseIP(host); ip != nil {
- if ip := ip.To4(); ip != nil {
- return [][]byte{ip}, false, nil
- }
- return [][]byte{ip}, false, nil
- }
-
- req := &pb.ResolveRequest{
- Name: &host,
- AddressFamilies: fams,
- }
- res := &pb.ResolveReply{}
- if err := internal.Call(ctx, "remote_socket", "Resolve", req, res); err != nil {
- // XXX: need to map to pb.ResolveReply_ErrorCode?
- return nil, false, err
- }
- return res.PackedAddress, true, nil
-}
-
-// withDeadline is like context.WithDeadline, except it ignores the zero deadline.
-func withDeadline(parent context.Context, deadline time.Time) (context.Context, context.CancelFunc) {
- if deadline.IsZero() {
- return parent, func() {}
- }
- return context.WithDeadline(parent, deadline)
-}
-
-// Conn represents a socket connection.
-// It implements net.Conn.
-type Conn struct {
- ctx context.Context
- desc string
- offset int64
-
- prot pb.CreateSocketRequest_SocketProtocol
- local, remote *pb.AddressPort
-
- readDeadline, writeDeadline time.Time // optional
-}
-
-// SetContext sets the context that is used by this Conn.
-// It is usually used only when using a Conn that was created in a different context,
-// such as when a connection is created during a warmup request but used while
-// servicing a user request.
-func (cn *Conn) SetContext(ctx context.Context) {
- cn.ctx = ctx
-}
-
-func (cn *Conn) Read(b []byte) (n int, err error) {
- const maxRead = 1 << 20
- if len(b) > maxRead {
- b = b[:maxRead]
- }
-
- req := &pb.ReceiveRequest{
- SocketDescriptor: &cn.desc,
- DataSize: proto.Int32(int32(len(b))),
- }
- res := &pb.ReceiveReply{}
- if !cn.readDeadline.IsZero() {
- req.TimeoutSeconds = proto.Float64(cn.readDeadline.Sub(time.Now()).Seconds())
- }
- ctx, cancel := withDeadline(cn.ctx, cn.readDeadline)
- defer cancel()
- if err := internal.Call(ctx, "remote_socket", "Receive", req, res); err != nil {
- return 0, err
- }
- if len(res.Data) == 0 {
- return 0, io.EOF
- }
- if len(res.Data) > len(b) {
- return 0, fmt.Errorf("socket: internal error: read too much data: %d > %d", len(res.Data), len(b))
- }
- return copy(b, res.Data), nil
-}
-
-func (cn *Conn) Write(b []byte) (n int, err error) {
- const lim = 1 << 20 // max per chunk
-
- for n < len(b) {
- chunk := b[n:]
- if len(chunk) > lim {
- chunk = chunk[:lim]
- }
-
- req := &pb.SendRequest{
- SocketDescriptor: &cn.desc,
- Data: chunk,
- StreamOffset: &cn.offset,
- }
- res := &pb.SendReply{}
- if !cn.writeDeadline.IsZero() {
- req.TimeoutSeconds = proto.Float64(cn.writeDeadline.Sub(time.Now()).Seconds())
- }
- ctx, cancel := withDeadline(cn.ctx, cn.writeDeadline)
- defer cancel()
- if err = internal.Call(ctx, "remote_socket", "Send", req, res); err != nil {
- // assume zero bytes were sent in this RPC
- break
- }
- n += int(res.GetDataSent())
- cn.offset += int64(res.GetDataSent())
- }
-
- return
-}
-
-func (cn *Conn) Close() error {
- req := &pb.CloseRequest{
- SocketDescriptor: &cn.desc,
- }
- res := &pb.CloseReply{}
- if err := internal.Call(cn.ctx, "remote_socket", "Close", req, res); err != nil {
- return err
- }
- cn.desc = "CLOSED"
- return nil
-}
-
-func addr(prot pb.CreateSocketRequest_SocketProtocol, ap *pb.AddressPort) net.Addr {
- if ap == nil {
- return nil
- }
- switch prot {
- case pb.CreateSocketRequest_TCP:
- return &net.TCPAddr{
- IP: net.IP(ap.PackedAddress),
- Port: int(*ap.Port),
- }
- case pb.CreateSocketRequest_UDP:
- return &net.UDPAddr{
- IP: net.IP(ap.PackedAddress),
- Port: int(*ap.Port),
- }
- }
- panic("unknown protocol " + prot.String())
-}
-
-func (cn *Conn) LocalAddr() net.Addr { return addr(cn.prot, cn.local) }
-func (cn *Conn) RemoteAddr() net.Addr { return addr(cn.prot, cn.remote) }
-
-func (cn *Conn) SetDeadline(t time.Time) error {
- cn.readDeadline = t
- cn.writeDeadline = t
- return nil
-}
-
-func (cn *Conn) SetReadDeadline(t time.Time) error {
- cn.readDeadline = t
- return nil
-}
-
-func (cn *Conn) SetWriteDeadline(t time.Time) error {
- cn.writeDeadline = t
- return nil
-}
-
-// KeepAlive signals that the connection is still in use.
-// It may be called to prevent the socket being closed due to inactivity.
-func (cn *Conn) KeepAlive() error {
- req := &pb.GetSocketNameRequest{
- SocketDescriptor: &cn.desc,
- }
- res := &pb.GetSocketNameReply{}
- return internal.Call(cn.ctx, "remote_socket", "GetSocketName", req, res)
-}
-
-func init() {
- internal.RegisterErrorCodeMap("remote_socket", pb.RemoteSocketServiceError_ErrorCode_name)
-}
diff --git a/vendor/google.golang.org/appengine/socket/socket_vm.go b/vendor/google.golang.org/appengine/socket/socket_vm.go
deleted file mode 100644
index ed98ac2..0000000
--- a/vendor/google.golang.org/appengine/socket/socket_vm.go
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build !appengine
-
-package socket
-
-import (
- "net"
- "time"
-
- "golang.org/x/net/context"
-)
-
-// Dial connects to the address addr on the network protocol.
-// The address format is host:port, where host may be a hostname or an IP address.
-// Known protocols are "tcp" and "udp".
-// The returned connection satisfies net.Conn, and is valid while ctx is valid;
-// if the connection is to be used after ctx becomes invalid, invoke SetContext
-// with the new context.
-func Dial(ctx context.Context, protocol, addr string) (*Conn, error) {
- conn, err := net.Dial(protocol, addr)
- if err != nil {
- return nil, err
- }
- return &Conn{conn}, nil
-}
-
-// DialTimeout is like Dial but takes a timeout.
-// The timeout includes name resolution, if required.
-func DialTimeout(ctx context.Context, protocol, addr string, timeout time.Duration) (*Conn, error) {
- conn, err := net.DialTimeout(protocol, addr, timeout)
- if err != nil {
- return nil, err
- }
- return &Conn{conn}, nil
-}
-
-// LookupIP returns the given host's IP addresses.
-func LookupIP(ctx context.Context, host string) (addrs []net.IP, err error) {
- return net.LookupIP(host)
-}
-
-// Conn represents a socket connection.
-// It implements net.Conn.
-type Conn struct {
- net.Conn
-}
-
-// SetContext sets the context that is used by this Conn.
-// It is usually used only when using a Conn that was created in a different context,
-// such as when a connection is created during a warmup request but used while
-// servicing a user request.
-func (cn *Conn) SetContext(ctx context.Context) {
- // This function is not required on managed VMs.
-}
-
-// KeepAlive signals that the connection is still in use.
-// It may be called to prevent the socket being closed due to inactivity.
-func (cn *Conn) KeepAlive() error {
- // This function is not required on managed VMs.
- return nil
-}
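
Both build variants of the socket package expose the same Dial/DialTimeout/Conn surface, so caller code is identical on the classic runtime and on Managed VMs. A sketch of a simple outbound TCP exchange; the host, port, and payload are illustrative:

package socketsketch

import (
	"bufio"
	"fmt"
	"time"

	"golang.org/x/net/context"

	"google.golang.org/appengine/socket"
)

// fetchBanner opens an outbound TCP connection through the socket service
// (classic runtime) or the net package (Managed VMs) and reads one line back.
func fetchBanner(ctx context.Context) (string, error) {
	conn, err := socket.DialTimeout(ctx, "tcp", "example.com:7", 10*time.Second)
	if err != nil {
		return "", err
	}
	defer conn.Close()

	conn.SetDeadline(time.Now().Add(5 * time.Second))
	if _, err := fmt.Fprintf(conn, "ping\r\n"); err != nil {
		return "", err
	}
	line, err := bufio.NewReader(conn).ReadString('\n')
	if err != nil {
		return "", err
	}
	return line, nil
}
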
diff --git a/vendor/google.golang.org/appengine/taskqueue/taskqueue.go b/vendor/google.golang.org/appengine/taskqueue/taskqueue.go
deleted file mode 100644
index 7bdae92..0000000
--- a/vendor/google.golang.org/appengine/taskqueue/taskqueue.go
+++ /dev/null
@@ -1,496 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package taskqueue provides a client for App Engine's taskqueue service.
-Using this service, applications may perform work outside a user's request.
-
-A Task may be constructed manually; alternatively, since the most common
-taskqueue operation is to add a single POST task, NewPOSTTask makes it easy.
-
- t := taskqueue.NewPOSTTask("/worker", url.Values{
- "key": {key},
- })
- taskqueue.Add(c, t, "") // add t to the default queue
-*/
-package taskqueue
-
-import (
- "errors"
- "fmt"
- "net/http"
- "net/url"
- "time"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- dspb "google.golang.org/appengine/internal/datastore"
- pb "google.golang.org/appengine/internal/taskqueue"
-)
-
-var (
- // ErrTaskAlreadyAdded is the error returned by Add and AddMulti when a task has already been added with a particular name.
- ErrTaskAlreadyAdded = errors.New("taskqueue: task has already been added")
-)
-
-// RetryOptions let you control whether to retry a task and the backoff intervals between tries.
-type RetryOptions struct {
- // Number of tries/leases after which the task fails permanently and is deleted.
- // If AgeLimit is also set, both limits must be exceeded for the task to fail permanently.
- RetryLimit int32
-
- // Maximum time allowed since the task's first try before the task fails permanently and is deleted (only for push tasks).
- // If RetryLimit is also set, both limits must be exceeded for the task to fail permanently.
- AgeLimit time.Duration
-
- // Minimum time between successive tries (only for push tasks).
- MinBackoff time.Duration
-
- // Maximum time between successive tries (only for push tasks).
- MaxBackoff time.Duration
-
- // Maximum number of times to double the interval between successive tries before the intervals increase linearly (only for push tasks).
- MaxDoublings int32
-
- // If MaxDoublings is zero, set ApplyZeroMaxDoublings to true to override the default non-zero value.
- // Otherwise a zero MaxDoublings is ignored and the default is used.
- ApplyZeroMaxDoublings bool
-}
-
-// toRetryParameters converts RetryOptions to pb.TaskQueueRetryParameters.
-func (opt *RetryOptions) toRetryParameters() *pb.TaskQueueRetryParameters {
- params := &pb.TaskQueueRetryParameters{}
- if opt.RetryLimit > 0 {
- params.RetryLimit = proto.Int32(opt.RetryLimit)
- }
- if opt.AgeLimit > 0 {
- params.AgeLimitSec = proto.Int64(int64(opt.AgeLimit.Seconds()))
- }
- if opt.MinBackoff > 0 {
- params.MinBackoffSec = proto.Float64(opt.MinBackoff.Seconds())
- }
- if opt.MaxBackoff > 0 {
- params.MaxBackoffSec = proto.Float64(opt.MaxBackoff.Seconds())
- }
- if opt.MaxDoublings > 0 || (opt.MaxDoublings == 0 && opt.ApplyZeroMaxDoublings) {
- params.MaxDoublings = proto.Int32(opt.MaxDoublings)
- }
- return params
-}
-
-// A Task represents a task to be executed.
-type Task struct {
- // Path is the worker URL for the task.
- // If unset, it will default to /_ah/queue/<queue_name>.
- Path string
-
- // Payload is the data for the task.
- // This will be delivered as the HTTP request body.
- // It is only used when Method is POST, PUT or PULL.
- // url.Values' Encode method may be used to generate this for POST requests.
- Payload []byte
-
- // Additional HTTP headers to pass at the task's execution time.
- // To schedule the task to be run with an alternate app version
- // or backend, set the "Host" header.
- Header http.Header
-
- // Method is the HTTP method for the task ("GET", "POST", etc.),
- // or "PULL" if this is task is destined for a pull-based queue.
- // If empty, this defaults to "POST".
- Method string
-
- // A name for the task.
- // If empty, a name will be chosen.
- Name string
-
- // Delay specifies the duration the task queue service must wait
- // before executing the task.
- // Either Delay or ETA may be set, but not both.
- Delay time.Duration
-
- // ETA specifies the earliest time a task may be executed (push queues)
- // or leased (pull queues).
- // Either Delay or ETA may be set, but not both.
- ETA time.Time
-
- // The number of times the task has been dispatched or leased.
- RetryCount int32
-
- // Tag for the task. Only used when Method is PULL.
- Tag string
-
- // Retry options for this task. May be nil.
- RetryOptions *RetryOptions
-}
-
-func (t *Task) method() string {
- if t.Method == "" {
- return "POST"
- }
- return t.Method
-}
-
-// NewPOSTTask creates a Task that will POST to a path with the given form data.
-func NewPOSTTask(path string, params url.Values) *Task {
- h := make(http.Header)
- h.Set("Content-Type", "application/x-www-form-urlencoded")
- return &Task{
- Path: path,
- Payload: []byte(params.Encode()),
- Header: h,
- Method: "POST",
- }
-}
-
-var (
- currentNamespace = http.CanonicalHeaderKey("X-AppEngine-Current-Namespace")
- defaultNamespace = http.CanonicalHeaderKey("X-AppEngine-Default-Namespace")
-)
-
-func getDefaultNamespace(ctx context.Context) string {
- return internal.IncomingHeaders(ctx).Get(defaultNamespace)
-}
-
-func newAddReq(c context.Context, task *Task, queueName string) (*pb.TaskQueueAddRequest, error) {
- if queueName == "" {
- queueName = "default"
- }
- path := task.Path
- if path == "" {
- path = "/_ah/queue/" + queueName
- }
- eta := task.ETA
- if eta.IsZero() {
- eta = time.Now().Add(task.Delay)
- } else if task.Delay != 0 {
- panic("taskqueue: both Delay and ETA are set")
- }
- req := &pb.TaskQueueAddRequest{
- QueueName: []byte(queueName),
- TaskName: []byte(task.Name),
- EtaUsec: proto.Int64(eta.UnixNano() / 1e3),
- }
- method := task.method()
- if method == "PULL" {
- // Pull-based task
- req.Body = task.Payload
- req.Mode = pb.TaskQueueMode_PULL.Enum()
- if task.Tag != "" {
- req.Tag = []byte(task.Tag)
- }
- } else {
- // HTTP-based task
- if v, ok := pb.TaskQueueAddRequest_RequestMethod_value[method]; ok {
- req.Method = pb.TaskQueueAddRequest_RequestMethod(v).Enum()
- } else {
- return nil, fmt.Errorf("taskqueue: bad method %q", method)
- }
- req.Url = []byte(path)
- for k, vs := range task.Header {
- for _, v := range vs {
- req.Header = append(req.Header, &pb.TaskQueueAddRequest_Header{
- Key: []byte(k),
- Value: []byte(v),
- })
- }
- }
- if method == "POST" || method == "PUT" {
- req.Body = task.Payload
- }
-
- // Namespace headers.
- if _, ok := task.Header[currentNamespace]; !ok {
- // Fetch the current namespace of this request.
- ns := internal.NamespaceFromContext(c)
- req.Header = append(req.Header, &pb.TaskQueueAddRequest_Header{
- Key: []byte(currentNamespace),
- Value: []byte(ns),
- })
- }
- if _, ok := task.Header[defaultNamespace]; !ok {
- // Fetch the X-AppEngine-Default-Namespace header of this request.
- if ns := getDefaultNamespace(c); ns != "" {
- req.Header = append(req.Header, &pb.TaskQueueAddRequest_Header{
- Key: []byte(defaultNamespace),
- Value: []byte(ns),
- })
- }
- }
- }
-
- if task.RetryOptions != nil {
- req.RetryParameters = task.RetryOptions.toRetryParameters()
- }
-
- return req, nil
-}
-
-var alreadyAddedErrors = map[pb.TaskQueueServiceError_ErrorCode]bool{
- pb.TaskQueueServiceError_TASK_ALREADY_EXISTS: true,
- pb.TaskQueueServiceError_TOMBSTONED_TASK: true,
-}
-
-// Add adds the task to a named queue.
-// An empty queue name means that the default queue will be used.
-// Add returns an equivalent Task with defaults filled in, including setting
-// the task's Name field to the chosen name if the original was empty.
-func Add(c context.Context, task *Task, queueName string) (*Task, error) {
- req, err := newAddReq(c, task, queueName)
- if err != nil {
- return nil, err
- }
- res := &pb.TaskQueueAddResponse{}
- if err := internal.Call(c, "taskqueue", "Add", req, res); err != nil {
- apiErr, ok := err.(*internal.APIError)
- if ok && alreadyAddedErrors[pb.TaskQueueServiceError_ErrorCode(apiErr.Code)] {
- return nil, ErrTaskAlreadyAdded
- }
- return nil, err
- }
- resultTask := *task
- resultTask.Method = task.method()
- if task.Name == "" {
- resultTask.Name = string(res.ChosenTaskName)
- }
- return &resultTask, nil
-}
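
A sketch of how Add was commonly combined with NewPOSTTask, RetryOptions, and the ErrTaskAlreadyAdded sentinel to get idempotent enqueueing; the queue name, worker path, and task-naming scheme below are illustrative assumptions, not taken from this repository:

package tasksketch

import (
	"net/url"
	"time"

	"golang.org/x/net/context"

	"google.golang.org/appengine/taskqueue"
)

// enqueue adds a named POST task with retry limits to a hypothetical "emails"
// queue. Naming the task makes the add idempotent: a second Add with the same
// name reports ErrTaskAlreadyAdded instead of enqueuing a duplicate.
func enqueue(c context.Context, userKey string) error {
	t := taskqueue.NewPOSTTask("/worker/send-email", url.Values{"key": {userKey}})
	t.Name = "send-email-" + userKey // must itself be a valid task name
	t.Delay = 30 * time.Second
	t.RetryOptions = &taskqueue.RetryOptions{
		RetryLimit: 5,                // give up after five attempts
		MinBackoff: 10 * time.Second, // first retry no sooner than 10s
		MaxBackoff: 5 * time.Minute,
	}
	if _, err := taskqueue.Add(c, t, "emails"); err != nil && err != taskqueue.ErrTaskAlreadyAdded {
		return err
	}
	return nil
}
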
-
-// AddMulti adds multiple tasks to a named queue.
-// An empty queue name means that the default queue will be used.
-// AddMulti returns a slice of equivalent tasks with defaults filled in, including setting
-// each task's Name field to the chosen name if the original was empty.
-// If a given task is badly formed or could not be added, an appengine.MultiError is returned.
-func AddMulti(c context.Context, tasks []*Task, queueName string) ([]*Task, error) {
- req := &pb.TaskQueueBulkAddRequest{
- AddRequest: make([]*pb.TaskQueueAddRequest, len(tasks)),
- }
- me, any := make(appengine.MultiError, len(tasks)), false
- for i, t := range tasks {
- req.AddRequest[i], me[i] = newAddReq(c, t, queueName)
- any = any || me[i] != nil
- }
- if any {
- return nil, me
- }
- res := &pb.TaskQueueBulkAddResponse{}
- if err := internal.Call(c, "taskqueue", "BulkAdd", req, res); err != nil {
- return nil, err
- }
- if len(res.Taskresult) != len(tasks) {
- return nil, errors.New("taskqueue: server error")
- }
- tasksOut := make([]*Task, len(tasks))
- for i, tr := range res.Taskresult {
- tasksOut[i] = new(Task)
- *tasksOut[i] = *tasks[i]
- tasksOut[i].Method = tasksOut[i].method()
- if tasksOut[i].Name == "" {
- tasksOut[i].Name = string(tr.ChosenTaskName)
- }
- if *tr.Result != pb.TaskQueueServiceError_OK {
- if alreadyAddedErrors[*tr.Result] {
- me[i] = ErrTaskAlreadyAdded
- } else {
- me[i] = &internal.APIError{
- Service: "taskqueue",
- Code: int32(*tr.Result),
- }
- }
- any = true
- }
- }
- if any {
- return tasksOut, me
- }
- return tasksOut, nil
-}
-
-// Delete deletes a task from a named queue.
-func Delete(c context.Context, task *Task, queueName string) error {
- err := DeleteMulti(c, []*Task{task}, queueName)
- if me, ok := err.(appengine.MultiError); ok {
- return me[0]
- }
- return err
-}
-
-// DeleteMulti deletes multiple tasks from a named queue.
-// If a given task could not be deleted, an appengine.MultiError is returned.
-func DeleteMulti(c context.Context, tasks []*Task, queueName string) error {
- taskNames := make([][]byte, len(tasks))
- for i, t := range tasks {
- taskNames[i] = []byte(t.Name)
- }
- if queueName == "" {
- queueName = "default"
- }
- req := &pb.TaskQueueDeleteRequest{
- QueueName: []byte(queueName),
- TaskName: taskNames,
- }
- res := &pb.TaskQueueDeleteResponse{}
- if err := internal.Call(c, "taskqueue", "Delete", req, res); err != nil {
- return err
- }
- if a, b := len(req.TaskName), len(res.Result); a != b {
- return fmt.Errorf("taskqueue: internal error: requested deletion of %d tasks, got %d results", a, b)
- }
- me, any := make(appengine.MultiError, len(res.Result)), false
- for i, ec := range res.Result {
- if ec != pb.TaskQueueServiceError_OK {
- me[i] = &internal.APIError{
- Service: "taskqueue",
- Code: int32(ec),
- }
- any = true
- }
- }
- if any {
- return me
- }
- return nil
-}
-
-func lease(c context.Context, maxTasks int, queueName string, leaseTime int, groupByTag bool, tag []byte) ([]*Task, error) {
- if queueName == "" {
- queueName = "default"
- }
- req := &pb.TaskQueueQueryAndOwnTasksRequest{
- QueueName: []byte(queueName),
- LeaseSeconds: proto.Float64(float64(leaseTime)),
- MaxTasks: proto.Int64(int64(maxTasks)),
- GroupByTag: proto.Bool(groupByTag),
- Tag: tag,
- }
- res := &pb.TaskQueueQueryAndOwnTasksResponse{}
- if err := internal.Call(c, "taskqueue", "QueryAndOwnTasks", req, res); err != nil {
- return nil, err
- }
- tasks := make([]*Task, len(res.Task))
- for i, t := range res.Task {
- tasks[i] = &Task{
- Payload: t.Body,
- Name: string(t.TaskName),
- Method: "PULL",
- ETA: time.Unix(0, *t.EtaUsec*1e3),
- RetryCount: *t.RetryCount,
- Tag: string(t.Tag),
- }
- }
- return tasks, nil
-}
-
-// Lease leases tasks from a queue.
-// leaseTime is in seconds.
-// The number of tasks fetched will be at most maxTasks.
-func Lease(c context.Context, maxTasks int, queueName string, leaseTime int) ([]*Task, error) {
- return lease(c, maxTasks, queueName, leaseTime, false, nil)
-}
-
-// LeaseByTag leases tasks from a queue, grouped by tag.
-// If tag is empty, then the returned tasks are grouped by the tag of the task with the earliest ETA.
-// leaseTime is in seconds.
-// The number of tasks fetched will be at most maxTasks.
-func LeaseByTag(c context.Context, maxTasks int, queueName string, leaseTime int, tag string) ([]*Task, error) {
- return lease(c, maxTasks, queueName, leaseTime, true, []byte(tag))
-}
-
-// Purge removes all tasks from a queue.
-func Purge(c context.Context, queueName string) error {
- if queueName == "" {
- queueName = "default"
- }
- req := &pb.TaskQueuePurgeQueueRequest{
- QueueName: []byte(queueName),
- }
- res := &pb.TaskQueuePurgeQueueResponse{}
- return internal.Call(c, "taskqueue", "PurgeQueue", req, res)
-}
-
-// ModifyLease modifies the lease of a task.
-// Used to request more processing time, or to abandon processing.
-// leaseTime is in seconds and must not be negative.
-func ModifyLease(c context.Context, task *Task, queueName string, leaseTime int) error {
- if queueName == "" {
- queueName = "default"
- }
- req := &pb.TaskQueueModifyTaskLeaseRequest{
- QueueName: []byte(queueName),
- TaskName: []byte(task.Name),
- EtaUsec: proto.Int64(task.ETA.UnixNano() / 1e3), // Used to verify ownership.
- LeaseSeconds: proto.Float64(float64(leaseTime)),
- }
- res := &pb.TaskQueueModifyTaskLeaseResponse{}
- if err := internal.Call(c, "taskqueue", "ModifyTaskLease", req, res); err != nil {
- return err
- }
- task.ETA = time.Unix(0, *res.UpdatedEtaUsec*1e3)
- return nil
-}
-
-// QueueStatistics represents statistics about a single task queue.
-type QueueStatistics struct {
- Tasks int // may be an approximation
- OldestETA time.Time // zero if there are no pending tasks
-
- Executed1Minute int // tasks executed in the last minute
- InFlight int // tasks executing now
- EnforcedRate float64 // requests per second
-}
-
-// QueueStats retrieves statistics about queues.
-func QueueStats(c context.Context, queueNames []string) ([]QueueStatistics, error) {
- req := &pb.TaskQueueFetchQueueStatsRequest{
- QueueName: make([][]byte, len(queueNames)),
- }
- for i, q := range queueNames {
- if q == "" {
- q = "default"
- }
- req.QueueName[i] = []byte(q)
- }
- res := &pb.TaskQueueFetchQueueStatsResponse{}
- if err := internal.Call(c, "taskqueue", "FetchQueueStats", req, res); err != nil {
- return nil, err
- }
- qs := make([]QueueStatistics, len(res.Queuestats))
- for i, qsg := range res.Queuestats {
- qs[i] = QueueStatistics{
- Tasks: int(*qsg.NumTasks),
- }
- if eta := *qsg.OldestEtaUsec; eta > -1 {
- qs[i].OldestETA = time.Unix(0, eta*1e3)
- }
- if si := qsg.ScannerInfo; si != nil {
- qs[i].Executed1Minute = int(*si.ExecutedLastMinute)
- qs[i].InFlight = int(si.GetRequestsInFlight())
- qs[i].EnforcedRate = si.GetEnforcedRate()
- }
- }
- return qs, nil
-}
-
-func setTransaction(x *pb.TaskQueueAddRequest, t *dspb.Transaction) {
- x.Transaction = t
-}
-
-func init() {
- internal.RegisterErrorCodeMap("taskqueue", pb.TaskQueueServiceError_ErrorCode_name)
-
- // Datastore error codes are shifted by DATASTORE_ERROR when presented through taskqueue.
- dsCode := int32(pb.TaskQueueServiceError_DATASTORE_ERROR) + int32(dspb.Error_TIMEOUT)
- internal.RegisterTimeoutErrorCode("taskqueue", dsCode)
-
- // Transaction registration.
- internal.RegisterTransactionSetter(setTransaction)
- internal.RegisterTransactionSetter(func(x *pb.TaskQueueBulkAddRequest, t *dspb.Transaction) {
- for _, req := range x.AddRequest {
- setTransaction(req, t)
- }
- })
-}
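For context, a minimal sketch (not part of this patch) of the pull-queue workflow the removed taskqueue package exposed: enqueue a PULL task, lease a batch, process it, and delete it. The queue name "pull-queue", the tag, and the payload are hypothetical, and error handling is reduced to the essentials.

package example

import (
	"golang.org/x/net/context"

	"google.golang.org/appengine/taskqueue"
)

// enqueueAndDrain adds one pull task and then drains up to ten tasks
// from the (hypothetical) "pull-queue" queue.
func enqueueAndDrain(ctx context.Context) error {
	t := &taskqueue.Task{
		Payload: []byte(`{"job":"resize"}`),
		Method:  "PULL",
		Tag:     "images",
	}
	if _, err := taskqueue.Add(ctx, t, "pull-queue"); err != nil && err != taskqueue.ErrTaskAlreadyAdded {
		return err
	}

	// Lease up to 10 tasks for 60 seconds.
	leased, err := taskqueue.Lease(ctx, 10, "pull-queue", 60)
	if err != nil {
		return err
	}
	for _, lt := range leased {
		_ = lt.Payload // process the payload here
	}
	// Remove the processed tasks so they are not re-leased.
	return taskqueue.DeleteMulti(ctx, leased, "pull-queue")
}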
diff --git a/vendor/google.golang.org/appengine/timeout.go b/vendor/google.golang.org/appengine/timeout.go
deleted file mode 100644
index 05642a9..0000000
--- a/vendor/google.golang.org/appengine/timeout.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package appengine
-
-import "golang.org/x/net/context"
-
-// IsTimeoutError reports whether err is a timeout error.
-func IsTimeoutError(err error) bool {
- if err == context.DeadlineExceeded {
- return true
- }
- if t, ok := err.(interface {
- IsTimeout() bool
- }); ok {
- return t.IsTimeout()
- }
- return false
-}
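For context, a minimal sketch of how the removed IsTimeoutError helper was typically consumed; the call wrapper and the single retry are illustrative assumptions, not part of this patch.

package example

import (
	"golang.org/x/net/context"

	"google.golang.org/appengine"
)

// callWithRetry retries an API call exactly once when the first attempt
// failed with a timeout, as reported by appengine.IsTimeoutError.
func callWithRetry(ctx context.Context, call func(context.Context) error) error {
	err := call(ctx)
	if err != nil && appengine.IsTimeoutError(err) {
		err = call(ctx) // naive single retry, for illustration only
	}
	return err
}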
diff --git a/vendor/google.golang.org/appengine/urlfetch/urlfetch.go b/vendor/google.golang.org/appengine/urlfetch/urlfetch.go
deleted file mode 100644
index ba3d17c..0000000
--- a/vendor/google.golang.org/appengine/urlfetch/urlfetch.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package urlfetch provides an http.RoundTripper implementation
-// for fetching URLs via App Engine's urlfetch service.
-package urlfetch
-
-import (
- "errors"
- "fmt"
- "io"
- "io/ioutil"
- "net/http"
- "net/url"
- "strconv"
- "strings"
- "time"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/urlfetch"
-)
-
-// Transport is an implementation of http.RoundTripper for
-// App Engine. Users should generally create an http.Client using
-// this transport and use the Client rather than using this transport
-// directly.
-type Transport struct {
- Context context.Context
-
- // Controls whether the application checks the validity of SSL certificates
- // over HTTPS connections. A value of false (the default) instructs the
- // application to send a request to the server only if the certificate is
- // valid and signed by a trusted certificate authority (CA), and also
- // includes a hostname that matches the certificate. A value of true
- // instructs the application to perform no certificate validation.
- AllowInvalidServerCertificate bool
-}
-
-// Verify statically that *Transport implements http.RoundTripper.
-var _ http.RoundTripper = (*Transport)(nil)
-
-// Client returns an *http.Client using a default urlfetch Transport. This
-// client will have the default deadline of 5 seconds, and will check the
-// validity of SSL certificates.
-//
-// Any deadline of the provided context will be used for requests through this client;
-// if the client does not have a deadline then a 5 second default is used.
-func Client(ctx context.Context) *http.Client {
- return &http.Client{
- Transport: &Transport{
- Context: ctx,
- },
- }
-}
-
-type bodyReader struct {
- content []byte
- truncated bool
- closed bool
-}
-
-// ErrTruncatedBody is the error returned after the final Read() from a
-// response's Body if the body has been truncated by App Engine's proxy.
-var ErrTruncatedBody = errors.New("urlfetch: truncated body")
-
-func statusCodeToText(code int) string {
- if t := http.StatusText(code); t != "" {
- return t
- }
- return strconv.Itoa(code)
-}
-
-func (br *bodyReader) Read(p []byte) (n int, err error) {
- if br.closed {
- if br.truncated {
- return 0, ErrTruncatedBody
- }
- return 0, io.EOF
- }
- n = copy(p, br.content)
- if n > 0 {
- br.content = br.content[n:]
- return
- }
- if br.truncated {
- br.closed = true
- return 0, ErrTruncatedBody
- }
- return 0, io.EOF
-}
-
-func (br *bodyReader) Close() error {
- br.closed = true
- br.content = nil
- return nil
-}
-
-// A map of the URL Fetch-accepted methods that take a request body.
-var methodAcceptsRequestBody = map[string]bool{
- "POST": true,
- "PUT": true,
- "PATCH": true,
-}
-
-// urlString returns a valid string given a URL. This function is necessary because
-// the String method of URL doesn't correctly handle URLs with non-empty Opaque values.
-// See http://code.google.com/p/go/issues/detail?id=4860.
-func urlString(u *url.URL) string {
- if u.Opaque == "" || strings.HasPrefix(u.Opaque, "//") {
- return u.String()
- }
- aux := *u
- aux.Opaque = "//" + aux.Host + aux.Opaque
- return aux.String()
-}
-
-// RoundTrip issues a single HTTP request and returns its response. Per the
-// http.RoundTripper interface, RoundTrip only returns an error if there
-// was an unsupported request or the URL Fetch proxy fails.
-// Note that HTTP response codes such as 5xx, 403, 404, etc. are not
-// errors as far as the transport is concerned and will be returned
-// with err set to nil.
-func (t *Transport) RoundTrip(req *http.Request) (res *http.Response, err error) {
- methNum, ok := pb.URLFetchRequest_RequestMethod_value[req.Method]
- if !ok {
- return nil, fmt.Errorf("urlfetch: unsupported HTTP method %q", req.Method)
- }
-
- method := pb.URLFetchRequest_RequestMethod(methNum)
-
- freq := &pb.URLFetchRequest{
- Method: &method,
- Url: proto.String(urlString(req.URL)),
- FollowRedirects: proto.Bool(false), // http.Client's responsibility
- MustValidateServerCertificate: proto.Bool(!t.AllowInvalidServerCertificate),
- }
- if deadline, ok := t.Context.Deadline(); ok {
- freq.Deadline = proto.Float64(deadline.Sub(time.Now()).Seconds())
- }
-
- for k, vals := range req.Header {
- for _, val := range vals {
- freq.Header = append(freq.Header, &pb.URLFetchRequest_Header{
- Key: proto.String(k),
- Value: proto.String(val),
- })
- }
- }
- if methodAcceptsRequestBody[req.Method] && req.Body != nil {
- // Avoid a []byte copy if req.Body has a Bytes method.
- switch b := req.Body.(type) {
- case interface {
- Bytes() []byte
- }:
- freq.Payload = b.Bytes()
- default:
- freq.Payload, err = ioutil.ReadAll(req.Body)
- if err != nil {
- return nil, err
- }
- }
- }
-
- fres := &pb.URLFetchResponse{}
- if err := internal.Call(t.Context, "urlfetch", "Fetch", freq, fres); err != nil {
- return nil, err
- }
-
- res = &http.Response{}
- res.StatusCode = int(*fres.StatusCode)
- res.Status = fmt.Sprintf("%d %s", res.StatusCode, statusCodeToText(res.StatusCode))
- res.Header = make(http.Header)
- res.Request = req
-
- // Faked:
- res.ProtoMajor = 1
- res.ProtoMinor = 1
- res.Proto = "HTTP/1.1"
- res.Close = true
-
- for _, h := range fres.Header {
- hkey := http.CanonicalHeaderKey(*h.Key)
- hval := *h.Value
- if hkey == "Content-Length" {
- // Will get filled in below for all but HEAD requests.
- if req.Method == "HEAD" {
- res.ContentLength, _ = strconv.ParseInt(hval, 10, 64)
- }
- continue
- }
- res.Header.Add(hkey, hval)
- }
-
- if req.Method != "HEAD" {
- res.ContentLength = int64(len(fres.Content))
- }
-
- truncated := fres.GetContentWasTruncated()
- res.Body = &bodyReader{content: fres.Content, truncated: truncated}
- return
-}
-
-func init() {
- internal.RegisterErrorCodeMap("urlfetch", pb.URLFetchServiceError_ErrorCode_name)
- internal.RegisterTimeoutErrorCode("urlfetch", int32(pb.URLFetchServiceError_DEADLINE_EXCEEDED))
-}
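For context, a minimal sketch of the usual way the removed urlfetch transport was consumed: obtain an *http.Client bound to the request context and use it like any other client. The deadline and URL below are placeholders.

package example

import (
	"time"

	"golang.org/x/net/context"

	"google.golang.org/appengine/urlfetch"
)

// fetchStatus issues a GET through the urlfetch-backed client with an
// explicit 10-second deadline taken from the context.
func fetchStatus(ctx context.Context) (int, error) {
	ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()

	client := urlfetch.Client(ctx)
	resp, err := client.Get("https://example.com/healthz")
	if err != nil {
		return 0, err
	}
	defer resp.Body.Close()
	return resp.StatusCode, nil
}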
diff --git a/vendor/google.golang.org/appengine/user/oauth.go b/vendor/google.golang.org/appengine/user/oauth.go
deleted file mode 100644
index ffad571..0000000
--- a/vendor/google.golang.org/appengine/user/oauth.go
+++ /dev/null
@@ -1,52 +0,0 @@
-// Copyright 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-package user
-
-import (
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/user"
-)
-
-// CurrentOAuth returns the user associated with the OAuth consumer making this
-// request. If the OAuth consumer did not make a valid OAuth request, or if
-// scopes is non-empty and the current user does not have at least one of the
-// scopes, this method will return an error.
-func CurrentOAuth(c context.Context, scopes ...string) (*User, error) {
- req := &pb.GetOAuthUserRequest{}
- if len(scopes) != 1 || scopes[0] != "" {
- // The signature for this function used to be CurrentOAuth(Context, string).
- // Ignore the singular "" scope to preserve existing behavior.
- req.Scopes = scopes
- }
-
- res := &pb.GetOAuthUserResponse{}
-
- err := internal.Call(c, "user", "GetOAuthUser", req, res)
- if err != nil {
- return nil, err
- }
- return &User{
- Email: *res.Email,
- AuthDomain: *res.AuthDomain,
- Admin: res.GetIsAdmin(),
- ID: *res.UserId,
- ClientID: res.GetClientId(),
- }, nil
-}
-
-// OAuthConsumerKey returns the OAuth consumer key provided with the current
-// request. This method will return an error if the OAuth request was invalid.
-func OAuthConsumerKey(c context.Context) (string, error) {
- req := &pb.CheckOAuthSignatureRequest{}
- res := &pb.CheckOAuthSignatureResponse{}
-
- err := internal.Call(c, "user", "CheckOAuthSignature", req, res)
- if err != nil {
- return "", err
- }
- return *res.OauthConsumerKey, err
-}
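For context, a minimal sketch of gating an HTTP handler on an OAuth consumer via the removed CurrentOAuth helper; the scope URL and response body are placeholders.

package example

import (
	"net/http"

	"google.golang.org/appengine"
	"google.golang.org/appengine/user"
)

// requireOAuth rejects requests that do not carry a valid OAuth
// authorization for the (hypothetical) email scope.
func requireOAuth(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	u, err := user.CurrentOAuth(ctx, "https://www.googleapis.com/auth/userinfo.email")
	if err != nil {
		http.Error(w, "OAuth authorization required", http.StatusUnauthorized)
		return
	}
	w.Write([]byte("hello, " + u.Email))
}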
diff --git a/vendor/google.golang.org/appengine/user/user.go b/vendor/google.golang.org/appengine/user/user.go
deleted file mode 100644
index 622b610..0000000
--- a/vendor/google.golang.org/appengine/user/user.go
+++ /dev/null
@@ -1,84 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// Package user provides a client for App Engine's user authentication service.
-package user
-
-import (
- "strings"
-
- "github.com/golang/protobuf/proto"
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/user"
-)
-
-// User represents a user of the application.
-type User struct {
- Email string
- AuthDomain string
- Admin bool
-
- // ID is the unique permanent ID of the user.
- // It is populated if the Email is associated
- // with a Google account, or empty otherwise.
- ID string
-
- // ClientID is the ID of the pre-registered client so its identity can be verified.
- // See https://developers.google.com/console/help/#generatingoauth2 for more information.
- ClientID string
-
- FederatedIdentity string
- FederatedProvider string
-}
-
-// String returns a displayable name for the user.
-func (u *User) String() string {
- if u.AuthDomain != "" && strings.HasSuffix(u.Email, "@"+u.AuthDomain) {
- return u.Email[:len(u.Email)-len("@"+u.AuthDomain)]
- }
- if u.FederatedIdentity != "" {
- return u.FederatedIdentity
- }
- return u.Email
-}
-
-// LoginURL returns a URL that, when visited, prompts the user to sign in,
-// then redirects the user to the URL specified by dest.
-func LoginURL(c context.Context, dest string) (string, error) {
- return LoginURLFederated(c, dest, "")
-}
-
-// LoginURLFederated is like LoginURL but accepts a user's OpenID identifier.
-func LoginURLFederated(c context.Context, dest, identity string) (string, error) {
- req := &pb.CreateLoginURLRequest{
- DestinationUrl: proto.String(dest),
- }
- if identity != "" {
- req.FederatedIdentity = proto.String(identity)
- }
- res := &pb.CreateLoginURLResponse{}
- if err := internal.Call(c, "user", "CreateLoginURL", req, res); err != nil {
- return "", err
- }
- return *res.LoginUrl, nil
-}
-
-// LogoutURL returns a URL that, when visited, signs the user out,
-// then redirects the user to the URL specified by dest.
-func LogoutURL(c context.Context, dest string) (string, error) {
- req := &pb.CreateLogoutURLRequest{
- DestinationUrl: proto.String(dest),
- }
- res := &pb.CreateLogoutURLResponse{}
- if err := internal.Call(c, "user", "CreateLogoutURL", req, res); err != nil {
- return "", err
- }
- return *res.LogoutUrl, nil
-}
-
-func init() {
- internal.RegisterErrorCodeMap("user", pb.UserServiceError_ErrorCode_name)
-}
diff --git a/vendor/google.golang.org/appengine/user/user_classic.go b/vendor/google.golang.org/appengine/user/user_classic.go
deleted file mode 100644
index a747ef3..0000000
--- a/vendor/google.golang.org/appengine/user/user_classic.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build appengine
-
-package user
-
-import (
- "appengine/user"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
-)
-
-func Current(ctx context.Context) *User {
- u := user.Current(internal.ClassicContextFromContext(ctx))
- if u == nil {
- return nil
- }
- // Map appengine/user.User to this package's User type.
- return &User{
- Email: u.Email,
- AuthDomain: u.AuthDomain,
- Admin: u.Admin,
- ID: u.ID,
- FederatedIdentity: u.FederatedIdentity,
- FederatedProvider: u.FederatedProvider,
- }
-}
-
-func IsAdmin(ctx context.Context) bool {
- return user.IsAdmin(internal.ClassicContextFromContext(ctx))
-}
diff --git a/vendor/google.golang.org/appengine/user/user_vm.go b/vendor/google.golang.org/appengine/user/user_vm.go
deleted file mode 100644
index 8dc672e..0000000
--- a/vendor/google.golang.org/appengine/user/user_vm.go
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-// +build !appengine
-
-package user
-
-import (
- "golang.org/x/net/context"
-
- "google.golang.org/appengine/internal"
-)
-
-// Current returns the currently logged-in user,
-// or nil if the user is not signed in.
-func Current(c context.Context) *User {
- h := internal.IncomingHeaders(c)
- u := &User{
- Email: h.Get("X-AppEngine-User-Email"),
- AuthDomain: h.Get("X-AppEngine-Auth-Domain"),
- ID: h.Get("X-AppEngine-User-Id"),
- Admin: h.Get("X-AppEngine-User-Is-Admin") == "1",
- FederatedIdentity: h.Get("X-AppEngine-Federated-Identity"),
- FederatedProvider: h.Get("X-AppEngine-Federated-Provider"),
- }
- if u.Email == "" && u.FederatedIdentity == "" {
- return nil
- }
- return u
-}
-
-// IsAdmin returns true if the current user is signed in and
-// is currently registered as an administrator of the application.
-func IsAdmin(c context.Context) bool {
- h := internal.IncomingHeaders(c)
- return h.Get("X-AppEngine-User-Is-Admin") == "1"
-}
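For context, a minimal sketch of the sign-in flow built on the removed user package (Current, LoginURL, LogoutURL); the redirect targets and the greeting are placeholders.

package example

import (
	"fmt"
	"net/http"

	"google.golang.org/appengine"
	"google.golang.org/appengine/user"
)

// welcome redirects anonymous visitors to the login page and greets
// signed-in users with a sign-out link.
func welcome(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	u := user.Current(ctx)
	if u == nil {
		loginURL, err := user.LoginURL(ctx, r.URL.Path)
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		http.Redirect(w, r, loginURL, http.StatusFound)
		return
	}
	logoutURL, _ := user.LogoutURL(ctx, "/")
	fmt.Fprintf(w, "Hello, %s (admin: %v). Sign out: %s", u, u.Admin, logoutURL)
}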
diff --git a/vendor/google.golang.org/appengine/xmpp/xmpp.go b/vendor/google.golang.org/appengine/xmpp/xmpp.go
deleted file mode 100644
index bbc2753..0000000
--- a/vendor/google.golang.org/appengine/xmpp/xmpp.go
+++ /dev/null
@@ -1,253 +0,0 @@
-// Copyright 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package xmpp provides the means to send and receive instant messages
-to and from users of XMPP-compatible services.
-
-To send a message,
- m := &xmpp.Message{
- To: []string{"kaylee@example.com"},
- Body: `Hi! How's the carrot?`,
- }
- err := m.Send(c)
-
-To receive messages,
- func init() {
- xmpp.Handle(handleChat)
- }
-
- func handleChat(c context.Context, m *xmpp.Message) {
- // ...
- }
-*/
-package xmpp
-
-import (
- "errors"
- "fmt"
- "net/http"
-
- "golang.org/x/net/context"
-
- "google.golang.org/appengine"
- "google.golang.org/appengine/internal"
- pb "google.golang.org/appengine/internal/xmpp"
-)
-
-// Message represents an incoming chat message.
-type Message struct {
- // Sender is the JID of the sender.
- // Optional for outgoing messages.
- Sender string
-
- // To is the intended recipients of the message.
- // Incoming messages will have exactly one element.
- To []string
-
- // Body is the body of the message.
- Body string
-
- // Type is the message type, per RFC 3921.
- // It defaults to "chat".
- Type string
-
- // RawXML is whether the body contains raw XML.
- RawXML bool
-}
-
-// Presence represents an outgoing presence update.
-type Presence struct {
- // Sender is the JID (optional).
- Sender string
-
- // The intended recipient of the presence update.
- To string
-
- // Type, per RFC 3921 (optional). Defaults to "available".
- Type string
-
- // State of presence (optional).
- // Valid values: "away", "chat", "xa", "dnd" (RFC 3921).
- State string
-
- // Free text status message (optional).
- Status string
-}
-
-var (
- ErrPresenceUnavailable = errors.New("xmpp: presence unavailable")
- ErrInvalidJID = errors.New("xmpp: invalid JID")
-)
-
-// Handle arranges for f to be called for incoming XMPP messages.
-// Only messages of type "chat" or "normal" will be handled.
-func Handle(f func(c context.Context, m *Message)) {
- http.HandleFunc("/_ah/xmpp/message/chat/", func(_ http.ResponseWriter, r *http.Request) {
- f(appengine.NewContext(r), &Message{
- Sender: r.FormValue("from"),
- To: []string{r.FormValue("to")},
- Body: r.FormValue("body"),
- })
- })
-}
-
-// Send sends a message.
-// If any failures occur with specific recipients, the error will be an appengine.MultiError.
-func (m *Message) Send(c context.Context) error {
- req := &pb.XmppMessageRequest{
- Jid: m.To,
- Body: &m.Body,
- RawXml: &m.RawXML,
- }
- if m.Type != "" && m.Type != "chat" {
- req.Type = &m.Type
- }
- if m.Sender != "" {
- req.FromJid = &m.Sender
- }
- res := &pb.XmppMessageResponse{}
- if err := internal.Call(c, "xmpp", "SendMessage", req, res); err != nil {
- return err
- }
-
- if len(res.Status) != len(req.Jid) {
- return fmt.Errorf("xmpp: sent message to %d JIDs, but only got %d statuses back", len(req.Jid), len(res.Status))
- }
- me, any := make(appengine.MultiError, len(req.Jid)), false
- for i, st := range res.Status {
- if st != pb.XmppMessageResponse_NO_ERROR {
- me[i] = errors.New(st.String())
- any = true
- }
- }
- if any {
- return me
- }
- return nil
-}
-
-// Invite sends an invitation. If the from address is an empty string
-// the default (yourapp@appspot.com/bot) will be used.
-func Invite(c context.Context, to, from string) error {
- req := &pb.XmppInviteRequest{
- Jid: &to,
- }
- if from != "" {
- req.FromJid = &from
- }
- res := &pb.XmppInviteResponse{}
- return internal.Call(c, "xmpp", "SendInvite", req, res)
-}
-
-// Send sends a presence update.
-func (p *Presence) Send(c context.Context) error {
- req := &pb.XmppSendPresenceRequest{
- Jid: &p.To,
- }
- if p.State != "" {
- req.Show = &p.State
- }
- if p.Type != "" {
- req.Type = &p.Type
- }
- if p.Sender != "" {
- req.FromJid = &p.Sender
- }
- if p.Status != "" {
- req.Status = &p.Status
- }
- res := &pb.XmppSendPresenceResponse{}
- return internal.Call(c, "xmpp", "SendPresence", req, res)
-}
-
-var presenceMap = map[pb.PresenceResponse_SHOW]string{
- pb.PresenceResponse_NORMAL: "",
- pb.PresenceResponse_AWAY: "away",
- pb.PresenceResponse_DO_NOT_DISTURB: "dnd",
- pb.PresenceResponse_CHAT: "chat",
- pb.PresenceResponse_EXTENDED_AWAY: "xa",
-}
-
-// GetPresence retrieves a user's presence.
-// If the from address is an empty string the default
-// (yourapp@appspot.com/bot) will be used.
-// Possible return values are "", "away", "dnd", "chat", "xa".
-// ErrPresenceUnavailable is returned if the presence is unavailable.
-func GetPresence(c context.Context, to string, from string) (string, error) {
- req := &pb.PresenceRequest{
- Jid: &to,
- }
- if from != "" {
- req.FromJid = &from
- }
- res := &pb.PresenceResponse{}
- if err := internal.Call(c, "xmpp", "GetPresence", req, res); err != nil {
- return "", err
- }
- if !*res.IsAvailable || res.Presence == nil {
- return "", ErrPresenceUnavailable
- }
- presence, ok := presenceMap[*res.Presence]
- if ok {
- return presence, nil
- }
- return "", fmt.Errorf("xmpp: unknown presence %v", *res.Presence)
-}
-
-// GetPresenceMulti retrieves multiple users' presence.
-// If the from address is an empty string the default
-// (yourapp@appspot.com/bot) will be used.
-// Possible return values are "", "away", "dnd", "chat", "xa".
-// If any presence is unavailable, an appengine.MultiError is returned.
-func GetPresenceMulti(c context.Context, to []string, from string) ([]string, error) {
- req := &pb.BulkPresenceRequest{
- Jid: to,
- }
- if from != "" {
- req.FromJid = &from
- }
- res := &pb.BulkPresenceResponse{}
-
- if err := internal.Call(c, "xmpp", "BulkGetPresence", req, res); err != nil {
- return nil, err
- }
-
- presences := make([]string, 0, len(res.PresenceResponse))
- errs := appengine.MultiError{}
-
- addResult := func(presence string, err error) {
- presences = append(presences, presence)
- errs = append(errs, err)
- }
-
- anyErr := false
- for _, subres := range res.PresenceResponse {
- if !subres.GetValid() {
- anyErr = true
- addResult("", ErrInvalidJID)
- continue
- }
- if !*subres.IsAvailable || subres.Presence == nil {
- anyErr = true
- addResult("", ErrPresenceUnavailable)
- continue
- }
- presence, ok := presenceMap[*subres.Presence]
- if ok {
- addResult(presence, nil)
- } else {
- anyErr = true
- addResult("", fmt.Errorf("xmpp: unknown presence %q", *subres.Presence))
- }
- }
- if anyErr {
- return presences, errs
- }
- return presences, nil
-}
-
-func init() {
- internal.RegisterErrorCodeMap("xmpp", pb.XmppServiceError_ErrorCode_name)
-}
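For context, a minimal sketch of the presence side of the removed xmpp package, complementing the message example in the package comment above; the JIDs and status text are placeholders.

package example

import (
	"golang.org/x/net/context"

	"google.golang.org/appengine/xmpp"
)

// announceAndCheck publishes our own availability and then queries
// another JID's presence.
func announceAndCheck(ctx context.Context) (string, error) {
	p := &xmpp.Presence{
		To:     "kaylee@example.com",
		Status: "Tending the carrots",
	}
	if err := p.Send(ctx); err != nil {
		return "", err
	}
	// Returns "", "away", "dnd", "chat" or "xa";
	// ErrPresenceUnavailable if the user is offline.
	return xmpp.GetPresence(ctx, "kaylee@example.com", "")
}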