diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index e227fcc..0000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,17 +0,0 @@ -version: 2.1 - -orbs: - moul: moul/build@1.14.0 # https://github.com/moul/build - -workflows: - main: - jobs: - - moul/golang-build: - gopkg: moul.io/protoc-gen-gotemplate - - moul/golang-build: - gopkg: moul.io/protoc-gen-gotemplate - tag: '1.12' - - moul/golang-build: - gopkg: moul.io/protoc-gen-gotemplate - tag: '1.11' - - moul/docker-build diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index fc720d6..0000000 --- a/.dockerignore +++ /dev/null @@ -1,3 +0,0 @@ -Dockerfile -*~ -.git/ diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 2aa7027..968eb12 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @moul +* @unisack-org diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml deleted file mode 100644 index 3115d73..0000000 --- a/.github/FUNDING.yml +++ /dev/null @@ -1,6 +0,0 @@ -#github: ["moul"] -patreon: moul -open_collective: moul -custom: -- "https://www.buymeacoffee.com/moul" -- "https://manfred.life/donate" diff --git a/.github/ISSUE_TEMPLATE/custom.md b/.github/ISSUE_TEMPLATE/custom.md deleted file mode 100644 index 9917109..0000000 --- a/.github/ISSUE_TEMPLATE/custom.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: Custom -about: 'Anything else: questions, discussions, thanks, ascii-arts, ...' -title: '' -labels: discussion -assignees: moul - ---- diff --git a/.releaserc.js b/.releaserc.js deleted file mode 100644 index 566642f..0000000 --- a/.releaserc.js +++ /dev/null @@ -1,8 +0,0 @@ -module.exports = { - branch: 'master', - plugins: [ - '@semantic-release/commit-analyzer', - '@semantic-release/release-notes-generator', - '@semantic-release/github', - ], -}; diff --git a/AUTHORS b/AUTHORS index f692c65..830544e 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1,5 +1,4 @@ # This file lists all individuals having contributed content to the repository. -# For how it is generated, see 'https://github.com/moul/rules.mk' Alexandre Beslic Anastasia DERUELLE @@ -26,3 +25,4 @@ Tommy PAGEARD Valerio Gheri Victor Login webii +Vasiliy Tolstov diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 33fefe4..0000000 --- a/Dockerfile +++ /dev/null @@ -1,18 +0,0 @@ -# builder -FROM golang:1.15-alpine as builder -RUN apk --no-cache add make git go rsync libc-dev -RUN go get -u golang.org/x/tools/cmd/goimports -RUN go get -u github.com/gobuffalo/packr/v2/packr2 -COPY . /go/src/moul.io/protoc-gen-gotemplate -WORKDIR /go/src/moul.io/protoc-gen-gotemplate -RUN packr2 -RUN go install -a -tags netgo -ldflags '-w -extldflags "-static"' . ./cmd/web-editor -RUN ls -la /go/bin - -# runtime -FROM znly/protoc:0.4.0 -COPY --from=builder /go/bin/web-editor /go/bin/ -COPY --from=builder /go/bin/protoc-gen-gotemplate /go/bin/ -ENV PATH=$PATH:/go/bin -EXPOSE 8080 -ENTRYPOINT [] diff --git a/LICENSE b/LICENSE index f663c46..59b4d49 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,8 @@ MIT License -Copyright (c) 2016 Manfred Touron +Copyright (c) 2016-2021 Manfred Touron +Copyright (c) 2021 Unistack LLC + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/Makefile b/Makefile deleted file mode 100644 index 2f8bb81..0000000 --- a/Makefile +++ /dev/null @@ -1,24 +0,0 @@ -GOPKG ?= moul.io/protoc-gen-gotemplate -DOCKER_IMAGE ?= moul/protoc-gen-gotemplate -GOBINS ?= . ./cmd/web-editor -GOLIBS ?= . 
- -all: test install - -include rules.mk - -.PHONY: examples -examples: install - cd examples/time && make - cd examples/enum && make - cd examples/import && make - cd examples/dummy && make - cd examples/flow && make - cd examples/concat && make - cd examples/flow && make - cd examples/sitemap && make - cd examples/go-generate && make - #cd examples/single-package-mode && make - cd examples/helpers && make - cd examples/arithmetics && make - #cd examples/go-kit && make diff --git a/assets.go b/assets.go new file mode 100644 index 0000000..a8a7e42 --- /dev/null +++ b/assets.go @@ -0,0 +1,22 @@ +// +build dev + +package main + +import ( + "log" + + "github.com/shurcooL/vfsgen" + "github.com/unistack-org/protoc-gen-micro/assets" +) + +func main() { + err := vfsgen.Generate(assets.Assets, vfsgen.Options{ + PackageName: "assets", + BuildTags: "!dev", + VariableName: "Assets", + Filename: "assets/vfsdata.go", + }) + if err != nil { + log.Fatal(err) + } +} diff --git a/assets/assets b/assets/assets deleted file mode 100644 index 8247d2f..0000000 --- a/assets/assets +++ /dev/null @@ -1,6 +0,0 @@ - /Users/moul/go/src/moul.io/protoc-gen-gotemplate/assets: - total used in directory 552 available 9223372036849978910 - drwxr-xr-x 4 moul staff 128 Dec 9 11:31 . - lrwxr-xr-x 1 moul staff 33 Dec 9 11:31 .#web-editor.jpg -> moul@manfred-spacegray.local.9471 - drwxr-xr-x 21 moul staff 672 Sep 13 18:05 .. - -rw-r--r--@ 1 moul staff 280357 Oct 26 2017 web-editor.jpg diff --git a/assets/assets.go b/assets/assets.go new file mode 100644 index 0000000..0312b5f --- /dev/null +++ b/assets/assets.go @@ -0,0 +1,20 @@ +// +build dev + +package assets + +import ( + "go/build" + "log" + "net/http" +) + +func importPathToDir(importPath string) string { + p, err := build.Import(importPath, "", build.FindOnly) + if err != nil { + log.Fatal(err) + } + return p.Dir +} + +// Assets contains the project's assets. +var Assets http.FileSystem = http.Dir("./templates") diff --git a/assets/stub.go b/assets/stub.go new file mode 100644 index 0000000..b1b684d --- /dev/null +++ b/assets/stub.go @@ -0,0 +1 @@ +package assets diff --git a/assets/vfsdata.go b/assets/vfsdata.go new file mode 100644 index 0000000..8365d38 --- /dev/null +++ b/assets/vfsdata.go @@ -0,0 +1,202 @@ +// Code generated by vfsgen; DO NOT EDIT. + +// +build !dev + +package assets + +import ( + "bytes" + "compress/gzip" + "fmt" + "io" + "io/ioutil" + "net/http" + "os" + pathpkg "path" + "time" +) + +// Assets statically implements the virtual filesystem provided to vfsgen. 
+var Assets = func() http.FileSystem { + fs := vfsgen۰FS{ + "/": &vfsgen۰DirInfo{ + name: "/", + modTime: time.Date(2021, 1, 8, 19, 16, 4, 19648059, time.UTC), + }, + "/{{.File.Package}}_micro.pb.go.tmpl": &vfsgen۰CompressedFileInfo{ + name: "{{.File.Package}}_micro.pb.go.tmpl", + modTime: time.Date(2021, 1, 8, 12, 40, 55, 173921701, time.UTC), + uncompressedSize: 4772, + + compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff\xd4\x57\x4d\x6f\xdc\x36\x13\x3e\x5b\xbf\x62\x20\xbc\x08\xa4\x60\x2d\x1d\xde\xdb\x16\x39\x38\x46\x82\x16\xa8\x9d\x20\x2e\xda\x43\x10\x18\xb4\x34\xab\x25\xac\x25\x55\x92\x5a\xc7\x90\xf5\xdf\x0b\x7e\xe8\x8b\xab\xd5\xda\x86\x51\xa0\xbe\x58\x22\xe7\x19\xce\x0c\x9f\x99\x7d\x94\xa6\x70\xc9\x73\x84\x02\x19\x0a\xa2\x30\x87\xbb\x47\xa8\x04\x57\x3c\x3b\x2f\x90\x9d\xef\x68\x26\x78\x90\xa6\x20\x79\x2d\x32\x5c\x43\xd3\x24\x9f\x69\x89\xc9\x35\xd9\x61\xdb\x06\x15\xc9\xee\x49\x81\xd0\x34\x05\xff\x7a\x5f\xfc\x4e\xa4\xfa\x54\xe2\x0e\x99\x02\x63\x07\x4f\x20\xab\x92\xaa\x0b\x21\xc8\x23\x84\xbf\x84\xf0\x04\x25\x91\xaa\x6d\x83\x80\xee\x2a\x2e\x14\x44\xc1\x59\x98\x71\xa6\xf0\xa7\x0a\x83\xe0\xcc\x9c\x78\x4b\x2a\x0a\x61\x41\xd5\xb6\xbe\x4b\x32\xbe\x4b\x6b\x46\xa5\x22\xd9\xfd\x39\x17\x45\x6a\x4c\xd2\xfd\xff\x53\x52\xd1\x10\x00\xa0\x43\x65\x25\xd5\x27\x9f\x06\x5a\xc3\xb0\x07\x4a\x14\x7b\x14\xcf\x00\x5a\xc3\x30\x88\x83\xa0\x69\xce\xe1\x7f\x37\x28\xf6\x34\x43\x5d\x0d\x58\x7f\x80\xc4\xbd\x9b\xf2\xc0\x13\x28\x41\x77\x37\xf5\x66\x43\x7f\x42\xe8\xb6\x42\xd0\xb9\xa7\x29\x5c\xe3\x43\xd3\x8c\x1d\xb4\xed\x27\x96\x57\x9c\x32\x25\xf5\x15\xec\x69\x8e\x12\x74\x21\xb0\x5f\xde\xa1\xca\x89\x22\xb0\xe1\x02\x7c\x30\x48\xfb\x16\x6c\x6a\x96\x2d\x7a\x8f\x62\xf8\xfe\xe3\x7d\x5f\xe8\xa4\xdb\x80\x26\xd0\xd5\xdc\x13\x31\x3a\x72\xd6\x32\x30\x86\xba\x02\x82\xb0\x02\x87\xbc\xaf\x50\x6d\x79\xde\xb6\xfd\x3e\xdd\x00\x43\x88\xb6\x4a\x55\x7f\xa2\xb8\x83\x24\x86\xd0\x94\x40\x1b\x74\xa7\xe8\xd2\xbd\x3b\x3c\xc5\x86\xa3\xff\x74\x02\x6b\x08\xfd\x8c\x92\xa6\x71\x44\x0c\x57\xbd\xed\x57\xa2\xb6\x6b\xf8\xfe\x43\x2a\x41\x59\xd1\x84\x4d\xa3\x0f\xd7\xab\x90\xb4\x6d\xd8\x0e\x96\x36\xd8\x43\x5b\x1b\xe8\xd4\xd6\xcb\xe5\x23\xcf\x1f\xa7\xb9\xe8\x3f\xbd\x6a\xc2\x1c\x4c\x26\xa1\x69\x27\xc8\xf2\x11\xc2\xb9\xe5\x02\xa2\xe4\xd2\x90\xf2\x46\x09\x24\x3b\xca\x8a\x18\x22\x53\x56\x14\xc3\xd2\x08\x69\x17\xd7\xa0\x44\x8d\x4b\x27\xfc\x4a\x58\x5e\xa2\x58\x43\x28\xaa\xcc\xc5\xd2\x7a\xd7\xd7\x15\x48\x5e\xe4\x39\x55\x94\x33\x52\x7e\xa4\x2c\xa7\xac\x90\x90\xf8\x97\x95\x98\x52\x7e\x00\x52\x55\xc8\xf2\x68\xb2\xbc\xd2\xc9\xeb\x9c\xe3\xe0\x30\x9a\x81\x53\x07\x60\xb9\xea\x77\xe7\x90\xd3\x37\x81\xaa\x16\x6c\x70\x17\xd8\x7e\xf2\xc9\xe1\x5e\x80\x32\x85\x62\x43\x32\x0c\xd4\x63\x85\xa7\xcd\x4c\x17\x2c\x53\xdb\xb4\xbe\xc0\xbf\xed\x92\x69\xfc\xdf\x58\x55\xab\x3f\xf4\x01\xd3\x89\x97\x74\x13\x0f\x46\x48\x59\x8d\x90\x5f\x6a\xf5\x6c\x68\x47\x15\x9f\x17\x33\xec\x19\x63\x08\xcb\x67\x41\x8c\x2b\x98\x07\x6a\xa8\x6b\xad\xc8\x0d\xe7\xe4\xd2\xfe\x5f\xc1\xfb\xa6\x19\xb2\x6f\xdb\x15\x24\x49\x32\x1e\xc0\xc9\x25\x29\xcb\x2f\x95\x26\x52\x0c\x91\x5f\xf0\xdb\xde\xb3\x5b\x5e\x01\x0a\xc1\x45\xec\xe2\xc5\x52\xe2\xe9\x18\xde\xf6\x48\x36\xdc\xeb\xf3\x8e\x7f\x59\x09\x8c\x75\x77\xe9\xda\xfa\xf8\xe9\xe6\xb9\xd5\xbf\x2e\xc7\xe8\xf7\x1a\xf2\xf5\xb0\x17\x32\xcf\xe0\x5e\x46\xba\xd9\x26\x3b\xb8\x00\xaf\xdd\x00\x5c\x65\xa3\x18\xbc\x5a\x9b\xdd\x1b\x64\xf9\x95\x2c\xa2\x1e\xd5\xb4\xb1\x2d\xa2\xd9\xfe\x86\xd9\x7e\x61\x7b\xd2\x05\x87\x53\xd6\x74\xc1\x91\x49\xab\x3d\x5f\xb0\xfc\xb2\xe4\x12\xa3\xc5\x9b\xf4\xa7\x54\x07\x39\x08\xc3\x8f\xc0\xd9\xeb\x14\x23\x8f\x56\x3e\x7a\x3a\x11\xb5\x2f\x2f\xec\x51\xd
4\xcf\x8f\xd6\x72\xc3\x3e\x0f\x4f\x69\x0a\x57\x9a\xd0\xe0\xb4\x91\x54\xf5\x66\x33\x3b\x68\xdd\x2f\x4c\x67\xb8\xb5\xaf\xf3\x44\xe8\x6c\xc7\xd7\x7f\xb6\x34\x6c\xff\x75\xb6\xbf\x7c\xc8\x0e\xe2\x60\x66\x9c\xbe\x62\x94\x2c\x34\x8f\xf1\x7a\xc0\x8a\x67\x4d\xac\x17\x7b\x1d\xf8\xf0\xba\x89\xe8\x91\xaf\xf3\x3f\xc7\x35\xfb\x03\xfe\x0d\x0b\x2a\x15\x8a\x63\x9c\x11\x6e\x5f\xfa\x4c\x33\x82\xf7\x04\x3a\x92\x30\x96\xfa\xee\x72\x57\x20\xb7\xc7\x38\xba\x02\x5e\x29\x39\x0c\x76\x07\x74\xbb\xdd\x6c\x37\x49\xb9\x19\x36\xc3\x78\xcd\x2c\xfe\x80\xe2\x33\x15\x9a\x5f\x53\xde\xff\xf7\x89\x7f\x82\x13\xd3\x8a\x4f\x68\xe6\x11\xeb\x8d\xc8\x65\x4b\x6a\x49\x35\x7e\x1e\x44\xef\xec\x54\x02\xa9\x44\x9d\x75\xdf\x3f\x36\x96\x85\x5b\x1c\xb9\xdb\x9a\x4f\x97\x45\x6b\x47\x98\x46\x6e\x2d\x44\x7f\xbb\xdd\xae\x26\xdf\x3e\x96\x03\x27\x3e\xd9\x86\xe0\x0c\x2f\x7b\x0d\xad\xdf\xba\x52\xeb\xaf\xa7\xbf\xa8\xda\x76\xb8\x5e\x60\xc7\xf1\x28\x68\x27\xa0\xa5\xe3\x72\x24\x93\x6b\x7c\xe8\xfa\xe4\x9d\x1f\x43\xb3\x6d\x6d\x2b\x24\x49\x12\xc7\x56\x9b\xbc\x35\x6d\x6d\x5e\xaf\x25\x6f\x07\x7e\x5b\xa1\x62\x6c\x3d\x9d\xb2\xa4\x52\x16\x35\xca\xa2\x42\x79\xb5\x3e\xd1\x47\xf6\xea\xe4\x48\x4b\x4c\x95\x83\xaf\x4a\x96\x74\xc4\xa0\x48\x4e\x79\x5d\x52\x36\x13\x2d\x32\x6e\xe1\x19\x0d\x3c\xaf\x44\xfe\x09\x00\x00\xff\xff\x18\xe4\x19\x27\xa4\x12\x00\x00"), + }, + "/{{.File.Package}}_micro_grpc.pb.go.tmpl": &vfsgen۰CompressedFileInfo{ + name: "{{.File.Package}}_micro_grpc.pb.go.tmpl", + modTime: time.Date(2021, 1, 8, 12, 50, 25, 183502330, time.UTC), + uncompressedSize: 6715, + + compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff\xcc\x57\x4d\x6f\xa4\x38\x13\x3e\xe3\x5f\x51\x83\x5e\x8d\xe0\x15\xa1\x0f\x7b\xcb\x28\x87\x51\xb4\xa3\x5d\x69\x93\x59\x4d\xf6\x1e\x31\xb4\x9b\xa0\x80\xe9\x18\xd3\xe9\x88\xf0\xdf\x57\x2e\x1b\x30\x6e\xa0\xbb\x43\xb2\xbb\x39\xa4\x85\xed\x72\x7d\x3e\x55\x8f\x57\x2b\xb8\x2e\xd6\x14\x12\xca\x28\x8f\x04\x5d\xc3\xcf\x17\xd8\xf2\x42\x14\xf1\x45\x42\xd9\x45\x9e\xc6\xbc\x20\xab\x15\x94\x45\xc5\x63\x7a\x09\x75\x1d\x7e\x4b\x33\x1a\xde\x46\x39\x6d\x1a\xb2\x8d\xe2\xc7\x28\xa1\x50\xd7\x49\xf1\xe7\x63\xf2\x47\x54\x8a\x5f\x33\x9a\x53\x26\x00\xcf\xc1\x2b\x94\xdb\x2c\x15\x5f\x39\x8f\x5e\xc0\xfd\xe2\xc2\x2b\x64\x51\x29\x9a\x86\x90\x34\xdf\x16\x5c\x80\x47\x1c\x37\x2e\x98\xa0\x7b\xe1\x12\xe2\xa0\xc6\xfb\x38\x4b\xe5\x1d\x6e\x92\x8a\x87\xea\x67\x18\x17\xf9\xaa\x62\x69\x29\xa2\xf8\xf1\xa2\xe0\xc9\x0a\x4f\xad\x76\xbf\xac\xd4\x41\x17\x5a\xc1\x92\xf2\x1d\xe5\x27\x08\xaa\x83\x2e\xf1\x09\xd9\x45\x1c\x3c\x02\x00\x70\x0f\xe6\x2d\xe1\xf7\xad\x48\x0b\x36\xd8\x51\xfa\xda\x1d\x9f\x90\xba\xbe\x80\xff\xa1\xab\x97\x57\xda\xe7\xa6\x51\xab\x77\x94\xef\xd2\x98\xca\x50\xe1\xa6\xfe\xc6\xd8\xc1\x2b\x08\x9e\xe6\x77\xd5\x66\x93\xee\xc1\xd5\x5b\xae\x94\x25\xe2\x65\x2b\x23\x3a\x90\x7f\x85\xac\x78\xa6\xfc\x5b\xca\x65\xf0\xf4\x0e\x94\x82\x57\xb1\x80\x1a\x4d\x8c\x87\x26\x5e\xe3\x0f\xee\x30\x79\x43\x29\x78\xca\x12\xd2\x10\x99\xce\x1b\x79\x12\x74\x94\x4b\x51\x6d\x36\xb8\x7c\x4b\x9f\x87\x7a\x7b\x55\x31\xa7\x91\xa0\xc0\xe8\x33\x94\xed\x92\xd2\xb0\xa9\x58\x3c\x23\xe9\x19\xda\x83\x71\x23\x7d\x98\xd2\xaa\x3c\xe3\x54\x54\x9c\xc1\xe7\x53\x62\x52\xc7\x97\x10\x07\xe8\xf2\x25\xfe\x6f\xa4\xcb\x75\xcd\x23\x96\xd0\x3e\x07\x37\x54\x3c\x14\xeb\x36\x53\x9c\x3e\xa9\x05\xcc\xd3\xef\x6c\x5b\x89\xbf\x64\x12\x86\xd5\x1b\x1a\xd5\xab\xc4\xca\xad\x21\xf6\xbd\x12\xa7\xc9\xa5\x1b\x88\xd8\x1a\x3c\x56\x08\xd0\x11\xb8\x13\x9c\x46\x79\xca\x12\x1f\x3c\xb4\x91\x72\x63\xe9\xa2\x69\x54\x94\xbd\x18\xfe\x7f\x4a\x10\x64\x40\x35\x44\xbd\x58\xec\x41\xe3\x2b\xbc\x56\xbf\x01\x70\xfa\x84\x37\x75\x8e\x37\x4d\x00\xc5\x56\x94\x10
\x86\xe1\x30\x43\x51\x96\xa9\x6a\xf7\xc1\xb3\xd3\x74\xdf\xa9\xd1\xcb\x01\x50\xce\x0b\xee\x63\xe2\xa4\xaf\x34\x2b\xa9\x74\xd8\xf6\xf3\x03\x7c\xfa\x30\xf3\xff\x33\xe1\x47\x91\xb6\xe6\xa4\x88\x6d\x2d\x5b\xa3\xb1\xea\x2b\xdd\x40\xc1\xc7\xca\xc9\x3b\x28\x3a\x94\x01\x09\x52\x1a\xe5\x78\xad\xac\xe8\x38\x8c\x43\x75\x46\x7a\x11\xe0\xf7\x2d\x7d\xfe\x41\x9f\x2a\x5a\x0a\x2f\x0e\x25\xbc\x02\x70\xed\xb0\x86\x9d\xf7\x6e\x80\xa0\x35\xdc\xac\x1b\x5f\x79\x1a\x86\xa1\x8f\x4a\xd3\x0d\xea\xfb\x74\x05\x2c\xcd\x34\xe0\x3b\xc8\xb3\x34\x43\x73\x70\xd5\x70\x6c\x0c\x3a\xd0\x34\xf0\xcf\xfe\x99\xf6\x5f\x5e\xe9\xf0\x85\x77\x94\xad\x3d\x4e\x9f\xfc\x2f\xe7\x3a\x46\xd9\x5a\x67\xe2\xac\x8e\xd7\x46\xbb\x56\x06\x34\x81\xbc\xdd\x28\xe0\xf6\xce\x72\x2b\xad\xfc\x3c\x2c\xa2\x5a\x6d\x1a\x29\x97\x25\xb7\x20\xe1\xd2\xf3\x40\x2a\x3b\x29\xcf\xd3\x01\xe9\x76\xf0\x2a\xc3\x23\x0c\x92\xec\xa4\xe7\x15\xf8\xc8\x54\x1d\xe9\x00\x90\x32\x41\xf9\x26\xea\x66\x8f\xc6\xac\xe7\xdb\x28\xc6\x5d\x99\xeb\x9b\x32\xf1\x3a\xa9\xba\xf1\x15\x2a\x71\xfb\x07\x8d\x77\x33\xdb\xe6\x30\x18\x19\x04\x58\xe4\xb6\x73\x3a\x99\xf2\xe6\xaf\x6c\x7d\x9d\x15\x25\xf5\xa6\x5b\x43\xa7\xa6\x2f\xad\x56\xe4\xc0\x0c\xdb\x02\x7d\x1e\xcb\xd9\x6a\x56\xb6\x74\x7f\x7b\x7b\x97\x65\xb6\x61\xf5\xe9\xd6\x9e\x43\x85\xba\x3c\x0e\x39\x91\x42\xc4\x90\x73\x28\xa3\xda\x0a\x3a\x37\xf8\x6a\x10\xec\x4f\x1b\x04\x9d\x55\xfe\xa9\x09\x83\x9a\x38\xf9\x38\x4c\x1d\x8d\xd1\x7d\xd8\x96\x55\xee\x13\x47\xc3\xea\xaa\x85\x95\x83\xc7\xe4\x29\xad\x8a\x38\x0d\xe9\x8e\x7d\xea\x8f\xd9\xb8\x73\x1a\xe2\x90\x76\x35\x57\x88\x53\x7c\x45\x65\xe3\xcd\x9e\x0f\x0a\x6e\xc8\xe8\xf6\xa1\xee\x99\xad\xad\x4b\xd4\x4c\x01\x75\x4a\x65\x7b\x7e\x89\xd2\x16\xff\x39\x1c\x42\x7c\x42\x2f\xe2\x29\x5f\xa4\xb5\xcb\xff\xe9\x5a\x11\x79\x4a\x6b\x5d\x1f\x21\x64\x6f\x0b\x83\x97\xc3\x78\x97\x38\x1a\x07\xa3\xc6\xb4\x6d\x16\xee\x16\xd9\x76\xa4\xe7\xa0\x75\xf9\xd4\x60\xec\x87\xbb\x1d\xc9\x91\xe1\x7e\x38\xc9\xe4\x15\x13\x88\x02\xe5\x6e\xeb\xb9\x49\xdf\xfa\x07\x9a\x7e\xcd\xea\x07\xda\xf1\x66\xf8\x5b\xc4\xd6\x19\x0a\x74\x3d\xd0\x9e\x78\xfa\x08\x1a\x32\xfa\x26\x82\x7f\xed\x51\x74\xc6\x30\x57\xc5\xf0\x70\xa4\x18\xb4\xaf\x47\xe9\xf8\x60\x4a\xe8\xb7\xbf\x52\xd7\x97\xf0\x0c\xf1\x5c\xc0\x3b\x89\x63\x74\x7b\x93\x25\x2f\xe3\xa5\x8e\x73\xc0\x4a\xc7\xcb\xd6\x71\xba\x41\x80\x33\xc0\x19\x30\xaf\x87\x70\xa2\x7a\xc2\x41\x40\x03\x59\xdc\xf3\x54\xb5\xa7\x59\x68\x4d\xcb\x54\x7d\x62\x73\xd4\x33\x15\x2f\xd1\xda\xf1\xc8\xce\x82\xf7\xad\xaa\xd1\x47\x9e\x24\xe1\x56\x27\x32\xdb\xe4\xb9\xfe\x73\xfa\x84\x77\x0e\xdb\xe8\x7b\x53\x63\x05\x8f\x21\x33\x9e\xe3\xc5\xb3\xac\x78\x96\x13\xbf\x99\x11\x4b\x95\x1d\xbd\x1a\x0f\xb0\xf5\xc8\xb2\x79\xf0\x1c\x73\xed\x39\xf0\xb1\x5b\xe7\xb8\xf4\x60\x12\x99\x45\xd1\xb1\xdf\x33\xb9\xaf\x9d\x21\xa3\xed\x4f\xb7\xb4\x8f\x25\xbe\x96\x49\xfe\x30\x31\xf9\x5c\xed\x77\x83\xb6\x23\x55\x7e\x3f\x81\xaf\xcc\x41\xdb\x32\x5c\x8b\x3c\x0e\xe6\xad\x6c\x67\x6f\xa0\xaf\x07\xf6\x1f\xb2\xd7\xc5\xdc\xf5\x50\xc7\x2c\x75\x7d\x07\xe2\x3a\x9a\x95\x59\x06\xb9\x98\xb5\x1e\xa8\x3c\x4a\x5a\xa7\x29\xeb\x34\x34\x17\xb8\x3f\x5f\x8c\xa7\xb0\x55\x84\xd1\x08\xd2\x17\x04\x68\xa6\x3d\x58\x44\xd5\x24\x0b\x1f\x4a\x54\x47\x79\x6a\xd3\x90\xbf\x03\x00\x00\xff\xff\x0e\x12\x5e\x53\x3b\x1a\x00\x00"), + }, + "/{{.File.Package}}_micro_http.pb.go.tmpl": &vfsgen۰CompressedFileInfo{ + name: "{{.File.Package}}_micro_http.pb.go.tmpl", + modTime: time.Date(2021, 1, 8, 12, 50, 4, 963084783, time.UTC), + uncompressedSize: 6871, + + compressedContent: 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff\xcc\x58\xcb\x6f\xdb\x36\x18\x3f\x8b\x7f\xc5\x57\x21\x28\xa4\x41\xa6\x0f\xbb\xa5\xf0\xa1\x08\x5a\x6c\xc3\xd2\x16\xc9\xb0\x4b\x51\x04\xac\xfc\xd9\x16\x22\x51\x0a\x45\x3b\x29\x14\xfe\xef\x03\x1f\x92\x25\x59\xb6\xe5\x3a\xd9\xd6\x43\x13\x91\xdf\xfb\xf9\x63\xa6\x53\xb8\xca\xe7\x08\x4b\xe4\x28\x98\xc4\x39\x7c\xff\x01\x85\xc8\x65\x1e\x4f\x96\xc8\x27\x59\x12\x8b\x9c\x4c\xa7\x50\xe6\x6b\x11\xe3\x25\x54\x15\xfd\x98\xa4\x48\x3f\xb1\x0c\x95\x22\x05\x8b\xef\xd9\x12\xa1\xaa\x96\xf9\x97\xfb\xe5\x9f\xac\x94\x1f\x52\xcc\x90\x4b\x30\x74\xf0\x0c\x65\x91\x26\xf2\xbd\x10\xec\x07\xf8\xef\x7c\x78\x86\x94\x95\x52\x29\x42\x92\xac\xc8\x85\x84\x80\x78\x7e\x9c\x73\x89\x4f\xd2\x27\xc4\x33\x1a\xef\xe2\x34\xd1\x32\xfc\x65\x22\x57\xeb\xef\x34\xce\xb3\xe9\x9a\x27\xa5\x64\xf1\xfd\x24\x17\xcb\xa9\xa1\x9a\x6e\x7e\x9d\x5a\x42\x1f\x6a\xc6\x12\xc5\x06\xc5\x08\x46\x4b\xe8\x77\x15\xde\xad\xa4\x2c\x8e\x30\x4f\x2c\xe9\x44\x93\xfa\x24\x24\x64\xc3\x04\x04\x04\x00\xe0\x0e\xda\x46\xd0\xcf\x85\x4c\x72\xde\xb9\xb1\xbc\xf5\x4d\x48\x48\x55\x4d\xe0\xc2\x44\xea\x72\xe6\x42\xa6\x94\x3d\xbd\x45\xb1\x49\x62\xd4\x91\x36\x97\xee\xdb\x84\x1e\x9e\x41\x8a\x24\xbb\x5d\x2f\x16\xc9\x13\xf8\xee\xca\xd7\xbc\x44\xfe\x28\x74\x42\x3a\xfc\xcf\x90\xe6\x8f\x28\x3e\x26\x42\xc7\xde\xdd\x40\x29\xc5\x3a\x96\x50\x19\x13\xe3\xae\x89\x57\xe6\x87\xb9\xe1\x5a\x42\x29\x45\xc2\x97\x44\x11\x5d\x0d\xd7\x9a\x12\x5c\x92\x4a\xb9\x5e\x2c\xcc\xf1\x27\x7c\xec\xea\xdd\xaa\x8a\x05\x32\x89\xc0\xf1\x11\xca\xfa\xc8\x6a\x58\xac\x79\x7c\x80\x33\x68\x69\x8f\x86\x8d\x0c\x61\x9f\x56\xeb\x99\x40\xb9\x16\x1c\xde\x8e\x89\x49\x15\x5f\x42\x1c\x19\x97\x2f\xcd\xff\x4a\xbb\x5c\x55\x82\xf1\x25\x6e\x73\x70\x8d\x72\x95\xcf\xeb\x4c\x09\x7c\xb0\x07\x26\x4f\xbf\xf3\x62\x2d\xff\xd2\x49\xe8\x16\x3f\x6d\x15\xbf\x65\x2b\x8b\x16\xdb\xe7\xb5\x1c\xc7\x97\x2c\x80\xf1\x39\x04\x3c\x97\xe0\x22\x70\x2b\x05\xb2\x2c\xe1\xcb\x10\x02\x63\x23\x8a\xd6\xd1\x44\x29\x1b\xe5\x20\x86\x5f\xc6\x04\x41\x07\xd4\x75\x78\x10\xcb\x27\x70\xed\x49\xaf\xec\xcf\x08\x04\x3e\x18\x49\x8d\xe3\x4a\x45\x90\x17\xb2\x04\x4a\x69\x37\x43\x2c\x4d\x6d\xb5\x87\x10\xf4\xd3\x74\xd7\xa8\x71\xc7\x11\xa0\x10\xb9\x08\x4d\xe2\xb4\xaf\x98\x96\xa8\x1d\xee\xfb\xf9\x0a\x3e\xbd\x9a\xf9\xff\x9b\xf0\x1b\x96\xba\xe6\x34\x4b\xdf\x5a\x3e\xd7\x03\x44\xb7\x8c\xab\xb3\x20\x2f\x90\xb3\x22\xb1\x22\x80\x86\xf4\x06\xcb\x22\xe7\x25\x96\x35\x25\x0a\x91\xb1\x42\x57\x70\xc6\xee\x31\xc8\x58\xf1\xd5\x36\xeb\xb7\x84\x4b\x14\x0b\x16\x63\xa5\x22\xa8\x2a\x48\x91\x1f\x12\xa8\x54\xd8\xa8\xb6\xdd\x76\x71\x1f\xc1\xc5\x46\x8b\x1e\x6d\xc7\x57\xbf\xaa\x2e\xee\x95\xf2\xbf\xc1\x4c\x37\xfc\x04\x82\x25\xca\x6b\x2c\x4b\xb6\x44\xd3\x5b\x76\xd8\x5e\x6c\xe8\x6d\xbc\xc2\x8c\xd1\x3f\xca\x9c\xbb\x5f\x6f\x70\x11\xda\xe9\xaa\x54\xb5\x8d\x43\x2f\x2c\xad\x4f\x6e\x62\x7e\x39\x03\x56\x14\xc8\xe7\x81\xfe\x8c\xcc\x0d\xc0\xce\x5a\x71\x33\x23\xf0\xab\x4a\x7f\xfe\x8d\xe2\x3b\x50\xa5\xfc\x30\x82\xbd\x2c\x5f\x98\x5c\xd5\x0c\xfa\x77\xc7\xe0\xe8\xc7\x27\x69\x48\xf6\x07\x9d\xfc\x6b\x56\x04\x36\x72\x1d\xa9\x2d\x1f\x43\xd2\x28\xca\xc5\xd0\x74\x09\x76\x66\x90\xe3\x2c\xcd\x81\xa9\x32\x1d\xa3\x98\xc6\xd4\xd2\xe8\xa2\x8e\xcc\xf7\x27\x7c\xbc\xc1\x87\x35\x96\x32\x88\xa9\x9e\xb6\x11\xf8\xfd\x2e\xa3\x4d\x33\xf8\x91\x99\xe1\xad\xaa\xaf\x54\x18\xd9\x2c\x50\x4a\x6d\xf9\x24\x0b\xa3\xf0\xcd\x0c\x78\x92\xba\x05\xd0\xac\x00\x9e\xa4\xc6\x1e\x73\xaa\xb6\x9e\x0d\x8d\x52\x50\x0a\xfe\xdd\x7f\x6d\xfb\x2f\x67\x2e\x7e\xf4\x56\x57\x96\xc0\x87\xf0\xdd\xa9\x8e\x21\x9f\xbb\x54\x9c\xb4\x01\xeb\x70\x57\xd6\x00\x15\x69\xe9\xad\x81\x56\xcb\x2c\x4d\xd7\xbf\xed\x0e\x95\xaa\xe9\xc6\x3a\xe7\x7a\x04\x9d\x9
1\x71\xed\x79\xa4\x95\x8d\x4b\xf4\xfe\x88\x34\x37\x56\xd6\xd6\x25\x13\x25\xb3\xe4\x4f\xab\xf1\xd1\x38\xab\x71\xa7\x0b\xb8\x6c\x78\xbb\x80\xc6\x2a\x20\xca\x1a\x63\xd6\xfc\xc0\x8a\x37\xe5\xda\x37\xb2\xd9\x32\x4f\xe3\xb6\x4c\x63\x55\x08\x37\x18\x6f\xde\xf3\xf9\x55\x9a\x97\x18\x1c\x5c\x14\x5e\x36\x9c\x73\xcf\x25\xfc\x89\x6a\x59\xd7\xe5\x32\xc8\x42\xe2\xb9\x14\xcd\xea\x14\x79\x86\x4c\x53\x39\x55\xc4\x53\xa4\x21\x7b\xb3\x25\xeb\xe7\xd0\x53\xc4\x23\xf5\x69\x66\xb3\x67\xc1\x90\xcd\xde\x4f\x7b\x5e\xbb\x6c\x1c\xec\xc2\xc5\x27\xea\x1a\xb0\xb6\xf5\x1c\x35\x76\x73\x07\x61\x7f\x97\xef\x53\x59\xd3\x9f\xa3\x54\x0f\x0e\x93\x09\x68\x6d\xe2\xc3\xae\x9a\x59\x93\x9d\xa5\xb5\xc9\xff\x78\xad\x9a\xc5\x69\xad\xaa\x23\x68\xef\xe7\xc2\x10\x64\x7d\xbc\x34\x36\x0e\xad\x1a\x73\xb6\xf5\xfa\xee\x2c\xdb\x8c\xe7\x47\x90\x59\xb6\x6f\xca\x6e\x37\x45\x3f\x92\x03\x9b\x62\x77\x2a\x6a\x11\x7b\x3a\x0a\xac\xbb\xb5\xe7\xf5\xa9\xf6\x74\xfb\xfa\x73\x2f\x6d\xf7\xfa\x3b\x3e\x0c\x7f\x63\x7c\x9e\x1a\x86\x66\x06\xf6\x07\xbf\x23\x31\x86\x0c\x3e\xb8\xe0\x3f\x7b\x71\x9d\xb0\x17\x6c\x31\xac\x8e\x14\x83\xf3\xf5\x28\xd6\xef\x6c\x09\xf7\x87\x05\xab\x6e\x5b\xc2\x07\x50\xcc\x19\x20\x86\x78\xad\x69\xdf\xc6\x5c\xe7\x81\x1c\xcf\xdb\x81\x38\xc3\x65\xeb\x79\xcd\x22\x30\x3b\xc0\xeb\x6c\xf1\x15\xdd\x53\x3d\xb4\x13\xd0\x48\x17\xf7\x61\xdc\xb3\x7d\xc5\x19\x6b\x6a\xd8\x13\x92\x3e\xe0\x39\x51\xf1\x39\x5a\x1d\x26\x69\x59\xf0\xb2\x55\x35\xf8\x82\xd4\x88\xae\x37\x89\xda\x63\xf2\x54\xff\x05\x3e\x18\x99\xdd\x31\xfa\x02\x28\xab\xf3\xf2\xb6\xed\xd1\xec\x1a\x63\xe9\xde\x75\x4b\xa0\xd9\x8a\xbb\xdb\x89\x40\xb3\xbc\x06\x2f\xdb\x7f\x7b\x39\x01\x94\x59\x95\x0d\xbc\x1a\x0e\x70\x0f\xb1\x77\x60\xc9\x56\x75\x5f\x7c\x23\x7d\x9c\xd4\x5a\xca\xce\x88\x70\x9e\x37\x9b\xa8\x5d\x14\x76\x13\xf5\xb1\xf2\xf1\x71\xdf\xcf\x50\x6b\xec\xef\x1f\x69\xaf\x0b\x7c\x7b\x26\x85\xdd\xc4\x64\x87\x6a\xbf\x59\xb4\x0d\xa8\x0a\xb7\x1b\x78\xd6\x5e\xb4\x35\xc2\xed\x81\xc7\xce\xbe\xd5\xe3\xec\x27\xe0\xeb\x8e\xfd\xbb\xe8\xf5\x6c\xec\xba\xab\xe3\x20\x74\x7d\x01\xe0\x3a\x98\x95\x83\x08\xf2\x6c\xd4\xba\xa3\xf2\x28\x68\xdd\x0f\x59\xf7\xb7\xe6\x19\xee\x1f\x2e\xc6\x31\x68\xd5\xb4\xd1\x40\xa7\x9f\x11\xa0\x03\xe3\xa1\x07\x54\xdb\x60\xe1\x55\x81\xea\x20\x4e\x55\x8a\xfc\x13\x00\x00\xff\xff\x62\xe3\x87\x9c\xd7\x1a\x00\x00"), + }, + } + fs["/"].(*vfsgen۰DirInfo).entries = []os.FileInfo{ + fs["/{{.File.Package}}_micro.pb.go.tmpl"].(os.FileInfo), + fs["/{{.File.Package}}_micro_grpc.pb.go.tmpl"].(os.FileInfo), + fs["/{{.File.Package}}_micro_http.pb.go.tmpl"].(os.FileInfo), + } + + return fs +}() + +type vfsgen۰FS map[string]interface{} + +func (fs vfsgen۰FS) Open(path string) (http.File, error) { + path = pathpkg.Clean("/" + path) + f, ok := fs[path] + if !ok { + return nil, &os.PathError{Op: "open", Path: path, Err: os.ErrNotExist} + } + + switch f := f.(type) { + case *vfsgen۰CompressedFileInfo: + gr, err := gzip.NewReader(bytes.NewReader(f.compressedContent)) + if err != nil { + // This should never happen because we generate the gzip bytes such that they are always valid. + panic("unexpected error reading own gzip compressed bytes: " + err.Error()) + } + return &vfsgen۰CompressedFile{ + vfsgen۰CompressedFileInfo: f, + gr: gr, + }, nil + case *vfsgen۰DirInfo: + return &vfsgen۰Dir{ + vfsgen۰DirInfo: f, + }, nil + default: + // This should never happen because we generate only the above types. + panic(fmt.Sprintf("unexpected type %T", f)) + } +} + +// vfsgen۰CompressedFileInfo is a static definition of a gzip compressed file. 
+type vfsgen۰CompressedFileInfo struct { + name string + modTime time.Time + compressedContent []byte + uncompressedSize int64 +} + +func (f *vfsgen۰CompressedFileInfo) Readdir(count int) ([]os.FileInfo, error) { + return nil, fmt.Errorf("cannot Readdir from file %s", f.name) +} +func (f *vfsgen۰CompressedFileInfo) Stat() (os.FileInfo, error) { return f, nil } + +func (f *vfsgen۰CompressedFileInfo) GzipBytes() []byte { + return f.compressedContent +} + +func (f *vfsgen۰CompressedFileInfo) Name() string { return f.name } +func (f *vfsgen۰CompressedFileInfo) Size() int64 { return f.uncompressedSize } +func (f *vfsgen۰CompressedFileInfo) Mode() os.FileMode { return 0444 } +func (f *vfsgen۰CompressedFileInfo) ModTime() time.Time { return f.modTime } +func (f *vfsgen۰CompressedFileInfo) IsDir() bool { return false } +func (f *vfsgen۰CompressedFileInfo) Sys() interface{} { return nil } + +// vfsgen۰CompressedFile is an opened compressedFile instance. +type vfsgen۰CompressedFile struct { + *vfsgen۰CompressedFileInfo + gr *gzip.Reader + grPos int64 // Actual gr uncompressed position. + seekPos int64 // Seek uncompressed position. +} + +func (f *vfsgen۰CompressedFile) Read(p []byte) (n int, err error) { + if f.grPos > f.seekPos { + // Rewind to beginning. + err = f.gr.Reset(bytes.NewReader(f.compressedContent)) + if err != nil { + return 0, err + } + f.grPos = 0 + } + if f.grPos < f.seekPos { + // Fast-forward. + _, err = io.CopyN(ioutil.Discard, f.gr, f.seekPos-f.grPos) + if err != nil { + return 0, err + } + f.grPos = f.seekPos + } + n, err = f.gr.Read(p) + f.grPos += int64(n) + f.seekPos = f.grPos + return n, err +} +func (f *vfsgen۰CompressedFile) Seek(offset int64, whence int) (int64, error) { + switch whence { + case io.SeekStart: + f.seekPos = 0 + offset + case io.SeekCurrent: + f.seekPos += offset + case io.SeekEnd: + f.seekPos = f.uncompressedSize + offset + default: + panic(fmt.Errorf("invalid whence value: %v", whence)) + } + return f.seekPos, nil +} +func (f *vfsgen۰CompressedFile) Close() error { + return f.gr.Close() +} + +// vfsgen۰DirInfo is a static definition of a directory. +type vfsgen۰DirInfo struct { + name string + modTime time.Time + entries []os.FileInfo +} + +func (d *vfsgen۰DirInfo) Read([]byte) (int, error) { + return 0, fmt.Errorf("cannot Read from directory %s", d.name) +} +func (d *vfsgen۰DirInfo) Close() error { return nil } +func (d *vfsgen۰DirInfo) Stat() (os.FileInfo, error) { return d, nil } + +func (d *vfsgen۰DirInfo) Name() string { return d.name } +func (d *vfsgen۰DirInfo) Size() int64 { return 0 } +func (d *vfsgen۰DirInfo) Mode() os.FileMode { return 0755 | os.ModeDir } +func (d *vfsgen۰DirInfo) ModTime() time.Time { return d.modTime } +func (d *vfsgen۰DirInfo) IsDir() bool { return true } +func (d *vfsgen۰DirInfo) Sys() interface{} { return nil } + +// vfsgen۰Dir is an opened dir instance. +type vfsgen۰Dir struct { + *vfsgen۰DirInfo + pos int // Position within entries for Seek and Readdir. 
+} + +func (d *vfsgen۰Dir) Seek(offset int64, whence int) (int64, error) { + if offset == 0 && whence == io.SeekStart { + d.pos = 0 + return 0, nil + } + return 0, fmt.Errorf("unsupported Seek in directory %s", d.name) +} + +func (d *vfsgen۰Dir) Readdir(count int) ([]os.FileInfo, error) { + if d.pos >= len(d.entries) && count > 0 { + return nil, io.EOF + } + if count <= 0 || count > len(d.entries)-d.pos { + count = len(d.entries) - d.pos + } + e := d.entries[d.pos : d.pos+count] + d.pos += count + return e, nil +} diff --git a/assets/web-editor.jpg b/assets/web-editor.jpg deleted file mode 100644 index 490b552..0000000 Binary files a/assets/web-editor.jpg and /dev/null differ diff --git a/cmd/web-editor/main.go b/cmd/web-editor/main.go deleted file mode 100644 index 0bdffdc..0000000 --- a/cmd/web-editor/main.go +++ /dev/null @@ -1,121 +0,0 @@ -package main - -import ( - "encoding/json" - "errors" - "fmt" - "io/ioutil" - "log" - "net/http" - "os" - "os/exec" - "path/filepath" - - packr "github.com/gobuffalo/packr/v2" - "github.com/gorilla/handlers" - "github.com/gorilla/mux" -) - -func generate(w http.ResponseWriter, r *http.Request) { - // read input - decoder := json.NewDecoder(r.Body) - type Input struct { - Protobuf string `json:"protobuf"` - Template string `json:"template"` - } - var input Input - if err := decoder.Decode(&input); err != nil { - returnError(w, err) - return - } - - // create workspace - dir, err := ioutil.TempDir("", "pggt") - if err != nil { - returnError(w, err) - } - // clean up - defer func() { - if err = os.RemoveAll(dir); err != nil { - log.Printf("error: failed to remove temporary directory: %v", err) - } - }() - if err = ioutil.WriteFile(filepath.Join(dir, "example.proto"), []byte(input.Protobuf), 0644); err != nil { - returnError(w, err) - return - } - if err = ioutil.WriteFile(filepath.Join(dir, "example.output.tmpl"), []byte(input.Template), 0644); err != nil { - returnError(w, err) - return - } - - // generate - cmd := exec.Command("protoc", "-I"+dir, "--gotemplate_out=template_dir="+dir+",debug=true:"+dir, filepath.Join(dir, "example.proto")) // #nosec - out, err := cmd.CombinedOutput() - if err != nil { - returnError(w, errors.New(string(out))) - return - } - - // read output - content, err := ioutil.ReadFile(filepath.Join(dir, "example.output")) // #nosec - if err != nil { - returnError(w, err) - return - } - - returnContent(w, content) -} - -func returnContent(w http.ResponseWriter, output interface{}) { - payload := map[string]interface{}{ - "output": fmt.Sprintf("%s", output), - } - response, err := json.Marshal(payload) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - if _, err := w.Write(response); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - } -} - -func returnError(w http.ResponseWriter, err error) { - payload := map[string]interface{}{ - "error": fmt.Sprintf("%v", err), - } - response, err := json.Marshal(payload) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(http.StatusInternalServerError) - if _, err := w.Write(response); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - } -} - -func main() { - r := mux.NewRouter() - - box := packr.New("static", "./static") - - r.Handle("/", http.FileServer(box)) - r.HandleFunc("/generate", generate) - addr := 
fmt.Sprintf(":%s", os.Getenv("PORT")) - if addr == ":" { - addr = ":8080" - } - - fmt.Printf("Listening on %s...\n", addr) - h := handlers.LoggingHandler(os.Stderr, r) - h = handlers.CompressHandler(h) - h = handlers.RecoveryHandler()(h) - if err := http.ListenAndServe(addr, h); err != nil { - panic(err) - } -} diff --git a/cmd/web-editor/static/index.html b/cmd/web-editor/static/index.html deleted file mode 100644 index bc8af00..0000000 --- a/cmd/web-editor/static/index.html +++ /dev/null @@ -1,138 +0,0 @@ - - - protoc-gen-gotemplate web editor - - - - - - - - - - - - - - - - - - - - - - -
-  [markup elided: the deleted index.html laid out the web-editor page with a
-   "`protoc-gen-gotemplate`: input" pane, an "Output" pane, and the hint
-   "Command: protoc --gotemplate_out=template_dir=.:. example.proto"]
- Fork me on GitHub - - diff --git a/contrib/homebrew/protoc-gen-gotemplate.rb b/contrib/homebrew/protoc-gen-gotemplate.rb deleted file mode 100644 index 74241a5..0000000 --- a/contrib/homebrew/protoc-gen-gotemplate.rb +++ /dev/null @@ -1,20 +0,0 @@ -require "language/go" - -class ProtocGenGotemplate < Formula - desc "protocol generator + golang text/template (protobuf)" - homepage "https://github.com/moul/protoc-gen-gotemplate" - url "https://github.com/moul/protoc-gen-gotemplate/archive/v1.0.0.tar.gz" - sha256 "1ff57cd8513f1e871cf71dc8f2099bf64204af0df1b7397370827083e95bbb82" - head "https://github.com/moul/protoc-gen-gotemplate.git" - - depends_on "go" => :build - - def install - ENV["GOPATH"] = buildpath - ENV["GOBIN"] = buildpath - ENV["GO15VENDOREXPERIMENT"] = "1" - (buildpath/"src/github.com/moul/protoc-gen-gotemplate").install Dir["*"] - - system "go", "build", "-o", "#{bin}/protoc-gen-gotemplate", "-v", "github.com/moul/protoc-gen-gotemplate" - end -end diff --git a/encoder.go b/encoder.go index 3742886..fe33c9a 100644 --- a/encoder.go +++ b/encoder.go @@ -12,8 +12,7 @@ import ( "github.com/golang/protobuf/protoc-gen-go/descriptor" plugin_go "github.com/golang/protobuf/protoc-gen-go/plugin" - - pgghelpers "moul.io/protoc-gen-gotemplate/helpers" + pgghelpers "github.com/unistack-org/protoc-gen-micro/helpers" ) type GenericTemplateBasedEncoder struct { diff --git a/examples/arithmetics/Makefile b/examples/arithmetics/Makefile deleted file mode 100644 index 60bc04f..0000000 --- a/examples/arithmetics/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.PHONY: build -build: - mkdir -p output - protoc -I. --gotemplate_out=template_dir=templates,debug=true,all=true:output proto/*.proto - - -.PHONY: re -re: clean build - - -.PHONY: clean -clean: - rm -rf output diff --git a/examples/arithmetics/output/arithmetics b/examples/arithmetics/output/arithmetics deleted file mode 100644 index bd80b8a..0000000 --- a/examples/arithmetics/output/arithmetics +++ /dev/null @@ -1,5 +0,0 @@ - -add(1,2) = 3 -subtract(1,2) = -1 -multiply(1,2) = 2 -divide(2,1) = 2 diff --git a/examples/arithmetics/proto/arithmetics.proto b/examples/arithmetics/proto/arithmetics.proto deleted file mode 100644 index 92238fe..0000000 --- a/examples/arithmetics/proto/arithmetics.proto +++ /dev/null @@ -1,2 +0,0 @@ -syntax = "proto3"; -package Arithmetics; diff --git a/examples/arithmetics/templates/arithmetics.tmpl b/examples/arithmetics/templates/arithmetics.tmpl deleted file mode 100644 index c877598..0000000 --- a/examples/arithmetics/templates/arithmetics.tmpl +++ /dev/null @@ -1,6 +0,0 @@ -{{with $a := 1}}{{with $b := 2}} -add(1,2) = {{add $a $b}} -subtract(1,2) = {{subtract $a $b}} -multiply(1,2) = {{multiply $a $b}} -divide(2,1) = {{divide $b $a}} -{{end}}{{end}} \ No newline at end of file diff --git a/examples/concat/Makefile b/examples/concat/Makefile deleted file mode 100644 index 60bc04f..0000000 --- a/examples/concat/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.PHONY: build -build: - mkdir -p output - protoc -I. 
--gotemplate_out=template_dir=templates,debug=true,all=true:output proto/*.proto - - -.PHONY: re -re: clean build - - -.PHONY: clean -clean: - rm -rf output diff --git a/examples/concat/output/concat.txt b/examples/concat/output/concat.txt deleted file mode 100644 index 83e9930..0000000 --- a/examples/concat/output/concat.txt +++ /dev/null @@ -1,3 +0,0 @@ -I'm Eric -I'm Francis -I'm Arnold diff --git a/examples/concat/output/static.txt b/examples/concat/output/static.txt deleted file mode 100644 index 410455f..0000000 --- a/examples/concat/output/static.txt +++ /dev/null @@ -1 +0,0 @@ -This is static text.This is static text.This is static text. \ No newline at end of file diff --git a/examples/concat/proto/Eric.proto b/examples/concat/proto/Eric.proto deleted file mode 100644 index ce20a4b..0000000 --- a/examples/concat/proto/Eric.proto +++ /dev/null @@ -1,2 +0,0 @@ -syntax = "proto3"; -package Eric; diff --git a/examples/concat/proto/Francis.proto b/examples/concat/proto/Francis.proto deleted file mode 100644 index c381e5c..0000000 --- a/examples/concat/proto/Francis.proto +++ /dev/null @@ -1,2 +0,0 @@ -syntax = "proto3"; -package Francis; diff --git a/examples/concat/proto/arnold.proto b/examples/concat/proto/arnold.proto deleted file mode 100644 index f284210..0000000 --- a/examples/concat/proto/arnold.proto +++ /dev/null @@ -1,2 +0,0 @@ -syntax = "proto3"; -package Arnold; diff --git a/examples/concat/templates/concat.txt.tmpl b/examples/concat/templates/concat.txt.tmpl deleted file mode 100644 index 308309e..0000000 --- a/examples/concat/templates/concat.txt.tmpl +++ /dev/null @@ -1 +0,0 @@ -I'm {{.File.Package}} diff --git a/examples/concat/templates/static.txt.tmpl b/examples/concat/templates/static.txt.tmpl deleted file mode 100644 index 82165d2..0000000 --- a/examples/concat/templates/static.txt.tmpl +++ /dev/null @@ -1 +0,0 @@ -This is static text. \ No newline at end of file diff --git a/examples/dummy/Makefile b/examples/dummy/Makefile deleted file mode 100644 index cc27729..0000000 --- a/examples/dummy/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.PHONY: build -build: - mkdir -p output - protoc -I. 
--gotemplate_out=template_dir=templates,debug=true:output *.proto - - -.PHONY: re -re: clean build - - -.PHONY: clean -clean: - rm -rf output diff --git a/examples/dummy/dummy.proto b/examples/dummy/dummy.proto deleted file mode 100644 index c7ab48f..0000000 --- a/examples/dummy/dummy.proto +++ /dev/null @@ -1,32 +0,0 @@ -syntax = "proto3"; - -package dummy; - -message Dummy1 { - float aaa = 1; - string bbb = 2; - int32 ccc = 3; - int64 ddd = 4; - repeated string eee = 5; -} - -message Dummy2 { - float fff = 1; - Dummy1 ggg = 2; -} - -message Dummy3 {} - -service DummyService { - rpc Hhh(Dummy1) returns (Dummy2) {} - rpc Iii(Dummy2) returns (Dummy1) {} -} - -/* -service DummyService2 { - rpc Jjj(Dummy2) returns (Dummy2) {} -} - -service DummyService3 { -} -*/ \ No newline at end of file diff --git a/examples/dummy/output/export.json b/examples/dummy/output/export.json deleted file mode 100644 index e3735f8..0000000 --- a/examples/dummy/output/export.json +++ /dev/null @@ -1,844 +0,0 @@ -{ - "build-date": "2017-05-19T20:09:45.954357761+02:00", - "build-hostname": "manfred-spacegray.aircard", - "build-user": "moul", - "pwd": "/Users/moul/Git/moul/protoc-gen-gotemplate/examples/dummy", - "debug": false, - "destination-dir": ".", - "file": { - "name": "dummy.proto", - "package": "dummy", - "message_type": [ - { - "name": "Dummy1", - "field": [ - { - "name": "aaa", - "number": 1, - "label": 1, - "type": 2, - "json_name": "aaa" - }, - { - "name": "bbb", - "number": 2, - "label": 1, - "type": 9, - "json_name": "bbb" - }, - { - "name": "ccc", - "number": 3, - "label": 1, - "type": 5, - "json_name": "ccc" - }, - { - "name": "ddd", - "number": 4, - "label": 1, - "type": 3, - "json_name": "ddd" - }, - { - "name": "eee", - "number": 5, - "label": 3, - "type": 9, - "json_name": "eee" - } - ] - }, - { - "name": "Dummy2", - "field": [ - { - "name": "fff", - "number": 1, - "label": 1, - "type": 2, - "json_name": "fff" - }, - { - "name": "ggg", - "number": 2, - "label": 1, - "type": 11, - "type_name": ".dummy.Dummy1", - "json_name": "ggg" - } - ] - }, - { - "name": "Dummy3" - } - ], - "service": [ - { - "name": "DummyService", - "method": [ - { - "name": "Hhh", - "input_type": ".dummy.Dummy1", - "output_type": ".dummy.Dummy2", - "options": {} - }, - { - "name": "Iii", - "input_type": ".dummy.Dummy2", - "output_type": ".dummy.Dummy1", - "options": {} - } - ] - } - ], - "source_code_info": { - "location": [ - { - "span": [ - 0, - 0, - 22, - 1 - ] - }, - { - "path": [ - 12 - ], - "span": [ - 0, - 0, - 18 - ] - }, - { - "path": [ - 2 - ], - "span": [ - 2, - 8, - 13 - ] - }, - { - "path": [ - 4, - 0 - ], - "span": [ - 4, - 0, - 10, - 1 - ] - }, - { - "path": [ - 4, - 0, - 1 - ], - "span": [ - 4, - 8, - 14 - ] - }, - { - "path": [ - 4, - 0, - 2, - 0 - ], - "span": [ - 5, - 2, - 16 - ] - }, - { - "path": [ - 4, - 0, - 2, - 0, - 4 - ], - "span": [ - 5, - 2, - 4, - 16 - ] - }, - { - "path": [ - 4, - 0, - 2, - 0, - 5 - ], - "span": [ - 5, - 2, - 7 - ] - }, - { - "path": [ - 4, - 0, - 2, - 0, - 1 - ], - "span": [ - 5, - 8, - 11 - ] - }, - { - "path": [ - 4, - 0, - 2, - 0, - 3 - ], - "span": [ - 5, - 14, - 15 - ] - }, - { - "path": [ - 4, - 0, - 2, - 1 - ], - "span": [ - 6, - 2, - 17 - ] - }, - { - "path": [ - 4, - 0, - 2, - 1, - 4 - ], - "span": [ - 6, - 2, - 5, - 16 - ] - }, - { - "path": [ - 4, - 0, - 2, - 1, - 5 - ], - "span": [ - 6, - 2, - 8 - ] - }, - { - "path": [ - 4, - 0, - 2, - 1, - 1 - ], - "span": [ - 6, - 9, - 12 - ] - }, - { - "path": [ - 4, - 0, - 2, - 1, - 3 - ], - "span": [ - 6, - 15, - 16 - ] - }, - 
{ - "path": [ - 4, - 0, - 2, - 2 - ], - "span": [ - 7, - 2, - 16 - ] - }, - { - "path": [ - 4, - 0, - 2, - 2, - 4 - ], - "span": [ - 7, - 2, - 6, - 17 - ] - }, - { - "path": [ - 4, - 0, - 2, - 2, - 5 - ], - "span": [ - 7, - 2, - 7 - ] - }, - { - "path": [ - 4, - 0, - 2, - 2, - 1 - ], - "span": [ - 7, - 8, - 11 - ] - }, - { - "path": [ - 4, - 0, - 2, - 2, - 3 - ], - "span": [ - 7, - 14, - 15 - ] - }, - { - "path": [ - 4, - 0, - 2, - 3 - ], - "span": [ - 8, - 2, - 16 - ] - }, - { - "path": [ - 4, - 0, - 2, - 3, - 4 - ], - "span": [ - 8, - 2, - 7, - 16 - ] - }, - { - "path": [ - 4, - 0, - 2, - 3, - 5 - ], - "span": [ - 8, - 2, - 7 - ] - }, - { - "path": [ - 4, - 0, - 2, - 3, - 1 - ], - "span": [ - 8, - 8, - 11 - ] - }, - { - "path": [ - 4, - 0, - 2, - 3, - 3 - ], - "span": [ - 8, - 14, - 15 - ] - }, - { - "path": [ - 4, - 0, - 2, - 4 - ], - "span": [ - 9, - 2, - 26 - ] - }, - { - "path": [ - 4, - 0, - 2, - 4, - 4 - ], - "span": [ - 9, - 2, - 10 - ] - }, - { - "path": [ - 4, - 0, - 2, - 4, - 5 - ], - "span": [ - 9, - 11, - 17 - ] - }, - { - "path": [ - 4, - 0, - 2, - 4, - 1 - ], - "span": [ - 9, - 18, - 21 - ] - }, - { - "path": [ - 4, - 0, - 2, - 4, - 3 - ], - "span": [ - 9, - 24, - 25 - ] - }, - { - "path": [ - 4, - 1 - ], - "span": [ - 12, - 0, - 15, - 1 - ] - }, - { - "path": [ - 4, - 1, - 1 - ], - "span": [ - 12, - 8, - 14 - ] - }, - { - "path": [ - 4, - 1, - 2, - 0 - ], - "span": [ - 13, - 2, - 16 - ] - }, - { - "path": [ - 4, - 1, - 2, - 0, - 4 - ], - "span": [ - 13, - 2, - 12, - 16 - ] - }, - { - "path": [ - 4, - 1, - 2, - 0, - 5 - ], - "span": [ - 13, - 2, - 7 - ] - }, - { - "path": [ - 4, - 1, - 2, - 0, - 1 - ], - "span": [ - 13, - 8, - 11 - ] - }, - { - "path": [ - 4, - 1, - 2, - 0, - 3 - ], - "span": [ - 13, - 14, - 15 - ] - }, - { - "path": [ - 4, - 1, - 2, - 1 - ], - "span": [ - 14, - 2, - 17 - ] - }, - { - "path": [ - 4, - 1, - 2, - 1, - 4 - ], - "span": [ - 14, - 2, - 13, - 16 - ] - }, - { - "path": [ - 4, - 1, - 2, - 1, - 6 - ], - "span": [ - 14, - 2, - 8 - ] - }, - { - "path": [ - 4, - 1, - 2, - 1, - 1 - ], - "span": [ - 14, - 9, - 12 - ] - }, - { - "path": [ - 4, - 1, - 2, - 1, - 3 - ], - "span": [ - 14, - 15, - 16 - ] - }, - { - "path": [ - 4, - 2 - ], - "span": [ - 17, - 0, - 17 - ] - }, - { - "path": [ - 4, - 2, - 1 - ], - "span": [ - 17, - 8, - 14 - ] - }, - { - "path": [ - 6, - 0 - ], - "span": [ - 19, - 0, - 22, - 1 - ] - }, - { - "path": [ - 6, - 0, - 1 - ], - "span": [ - 19, - 8, - 20 - ] - }, - { - "path": [ - 6, - 0, - 2, - 0 - ], - "span": [ - 20, - 2, - 37 - ] - }, - { - "path": [ - 6, - 0, - 2, - 0, - 1 - ], - "span": [ - 20, - 6, - 9 - ] - }, - { - "path": [ - 6, - 0, - 2, - 0, - 2 - ], - "span": [ - 20, - 10, - 16 - ] - }, - { - "path": [ - 6, - 0, - 2, - 0, - 3 - ], - "span": [ - 20, - 27, - 33 - ] - }, - { - "path": [ - 6, - 0, - 2, - 1 - ], - "span": [ - 21, - 2, - 37 - ] - }, - { - "path": [ - 6, - 0, - 2, - 1, - 1 - ], - "span": [ - 21, - 6, - 9 - ] - }, - { - "path": [ - 6, - 0, - 2, - 1, - 2 - ], - "span": [ - 21, - 10, - 16 - ] - }, - { - "path": [ - 6, - 0, - 2, - 1, - 3 - ], - "span": [ - 21, - 27, - 33 - ] - } - ] - }, - "syntax": "proto3" - }, - "raw-filename": "export.json.tmpl", - "filename": "export.json.tmpl", - "template-dir": "templates", - "service": { - "name": "DummyService", - "method": [ - { - "name": "Hhh", - "input_type": ".dummy.Dummy1", - "output_type": ".dummy.Dummy2", - "options": {} - }, - { - "name": "Iii", - "input_type": ".dummy.Dummy2", - "output_type": ".dummy.Dummy1", - "options": {} - } - ] - }, - "enum": null -} \ No 
newline at end of file diff --git a/examples/dummy/output/static.txt b/examples/dummy/output/static.txt deleted file mode 100644 index 1a44595..0000000 --- a/examples/dummy/output/static.txt +++ /dev/null @@ -1 +0,0 @@ -This is static text \ No newline at end of file diff --git a/examples/dummy/templates/export.json.tmpl b/examples/dummy/templates/export.json.tmpl deleted file mode 100644 index b7645f6..0000000 --- a/examples/dummy/templates/export.json.tmpl +++ /dev/null @@ -1 +0,0 @@ -{{ . | prettyjson }} \ No newline at end of file diff --git a/examples/dummy/templates/static.txt.tmpl b/examples/dummy/templates/static.txt.tmpl deleted file mode 100644 index 1a44595..0000000 --- a/examples/dummy/templates/static.txt.tmpl +++ /dev/null @@ -1 +0,0 @@ -This is static text \ No newline at end of file diff --git a/examples/enum/Makefile b/examples/enum/Makefile deleted file mode 100644 index 60bc04f..0000000 --- a/examples/enum/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.PHONY: build -build: - mkdir -p output - protoc -I. --gotemplate_out=template_dir=templates,debug=true,all=true:output proto/*.proto - - -.PHONY: re -re: clean build - - -.PHONY: clean -clean: - rm -rf output diff --git a/examples/enum/output/enum.txt b/examples/enum/output/enum.txt deleted file mode 100644 index 4784749..0000000 --- a/examples/enum/output/enum.txt +++ /dev/null @@ -1,9 +0,0 @@ --red --blue --black --yellow --green --dark --white --gray --orange diff --git a/examples/enum/proto/sample.proto b/examples/enum/proto/sample.proto deleted file mode 100644 index 3d7acd4..0000000 --- a/examples/enum/proto/sample.proto +++ /dev/null @@ -1,14 +0,0 @@ -syntax = "proto3"; -package Sample; - -enum Colors { - red = 0; - blue = 1; - black = 2; - yellow = 3; - green = 4; - dark = 5; - white = 6; - gray = 7; - orange = 8; -} \ No newline at end of file diff --git a/examples/enum/templates/enum.txt.tmpl b/examples/enum/templates/enum.txt.tmpl deleted file mode 100644 index 9bbc9a7..0000000 --- a/examples/enum/templates/enum.txt.tmpl +++ /dev/null @@ -1,2 +0,0 @@ -{{range $m := "colors" | getEnumValue .Enum }}-{{$m.Name}} -{{end}} \ No newline at end of file diff --git a/examples/flow/Makefile b/examples/flow/Makefile deleted file mode 100644 index 2d37994..0000000 --- a/examples/flow/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.PHONY: build -build: - mkdir -p output - protoc -I. --gotemplate_out=template_dir=templates,debug=true:output ./protos/*.proto - - -.PHONY: re -re: clean build - - -.PHONY: clean -clean: - rm -rf output diff --git a/examples/flow/output/test/test_grpc_js.js b/examples/flow/output/test/test_grpc_js.js deleted file mode 100644 index 9410b6e..0000000 --- a/examples/flow/output/test/test_grpc_js.js +++ /dev/null @@ -1,311 +0,0 @@ -// @flow -// GENERATED CODE -- DO NOT EDIT! 
- -import base64 from 'base64-js' -import test_pb from './test_pb' - - -export type TestEnum = {| - ELEMENT_A?: 0; - ELEMENT_B?: 1; -|}; - - -export type TestMessage$TestNestedEnum = {| - ELEMENT_C?: 0; - ELEMENT_D?: 1; -|}; - - -export type TestMessage$TestNestedMessage = { - getS?: () => string; - setS?: (s: string) => void; - getT?: () => number; - setT?: (t: number) => void; -}; - -export type TestMessage = { - getA?: () => string; - setA?: (a: string) => void; - getB?: () => number; - setB?: (b: number) => void; - getC?: () => number; - setC?: (c: number) => void; - getD?: () => number; - setD?: (d: number) => void; - getE?: () => number; - setE?: (e: number) => void; - getNList?: () => Array; - setNList?: (n: Array) => void; - addN?: (n: string) => void; - clearNList?: () => void; - getOList?: () => Array; - setOList?: (o: Array) => void; - addO?: (o: number) => void; - clearOList?: () => void; - getPList?: () => Array; - setPList?: (p: Array) => void; - addP?: (p: number) => void; - clearPList?: () => void; - getQList?: () => Array; - setQList?: (q: Array) => void; - addQ?: (q: number) => void; - clearQList?: () => void; - getRList?: () => Array; - setRList?: (r: Array) => void; - addR?: (r: number) => void; - clearRList?: () => void; - getU?: () => test$TestEnum; - setU?: (u: test$TestEnum) => void; - getV?: () => test$TestMessage$TestNestedEnum; - setV?: (v: test$TestMessage$TestNestedEnum) => void; - getWList?: () => Array; - setWList?: (w: Array) => void; - addW?: (w: test$TestMessage$TestNestedMessage) => void; - clearWList?: () => void; -}; - - - - - -export type TestNoStreamRequest = { - getMessage?: () => test$TestMessage; - setMessage?: (message: test$TestMessage) => void; - clearMessage?: () => void; - hasMessage?: () => boolean; -}; - - - - - -export type TestNoStreamReply = { - getMessage?: () => test$TestMessage; - setMessage?: (message: test$TestMessage) => void; - clearMessage?: () => void; - hasMessage?: () => boolean; - getErrMsg?: () => string; - setErrMsg?: (err_msg: string) => void; -}; - - - - - -export type TestStreamRequestRequest = { - getMessage?: () => test$TestMessage; - setMessage?: (message: test$TestMessage) => void; - clearMessage?: () => void; - hasMessage?: () => boolean; -}; - - - - - -export type TestStreamRequestReply = { - getMessage?: () => test$TestMessage; - setMessage?: (message: test$TestMessage) => void; - clearMessage?: () => void; - hasMessage?: () => boolean; - getErrMsg?: () => string; - setErrMsg?: (err_msg: string) => void; -}; - - - - - -export type TestStreamReplyRequest = { - getMessage?: () => test$TestMessage; - setMessage?: (message: test$TestMessage) => void; - clearMessage?: () => void; - hasMessage?: () => boolean; -}; - - - - - -export type TestStreamReplyReply = { - getMessage?: () => test$TestMessage; - setMessage?: (message: test$TestMessage) => void; - clearMessage?: () => void; - hasMessage?: () => boolean; - getErrMsg?: () => string; - setErrMsg?: (err_msg: string) => void; -}; - - - - - -export type TestStreamBothRequest = { - getMessage?: () => test$TestMessage; - setMessage?: (message: test$TestMessage) => void; - clearMessage?: () => void; - hasMessage?: () => boolean; -}; - - - - - -export type TestStreamBothReply = { - getMessage?: () => test$TestMessage; - setMessage?: (message: test$TestMessage) => void; - clearMessage?: () => void; - hasMessage?: () => boolean; - getErrMsg?: () => string; - setErrMsg?: (err_msg: string) => void; -}; - -const serializeToBase64 = (byteArray: Uint8Array): string => 
base64.fromByteArray(byteArray) -const deserializeFromBase64 = (base64Encoded: string): Uint8Array => new Uint8Array(base64.toByteArray(base64Encoded)) - - -function serialize_test_TestNoStreamRequest(arg : TestNoStreamRequest): string { - if (!(arg instanceof test_pb.TestNoStreamRequest)) { - throw new Error('Expected argument of type TestNoStreamRequest') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_test_TestNoStreamRequest(base64Encoded: string): TestNoStreamRequest { - return test_pb.TestNoStreamRequest.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - -function serialize_test_TestNoStreamReply(arg : TestNoStreamReply): string { - if (!(arg instanceof test_pb.TestNoStreamReply)) { - throw new Error('Expected argument of type TestNoStreamReply') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_test_TestNoStreamReply(base64Encoded: string): TestNoStreamReply { - return test_pb.TestNoStreamReply.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - - -function serialize_test_TestStreamRequestRequest(arg : TestStreamRequestRequest): string { - if (!(arg instanceof test_pb.TestStreamRequestRequest)) { - throw new Error('Expected argument of type TestStreamRequestRequest') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_test_TestStreamRequestRequest(base64Encoded: string): TestStreamRequestRequest { - return test_pb.TestStreamRequestRequest.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - -function serialize_test_TestStreamRequestReply(arg : TestStreamRequestReply): string { - if (!(arg instanceof test_pb.TestStreamRequestReply)) { - throw new Error('Expected argument of type TestStreamRequestReply') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_test_TestStreamRequestReply(base64Encoded: string): TestStreamRequestReply { - return test_pb.TestStreamRequestReply.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - - -function serialize_test_TestStreamReplyRequest(arg : TestStreamReplyRequest): string { - if (!(arg instanceof test_pb.TestStreamReplyRequest)) { - throw new Error('Expected argument of type TestStreamReplyRequest') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_test_TestStreamReplyRequest(base64Encoded: string): TestStreamReplyRequest { - return test_pb.TestStreamReplyRequest.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - -function serialize_test_TestStreamReplyReply(arg : TestStreamReplyReply): string { - if (!(arg instanceof test_pb.TestStreamReplyReply)) { - throw new Error('Expected argument of type TestStreamReplyReply') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_test_TestStreamReplyReply(base64Encoded: string): TestStreamReplyReply { - return test_pb.TestStreamReplyReply.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - - -function serialize_test_TestStreamBothRequest(arg : TestStreamBothRequest): string { - if (!(arg instanceof test_pb.TestStreamBothRequest)) { - throw new Error('Expected argument of type TestStreamBothRequest') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_test_TestStreamBothRequest(base64Encoded: string): TestStreamBothRequest { - return test_pb.TestStreamBothRequest.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - -function serialize_test_TestStreamBothReply(arg : TestStreamBothReply): string { - if (!(arg instanceof 
test_pb.TestStreamBothReply)) { - throw new Error('Expected argument of type TestStreamBothReply') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_test_TestStreamBothReply(base64Encoded: string): TestStreamBothReply { - return test_pb.TestStreamBothReply.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - - -export default { - - TestService: { - - testNoStream: { - path: '/test.TestService/TestNoStream', - requestStream: false, - responseStream: false, - requestType: test_pb.TestNoStreamRequest, - responseType: test_pb.TestNoStreamReply, - requestSerialize: serialize_test_TestNoStreamRequest, - requestDeserialize: deserialize_test_TestNoStreamRequest, - responseSerialize: serialize_test_TestNoStreamReply, - responseDeserialize: deserialize_test_TestNoStreamReply, - }, - testStreamRequest: { - path: '/test.TestService/TestStreamRequest', - requestStream: true, - responseStream: false, - requestType: test_pb.TestStreamRequestRequest, - responseType: test_pb.TestStreamRequestReply, - requestSerialize: serialize_test_TestStreamRequestRequest, - requestDeserialize: deserialize_test_TestStreamRequestRequest, - responseSerialize: serialize_test_TestStreamRequestReply, - responseDeserialize: deserialize_test_TestStreamRequestReply, - }, - testStreamReply: { - path: '/test.TestService/TestStreamReply', - requestStream: false, - responseStream: true, - requestType: test_pb.TestStreamReplyRequest, - responseType: test_pb.TestStreamReplyReply, - requestSerialize: serialize_test_TestStreamReplyRequest, - requestDeserialize: deserialize_test_TestStreamReplyRequest, - responseSerialize: serialize_test_TestStreamReplyReply, - responseDeserialize: deserialize_test_TestStreamReplyReply, - }, - testStreamBoth: { - path: '/test.TestService/TestStreamBoth', - requestStream: true, - responseStream: true, - requestType: test_pb.TestStreamBothRequest, - responseType: test_pb.TestStreamBothReply, - requestSerialize: serialize_test_TestStreamBothRequest, - requestDeserialize: deserialize_test_TestStreamBothRequest, - responseSerialize: serialize_test_TestStreamBothReply, - responseDeserialize: deserialize_test_TestStreamBothReply, - }, - - } - -} diff --git a/examples/flow/protos/test.proto b/examples/flow/protos/test.proto deleted file mode 100644 index 66ea9ac..0000000 --- a/examples/flow/protos/test.proto +++ /dev/null @@ -1,52 +0,0 @@ -syntax = "proto3"; - -package test; - -option go_package = "github.com/united-drivers/models/go/test;testpb"; - -service TestService { - rpc TestNoStream(TestNoStreamRequest) returns (TestNoStreamReply); - rpc TestStreamRequest(stream TestStreamRequestRequest) returns (TestStreamRequestReply); - rpc TestStreamReply(TestStreamReplyRequest) returns (stream TestStreamReplyReply); - rpc TestStreamBoth(stream TestStreamBothRequest) returns (stream TestStreamBothReply); -} - -enum TestEnum { - ELEMENT_A = 0; - ELEMENT_B = 1; -} - -message TestMessage { - string a = 1; - int32 b = 2; - int64 c = 3; - float d = 4; - double e = 5; - repeated string n = 14; - repeated int32 o = 15; - repeated int64 p = 16; - repeated float q = 17; - repeated double r = 18; - - message TestNestedMessage { - string s = 1; - int32 t = 2; - } - enum TestNestedEnum { - ELEMENT_C = 0; - ELEMENT_D = 1; - } - - TestEnum u = 19; - TestNestedEnum v = 20; - repeated TestNestedMessage w = 21; -} - -message TestNoStreamRequest { TestMessage message = 1; } -message TestNoStreamReply { TestMessage message = 1; string err_msg = 2; } -message TestStreamRequestRequest { TestMessage 
message = 1; } -message TestStreamRequestReply { TestMessage message = 1; string err_msg = 2; } -message TestStreamReplyRequest { TestMessage message = 1; } -message TestStreamReplyReply { TestMessage message = 1; string err_msg = 2; } -message TestStreamBothRequest { TestMessage message = 1; } -message TestStreamBothReply { TestMessage message = 1; string err_msg = 2; } diff --git a/examples/flow/templates/%7B%7B.File.Package%7D%7D/%7B%7B.File.Package%7D%7D_grpc_js.js.tmpl b/examples/flow/templates/%7B%7B.File.Package%7D%7D/%7B%7B.File.Package%7D%7D_grpc_js.js.tmpl deleted file mode 100644 index e4cc024..0000000 --- a/examples/flow/templates/%7B%7B.File.Package%7D%7D/%7B%7B.File.Package%7D%7D_grpc_js.js.tmpl +++ /dev/null @@ -1,110 +0,0 @@ -// @flow -// GENERATED CODE -- DO NOT EDIT! - -{{- $Package:=.File.Package}} - -import base64 from 'base64-js' -import {{$Package}}_pb from './{{$Package}}_pb' - -{{- range .File.Dependency}} -import {{. | replace "/" "_" | trimSuffix ".proto" }}_pb from '../{{. | trimSuffix ".proto" }}_pb' -{{- end}} - -{{- define "fieldMethods"}} - {{- if isFieldRepeated .}} - get{{.Name | camelCase}}List?: () => {{. | jsType}}; - set{{.Name | camelCase}}List?: ({{.Name}}: {{. | jsType}}) => void; - add{{.Name | camelCase}}?: ({{.Name}}: {{. | jsType | trimPrefix "Array<" | trimSuffix ">"}}) => void; - clear{{.Name | camelCase}}List?: () => void; - {{- else}} - get{{.Name | camelCase}}?: () => {{. | jsType}}; - set{{.Name | camelCase}}?: ({{.Name}}: {{. | jsType}}) => void; - - {{- if isFieldMessage .}} - clear{{.Name | camelCase}}?: () => void; - has{{.Name | camelCase}}?: () => boolean; - {{- end}} - {{- end}} -{{- end}} - -{{range .File.EnumType}} -export type {{.Name}} = {| - {{- range .Value}} - {{.Name}}?: {{.Number}}; - {{- end}} -|}; -{{- end}} - -{{- range .File.MessageType}} -{{- $MessageType := .Name}} - -{{range .EnumType}} -export type {{$MessageType}}${{.Name}} = {| - {{- range .Value}} - {{.Name}}?: {{.Number}}; - {{- end}} -|}; -{{- end}} - -{{range .NestedType}} -export type {{$MessageType}}${{.Name}} = { - {{- range .Field}} - {{- template "fieldMethods" .}} - {{- end}} -}; -{{- end}} - -export type {{.Name}} = { - {{- range .Field}} - {{- template "fieldMethods" .}} - {{- end}} -}; - -{{- end}} - -const serializeToBase64 = (byteArray: Uint8Array): string => base64.fromByteArray(byteArray) -const deserializeFromBase64 = (base64Encoded: string): Uint8Array => new Uint8Array(base64.toByteArray(base64Encoded)) - -{{range .File.Service}}{{range .Method}} -function serialize_{{$Package}}_{{.InputType | shortType}}(arg : {{.InputType | shortType}}): string { - if (!(arg instanceof {{$Package}}_pb.{{.InputType | shortType}})) { - throw new Error('Expected argument of type {{.InputType | shortType}}') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_{{$Package}}_{{.InputType | shortType}}(base64Encoded: string): {{.InputType | shortType}} { - return {{$Package}}_pb.{{.InputType | shortType}}.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - -function serialize_{{$Package}}_{{.OutputType | shortType}}(arg : {{.OutputType | shortType}}): string { - if (!(arg instanceof {{$Package}}_pb.{{.OutputType | shortType}})) { - throw new Error('Expected argument of type {{.OutputType | shortType}}') - } - return serializeToBase64(arg.serializeBinary()) -} - -function deserialize_{{$Package}}_{{.OutputType | shortType}}(base64Encoded: string): {{.OutputType | shortType}} { - return {{$Package}}_pb.{{.OutputType | 
shortType}}.deserializeBinary(deserializeFromBase64(base64Encoded)) -} - -{{end}}{{end}} -export default { - {{range .File.Service}} - {{.Name}}: { - {{$serviceName:=.Name}} - {{range .Method}}{{.Name | lowerCamelCase}}: { - path: '/{{$Package}}.{{$serviceName}}/{{.Name}}', - requestStream: {{.ClientStreaming | default "false"}}, - responseStream: {{.ServerStreaming | default "false"}}, - requestType: {{$Package}}_pb.{{.InputType | shortType}}, - responseType: {{$Package}}_pb.{{.OutputType | shortType}}, - requestSerialize: serialize_{{$Package}}_{{.InputType | shortType}}, - requestDeserialize: deserialize_{{$Package}}_{{.InputType | shortType}}, - responseSerialize: serialize_{{$Package}}_{{.OutputType | shortType}}, - responseDeserialize: deserialize_{{$Package}}_{{.OutputType | shortType}}, - }, - {{end}} - } - {{end}} -} diff --git a/examples/go-generate/Makefile b/examples/go-generate/Makefile deleted file mode 100644 index 503178b..0000000 --- a/examples/go-generate/Makefile +++ /dev/null @@ -1,2 +0,0 @@ -all: - go generate diff --git a/examples/go-generate/example.go b/examples/go-generate/example.go deleted file mode 100644 index f1dfe3c..0000000 --- a/examples/go-generate/example.go +++ /dev/null @@ -1,4 +0,0 @@ -package example - -//go:generate protoc --go_out=./gen/ example.proto -//go:generate protoc --gotemplate_out=./gen/ example.proto diff --git a/examples/go-generate/example.proto b/examples/go-generate/example.proto deleted file mode 100644 index 42f70fd..0000000 --- a/examples/go-generate/example.proto +++ /dev/null @@ -1,16 +0,0 @@ -syntax = "proto3"; - -package example; - -service Sum { - rpc Sum(SumRequest) returns (SumReply) {}; -} - -message SumRequest { - int32 a = 1; - int32 b = 2; -} - -message SumReply { - int32 c = 1; -} \ No newline at end of file diff --git a/examples/go-generate/gen/doc.gen.go b/examples/go-generate/gen/doc.gen.go deleted file mode 100644 index 1277fcf..0000000 --- a/examples/go-generate/gen/doc.gen.go +++ /dev/null @@ -1,11 +0,0 @@ -// Code generated by protoc-gen-gotemplate -package example - -// Methods -// ------- -// * Sum -// -// Message types -// ------------- -// * SumRequest -// * SumReply diff --git a/examples/go-generate/gen/example.pb.go b/examples/go-generate/gen/example.pb.go deleted file mode 100644 index 800386a..0000000 --- a/examples/go-generate/gen/example.pb.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by protoc-gen-go. -// source: example.proto -// DO NOT EDIT! - -/* -Package example is a generated protocol buffer package. - -It is generated from these files: - example.proto - -It has these top-level messages: - SumRequest - SumReply -*/ -package example - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
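-// The SumRequest and SumReply structs that follow mirror the two messages in
-// example.proto; their Get accessors are nil-safe, so a handler could combine
-// the inputs as, for example, req.GetA() + req.GetB() (req here names a
-// hypothetical *SumRequest value, shown only as a usage sketch).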
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type SumRequest struct { - A int32 `protobuf:"varint,1,opt,name=a" json:"a,omitempty"` - B int32 `protobuf:"varint,2,opt,name=b" json:"b,omitempty"` -} - -func (m *SumRequest) Reset() { *m = SumRequest{} } -func (m *SumRequest) String() string { return proto.CompactTextString(m) } -func (*SumRequest) ProtoMessage() {} -func (*SumRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *SumRequest) GetA() int32 { - if m != nil { - return m.A - } - return 0 -} - -func (m *SumRequest) GetB() int32 { - if m != nil { - return m.B - } - return 0 -} - -type SumReply struct { - C int32 `protobuf:"varint,1,opt,name=c" json:"c,omitempty"` -} - -func (m *SumReply) Reset() { *m = SumReply{} } -func (m *SumReply) String() string { return proto.CompactTextString(m) } -func (*SumReply) ProtoMessage() {} -func (*SumReply) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *SumReply) GetC() int32 { - if m != nil { - return m.C - } - return 0 -} - -func init() { - proto.RegisterType((*SumRequest)(nil), "example.SumRequest") - proto.RegisterType((*SumReply)(nil), "example.SumReply") -} - -func init() { proto.RegisterFile("example.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 124 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x4d, 0xad, 0x48, 0xcc, - 0x2d, 0xc8, 0x49, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x87, 0x72, 0x95, 0x34, 0xb8, - 0xb8, 0x82, 0x4b, 0x73, 0x83, 0x52, 0x0b, 0x4b, 0x53, 0x8b, 0x4b, 0x84, 0x78, 0xb8, 0x18, 0x13, - 0x25, 0x18, 0x15, 0x18, 0x35, 0x58, 0x83, 0x18, 0x13, 0x41, 0xbc, 0x24, 0x09, 0x26, 0x08, 0x2f, - 0x49, 0x49, 0x82, 0x8b, 0x03, 0xac, 0xb2, 0x20, 0xa7, 0x12, 0x24, 0x93, 0x0c, 0x53, 0x97, 0x6c, - 0x64, 0xc6, 0xc5, 0x1c, 0x5c, 0x9a, 0x2b, 0xa4, 0x0f, 0xa1, 0x84, 0xf5, 0x60, 0x56, 0x21, 0x0c, - 0x96, 0x12, 0x44, 0x15, 0x2c, 0xc8, 0xa9, 0x54, 0x62, 0x48, 0x62, 0x03, 0xbb, 0xc5, 0x18, 0x10, - 0x00, 0x00, 0xff, 0xff, 0x2b, 0xf1, 0xe9, 0x56, 0x9c, 0x00, 0x00, 0x00, -} diff --git a/examples/go-generate/templates/doc.gen.go.tmpl b/examples/go-generate/templates/doc.gen.go.tmpl deleted file mode 100644 index c823669..0000000 --- a/examples/go-generate/templates/doc.gen.go.tmpl +++ /dev/null @@ -1,14 +0,0 @@ -// Code generated by protoc-gen-gotemplate -package {{.File.Package}} - -// Methods -// ------- -{{- range .Service.Method}} -// * {{.Name}} -{{- end}} -// -// Message types -// ------------- -{{- range .File.MessageType}} -// * {{.Name}} -{{- end}} diff --git a/examples/go-kit/.gitignore b/examples/go-kit/.gitignore deleted file mode 100644 index dc01103..0000000 --- a/examples/go-kit/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/vendor/ -/server diff --git a/examples/go-kit/Makefile b/examples/go-kit/Makefile deleted file mode 100644 index 409f4f4..0000000 --- a/examples/go-kit/Makefile +++ /dev/null @@ -1,24 +0,0 @@ -SOURCES := $(shell find . -name "*.proto" -not -path ./vendor/\*) - -TARGETS_GO := $(foreach source, $(SOURCES), $(source)_go) -TARGETS_TMPL := $(foreach source, $(SOURCES), $(source)_tmpl) - -service_name = $(word 2,$(subst /, ,$1)) - -.PHONY: build -build: server - -server: $(TARGETS_GO) $(TARGETS_TMPL) - glide install - go build -o server . - -$(TARGETS_GO): %_go: - protoc --go_out=plugins=grpc:. 
"$*" - @mkdir -p services/$(call service_name,$*)/gen/pb - @mv ./services/$(call service_name,$*)/$(call service_name,$*).pb.go ./services/$(call service_name,$*)/gen/pb/pb.go - -$(TARGETS_TMPL): %_tmpl: - @mkdir -p $(dir $*)gen - protoc -I. --gotemplate_out=destination_dir=services/$(call service_name,$*)/gen,template_dir=templates:services "$*" - @rm -rf services/services # need to investigate why this directory is created - gofmt -w $(dir $*)gen diff --git a/examples/go-kit/glide.lock b/examples/go-kit/glide.lock deleted file mode 100644 index 9b26bce..0000000 --- a/examples/go-kit/glide.lock +++ /dev/null @@ -1,48 +0,0 @@ -hash: e225ab17b49f8d6601b2bd813d43ad34ee04380d29c278c11919efd454c7b2d7 -updated: 2017-03-16T17:03:34.437968115+01:00 -imports: -- name: github.com/dgrijalva/jwt-go - version: 2268707a8f0843315e2004ee4f1d021dc08baedf -- name: github.com/go-kit/kit - version: fadad6fffe0466b19df9efd9acde5c9a52df5fa4 - subpackages: - - auth/jwt - - endpoint - - log - - transport/grpc - - transport/http -- name: github.com/go-logfmt/logfmt - version: 390ab7935ee28ec6b286364bba9b4dd6410cb3d5 -- name: github.com/go-stack/stack - version: 100eb0c0a9c5b306ca2fb4f165df21d80ada4b82 -- name: github.com/golang/protobuf - version: c9c7427a2a70d2eb3bafa0ab2dc163e45f143317 - subpackages: - - proto -- name: github.com/gorilla/handlers - version: 3a5767ca75ece5f7f1440b1d16975247f8d8b221 -- name: github.com/kr/logfmt - version: b84e30acd515aadc4b783ad4ff83aff3299bdfe0 -- name: golang.org/x/net - version: a6577fac2d73be281a500b310739095313165611 - subpackages: - - context - - context/ctxhttp - - http2 - - http2/hpack - - idna - - internal/timeseries - - lex/httplex - - trace -- name: google.golang.org/grpc - version: 777daa17ff9b5daef1cfdf915088a2ada3332bf0 - subpackages: - - codes - - credentials - - grpclog - - internal - - metadata - - naming - - peer - - transport -testImports: [] diff --git a/examples/go-kit/glide.yaml b/examples/go-kit/glide.yaml deleted file mode 100644 index a7ec331..0000000 --- a/examples/go-kit/glide.yaml +++ /dev/null @@ -1,20 +0,0 @@ -package: moul.io/protoc-gen-gotemplate/examples/go-kit -import: -- package: github.com/go-kit/kit - subpackages: - - auth/jwt - - endpoint - - log - - transport/grpc - - transport/http -- package: github.com/golang/protobuf - subpackages: - - proto - version: c9c7427a2a70d2eb3bafa0ab2dc163e45f143317 -- package: github.com/gorilla/handlers -- package: golang.org/x/net - subpackages: - - context - - http2 - version: a6577fac2d73be281a500b310739095313165611 -- package: google.golang.org/grpc diff --git a/examples/go-kit/main.go b/examples/go-kit/main.go deleted file mode 100644 index 1de10ff..0000000 --- a/examples/go-kit/main.go +++ /dev/null @@ -1,93 +0,0 @@ -package main - -import ( - "fmt" - "net" - "net/http" - "os" - "os/signal" - "syscall" - - "github.com/go-kit/kit/log" - "github.com/gorilla/handlers" - "google.golang.org/grpc" - - session_svc "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session" - session_endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/endpoints" - session_pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/pb" - session_grpctransport "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/transports/grpc" - session_httptransport "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/transports/http" - - sprint_svc "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint" - sprint_endpoints 
"moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/endpoints" - sprint_pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/pb" - sprint_grpctransport "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/transports/grpc" - sprint_httptransport "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/transports/http" - - user_svc "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user" - user_endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/endpoints" - user_pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/pb" - user_grpctransport "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/transports/grpc" - user_httptransport "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/transports/http" -) - -func main() { - mux := http.NewServeMux() - errc := make(chan error) - s := grpc.NewServer() - var logger log.Logger - { - logger = log.NewLogfmtLogger(os.Stdout) - logger = log.With(logger, "ts", log.DefaultTimestampUTC) - logger = log.With(logger, "caller", log.DefaultCaller) - } - - // initialize services - { - svc := session_svc.New() - endpoints := session_endpoints.MakeEndpoints(svc) - srv := session_grpctransport.MakeGRPCServer(endpoints) - session_pb.RegisterSessionServiceServer(s, srv) - session_httptransport.RegisterHandlers(svc, mux, endpoints) - } - { - svc := sprint_svc.New() - endpoints := sprint_endpoints.MakeEndpoints(svc) - srv := sprint_grpctransport.MakeGRPCServer(endpoints) - sprint_pb.RegisterSprintServiceServer(s, srv) - sprint_httptransport.RegisterHandlers(svc, mux, endpoints) - } - { - svc := user_svc.New() - endpoints := user_endpoints.MakeEndpoints(svc) - srv := user_grpctransport.MakeGRPCServer(endpoints) - user_pb.RegisterUserServiceServer(s, srv) - user_httptransport.RegisterHandlers(svc, mux, endpoints) - } - - // start servers - go func() { - c := make(chan os.Signal, 1) - signal.Notify(c, syscall.SIGINT, syscall.SIGTERM) - errc <- fmt.Errorf("%s", <-c) - }() - - go func() { - logger := log.With(logger, "transport", "HTTP") - logger.Log("addr", ":8000") - errc <- http.ListenAndServe(":8000", handlers.LoggingHandler(os.Stderr, mux)) - }() - - go func() { - logger := log.With(logger, "transport", "gRPC") - ln, err := net.Listen("tcp", ":9000") - if err != nil { - errc <- err - return - } - logger.Log("addr", ":9000") - errc <- s.Serve(ln) - }() - - logger.Log("exit", <-errc) -} diff --git a/examples/go-kit/services/session/gen/README.md b/examples/go-kit/services/session/gen/README.md deleted file mode 100644 index e69de29..0000000 diff --git a/examples/go-kit/services/session/gen/client/grpc/client.go b/examples/go-kit/services/session/gen/client/grpc/client.go deleted file mode 100644 index d09c75c..0000000 --- a/examples/go-kit/services/session/gen/client/grpc/client.go +++ /dev/null @@ -1,45 +0,0 @@ -package session_clientgrpc - -import ( - context "context" - - jwt "github.com/go-kit/kit/auth/jwt" - "github.com/go-kit/kit/endpoint" - "github.com/go-kit/kit/log" - grpctransport "github.com/go-kit/kit/transport/grpc" - "google.golang.org/grpc" - - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/pb" -) - -func New(conn *grpc.ClientConn, logger log.Logger) pb.SessionServiceServer { - - var loginEndpoint endpoint.Endpoint - { - loginEndpoint = grpctransport.NewClient( - conn, - "session.SessionService", - "Login", - 
EncodeLoginRequest, - DecodeLoginResponse, - pb.LoginResponse{}, - append([]grpctransport.ClientOption{}, grpctransport.ClientBefore(jwt.FromGRPCContext()))..., - ).Endpoint() - } - - return &endpoints.Endpoints{ - - LoginEndpoint: loginEndpoint, - } -} - -func EncodeLoginRequest(_ context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.LoginRequest) - return req, nil -} - -func DecodeLoginResponse(_ context.Context, grpcResponse interface{}) (interface{}, error) { - response := grpcResponse.(*pb.LoginResponse) - return response, nil -} diff --git a/examples/go-kit/services/session/gen/endpoints/endpoints.go b/examples/go-kit/services/session/gen/endpoints/endpoints.go deleted file mode 100644 index b7196c2..0000000 --- a/examples/go-kit/services/session/gen/endpoints/endpoints.go +++ /dev/null @@ -1,46 +0,0 @@ -package session_endpoints - -import ( - context "context" - "fmt" - - "github.com/go-kit/kit/endpoint" - oldcontext "golang.org/x/net/context" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/pb" -) - -var _ = endpoint.Chain -var _ = fmt.Errorf -var _ = context.Background - -type StreamEndpoint func(server interface{}, req interface{}) (err error) - -type Endpoints struct { - LoginEndpoint endpoint.Endpoint -} - -func (e *Endpoints) Login(ctx oldcontext.Context, in *pb.LoginRequest) (*pb.LoginResponse, error) { - out, err := e.LoginEndpoint(ctx, in) - if err != nil { - return &pb.LoginResponse{ErrMsg: err.Error()}, err - } - return out.(*pb.LoginResponse), err -} - -func MakeLoginEndpoint(svc pb.SessionServiceServer) endpoint.Endpoint { - return func(ctx context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.LoginRequest) - rep, err := svc.Login(ctx, req) - if err != nil { - return &pb.LoginResponse{ErrMsg: err.Error()}, err - } - return rep, nil - } -} - -func MakeEndpoints(svc pb.SessionServiceServer) Endpoints { - return Endpoints{ - - LoginEndpoint: MakeLoginEndpoint(svc), - } -} diff --git a/examples/go-kit/services/session/gen/pb/pb.go b/examples/go-kit/services/session/gen/pb/pb.go deleted file mode 100644 index 499933d..0000000 --- a/examples/go-kit/services/session/gen/pb/pb.go +++ /dev/null @@ -1,178 +0,0 @@ -// Code generated by protoc-gen-go. -// source: services/session/session.proto -// DO NOT EDIT! - -/* -Package session is a generated protocol buffer package. - -It is generated from these files: - services/session/session.proto - -It has these top-level messages: - LoginRequest - LoginResponse -*/ -package session - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
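-// The LoginRequest and LoginResponse types below back the single
-// session.SessionService/Login RPC declared in services/session/session.proto:
-// the request carries username and password, the response a token plus an
-// err_msg string that the generated go-kit endpoints fill in to surface errors.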
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type LoginRequest struct { - Username string `protobuf:"bytes,1,opt,name=username" json:"username,omitempty"` - Password string `protobuf:"bytes,2,opt,name=password" json:"password,omitempty"` -} - -func (m *LoginRequest) Reset() { *m = LoginRequest{} } -func (m *LoginRequest) String() string { return proto.CompactTextString(m) } -func (*LoginRequest) ProtoMessage() {} -func (*LoginRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *LoginRequest) GetUsername() string { - if m != nil { - return m.Username - } - return "" -} - -func (m *LoginRequest) GetPassword() string { - if m != nil { - return m.Password - } - return "" -} - -type LoginResponse struct { - Token string `protobuf:"bytes,1,opt,name=token" json:"token,omitempty"` - ErrMsg string `protobuf:"bytes,2,opt,name=err_msg,json=errMsg" json:"err_msg,omitempty"` -} - -func (m *LoginResponse) Reset() { *m = LoginResponse{} } -func (m *LoginResponse) String() string { return proto.CompactTextString(m) } -func (*LoginResponse) ProtoMessage() {} -func (*LoginResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *LoginResponse) GetToken() string { - if m != nil { - return m.Token - } - return "" -} - -func (m *LoginResponse) GetErrMsg() string { - if m != nil { - return m.ErrMsg - } - return "" -} - -func init() { - proto.RegisterType((*LoginRequest)(nil), "session.LoginRequest") - proto.RegisterType((*LoginResponse)(nil), "session.LoginResponse") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// Client API for SessionService service - -type SessionServiceClient interface { - Login(ctx context.Context, in *LoginRequest, opts ...grpc.CallOption) (*LoginResponse, error) -} - -type sessionServiceClient struct { - cc *grpc.ClientConn -} - -func NewSessionServiceClient(cc *grpc.ClientConn) SessionServiceClient { - return &sessionServiceClient{cc} -} - -func (c *sessionServiceClient) Login(ctx context.Context, in *LoginRequest, opts ...grpc.CallOption) (*LoginResponse, error) { - out := new(LoginResponse) - err := grpc.Invoke(ctx, "/session.SessionService/Login", in, out, c.cc, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -// Server API for SessionService service - -type SessionServiceServer interface { - Login(context.Context, *LoginRequest) (*LoginResponse, error) -} - -func RegisterSessionServiceServer(s *grpc.Server, srv SessionServiceServer) { - s.RegisterService(&_SessionService_serviceDesc, srv) -} - -func _SessionService_Login_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(LoginRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(SessionServiceServer).Login(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/session.SessionService/Login", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(SessionServiceServer).Login(ctx, req.(*LoginRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _SessionService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "session.SessionService", - HandlerType: (*SessionServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Login", - Handler: _SessionService_Login_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "services/session/session.proto", -} - -func init() { proto.RegisterFile("services/session/session.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 188 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0x92, 0x2b, 0x4e, 0x2d, 0x2a, - 0xcb, 0x4c, 0x4e, 0x2d, 0xd6, 0x2f, 0x4e, 0x2d, 0x2e, 0xce, 0xcc, 0xcf, 0x83, 0xd1, 0x7a, 0x05, - 0x45, 0xf9, 0x25, 0xf9, 0x42, 0xec, 0x50, 0xae, 0x92, 0x1b, 0x17, 0x8f, 0x4f, 0x7e, 0x7a, 0x66, - 0x5e, 0x50, 0x6a, 0x61, 0x69, 0x6a, 0x71, 0x89, 0x90, 0x14, 0x17, 0x47, 0x69, 0x71, 0x6a, 0x51, - 0x5e, 0x62, 0x6e, 0xaa, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x67, 0x10, 0x9c, 0x0f, 0x92, 0x2b, 0x48, - 0x2c, 0x2e, 0x2e, 0xcf, 0x2f, 0x4a, 0x91, 0x60, 0x82, 0xc8, 0xc1, 0xf8, 0x4a, 0x76, 0x5c, 0xbc, - 0x50, 0x73, 0x8a, 0x0b, 0xf2, 0xf3, 0x8a, 0x53, 0x85, 0x44, 0xb8, 0x58, 0x4b, 0xf2, 0xb3, 0x53, - 0xf3, 0xa0, 0xa6, 0x40, 0x38, 0x42, 0xe2, 0x5c, 0xec, 0xa9, 0x45, 0x45, 0xf1, 0xb9, 0xc5, 0xe9, - 0x50, 0x13, 0xd8, 0x52, 0x8b, 0x8a, 0x7c, 0x8b, 0xd3, 0x8d, 0xbc, 0xb8, 0xf8, 0x82, 0x21, 0x4e, - 0x0a, 0x86, 0xb8, 0x5c, 0xc8, 0x82, 0x8b, 0x15, 0x6c, 0xa2, 0x90, 0xa8, 0x1e, 0xcc, 0xed, 0xc8, - 0x2e, 0x95, 0x12, 0x43, 0x17, 0x86, 0x58, 0xac, 0xc4, 0x90, 0xc4, 0x06, 0xf6, 0xa3, 0x31, 0x20, - 0x00, 0x00, 0xff, 0xff, 0x29, 0x3f, 0x91, 0xc7, 0x05, 0x01, 0x00, 0x00, -} diff --git a/examples/go-kit/services/session/gen/transports/grpc/grpc.go b/examples/go-kit/services/session/gen/transports/grpc/grpc.go deleted file mode 100644 index 99b65b3..0000000 --- a/examples/go-kit/services/session/gen/transports/grpc/grpc.go +++ /dev/null @@ -1,65 +0,0 @@ -package session_grpctransport - -import ( - context "context" - "fmt" - - grpctransport "github.com/go-kit/kit/transport/grpc" - oldcontext "golang.org/x/net/context" - - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/pb" -) - -// avoid import errors -var _ = fmt.Errorf - -func MakeGRPCServer(endpoints endpoints.Endpoints) pb.SessionServiceServer { - var options []grpctransport.ServerOption - _ = options - return &grpcServer{ - - login: grpctransport.NewServer( - endpoints.LoginEndpoint, - decodeRequest, - 
encodeLoginResponse, - options..., - ), - } -} - -type grpcServer struct { - login grpctransport.Handler -} - -func (s *grpcServer) Login(ctx oldcontext.Context, req *pb.LoginRequest) (*pb.LoginResponse, error) { - _, rep, err := s.login.ServeGRPC(ctx, req) - if err != nil { - return nil, err - } - return rep.(*pb.LoginResponse), nil -} - -func encodeLoginResponse(ctx context.Context, response interface{}) (interface{}, error) { - resp := response.(*pb.LoginResponse) - return resp, nil -} - -func decodeRequest(ctx context.Context, grpcReq interface{}) (interface{}, error) { - return grpcReq, nil -} - -type streamHandler interface { - Do(server interface{}, req interface{}) (err error) -} - -type server struct { - e endpoints.StreamEndpoint -} - -func (s server) Do(server interface{}, req interface{}) (err error) { - if err := s.e(server, req); err != nil { - return err - } - return nil -} diff --git a/examples/go-kit/services/session/gen/transports/http/http.go b/examples/go-kit/services/session/gen/transports/http/http.go deleted file mode 100644 index d83c9c5..0000000 --- a/examples/go-kit/services/session/gen/transports/http/http.go +++ /dev/null @@ -1,46 +0,0 @@ -package session_httptransport - -import ( - "context" - "encoding/json" - "log" - "net/http" - - gokit_endpoint "github.com/go-kit/kit/endpoint" - httptransport "github.com/go-kit/kit/transport/http" - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/pb" -) - -var _ = log.Printf -var _ = gokit_endpoint.Chain -var _ = httptransport.NewClient - -func MakeLoginHandler(svc pb.SessionServiceServer, endpoint gokit_endpoint.Endpoint) *httptransport.Server { - return httptransport.NewServer( - endpoint, - decodeLoginRequest, - encodeResponse, - []httptransport.ServerOption{}..., - ) -} - -func decodeLoginRequest(ctx context.Context, r *http.Request) (interface{}, error) { - var req pb.LoginRequest - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - return nil, err - } - return &req, nil -} - -func encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error { - return json.NewEncoder(w).Encode(response) -} - -func RegisterHandlers(svc pb.SessionServiceServer, mux *http.ServeMux, endpoints endpoints.Endpoints) error { - - log.Println("new HTTP endpoint: \"/Login\" (service=Session)") - mux.Handle("/Login", MakeLoginHandler(svc, endpoints.LoginEndpoint)) - - return nil -} diff --git a/examples/go-kit/services/session/service.go b/examples/go-kit/services/session/service.go deleted file mode 100644 index 412b6eb..0000000 --- a/examples/go-kit/services/session/service.go +++ /dev/null @@ -1,19 +0,0 @@ -package sessionsvc - -import ( - "fmt" - - "golang.org/x/net/context" - - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/pb" -) - -type Service struct{} - -func New() pb.SessionServiceServer { - return &Service{} -} - -func (svc *Service) Login(ctx context.Context, in *pb.LoginRequest) (*pb.LoginResponse, error) { - return nil, fmt.Errorf("not implemented") -} diff --git a/examples/go-kit/services/session/session.proto b/examples/go-kit/services/session/session.proto deleted file mode 100644 index 8c582e9..0000000 --- a/examples/go-kit/services/session/session.proto +++ /dev/null @@ -1,17 +0,0 @@ -syntax = "proto3"; - -package session; - -service SessionService { - rpc Login(LoginRequest) returns (LoginResponse) {} -} - -message LoginRequest { - string username = 1; - 
string password = 2; -} - -message LoginResponse { - string token = 1; - string err_msg = 2; -} diff --git a/examples/go-kit/services/sprint/gen/README.md b/examples/go-kit/services/sprint/gen/README.md deleted file mode 100644 index e69de29..0000000 diff --git a/examples/go-kit/services/sprint/gen/client/grpc/client.go b/examples/go-kit/services/sprint/gen/client/grpc/client.go deleted file mode 100644 index ef39bac..0000000 --- a/examples/go-kit/services/sprint/gen/client/grpc/client.go +++ /dev/null @@ -1,95 +0,0 @@ -package sprint_clientgrpc - -import ( - context "context" - - jwt "github.com/go-kit/kit/auth/jwt" - "github.com/go-kit/kit/endpoint" - "github.com/go-kit/kit/log" - grpctransport "github.com/go-kit/kit/transport/grpc" - "google.golang.org/grpc" - - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/pb" -) - -func New(conn *grpc.ClientConn, logger log.Logger) pb.SprintServiceServer { - - var addsprintEndpoint endpoint.Endpoint - { - addsprintEndpoint = grpctransport.NewClient( - conn, - "sprint.SprintService", - "AddSprint", - EncodeAddSprintRequest, - DecodeAddSprintResponse, - pb.AddSprintResponse{}, - append([]grpctransport.ClientOption{}, grpctransport.ClientBefore(jwt.FromGRPCContext()))..., - ).Endpoint() - } - - var closesprintEndpoint endpoint.Endpoint - { - closesprintEndpoint = grpctransport.NewClient( - conn, - "sprint.SprintService", - "CloseSprint", - EncodeCloseSprintRequest, - DecodeCloseSprintResponse, - pb.CloseSprintResponse{}, - append([]grpctransport.ClientOption{}, grpctransport.ClientBefore(jwt.FromGRPCContext()))..., - ).Endpoint() - } - - var getsprintEndpoint endpoint.Endpoint - { - getsprintEndpoint = grpctransport.NewClient( - conn, - "sprint.SprintService", - "GetSprint", - EncodeGetSprintRequest, - DecodeGetSprintResponse, - pb.GetSprintResponse{}, - append([]grpctransport.ClientOption{}, grpctransport.ClientBefore(jwt.FromGRPCContext()))..., - ).Endpoint() - } - - return &endpoints.Endpoints{ - - AddSprintEndpoint: addsprintEndpoint, - - CloseSprintEndpoint: closesprintEndpoint, - - GetSprintEndpoint: getsprintEndpoint, - } -} - -func EncodeAddSprintRequest(_ context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.AddSprintRequest) - return req, nil -} - -func DecodeAddSprintResponse(_ context.Context, grpcResponse interface{}) (interface{}, error) { - response := grpcResponse.(*pb.AddSprintResponse) - return response, nil -} - -func EncodeCloseSprintRequest(_ context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.CloseSprintRequest) - return req, nil -} - -func DecodeCloseSprintResponse(_ context.Context, grpcResponse interface{}) (interface{}, error) { - response := grpcResponse.(*pb.CloseSprintResponse) - return response, nil -} - -func EncodeGetSprintRequest(_ context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.GetSprintRequest) - return req, nil -} - -func DecodeGetSprintResponse(_ context.Context, grpcResponse interface{}) (interface{}, error) { - response := grpcResponse.(*pb.GetSprintResponse) - return response, nil -} diff --git a/examples/go-kit/services/sprint/gen/endpoints/endpoints.go b/examples/go-kit/services/sprint/gen/endpoints/endpoints.go deleted file mode 100644 index 6db874c..0000000 --- a/examples/go-kit/services/sprint/gen/endpoints/endpoints.go +++ /dev/null @@ -1,92 +0,0 @@ -package sprint_endpoints - -import ( - context 
"context" - "fmt" - - "github.com/go-kit/kit/endpoint" - oldcontext "golang.org/x/net/context" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/pb" -) - -var _ = endpoint.Chain -var _ = fmt.Errorf -var _ = context.Background - -type StreamEndpoint func(server interface{}, req interface{}) (err error) - -type Endpoints struct { - AddSprintEndpoint endpoint.Endpoint - - CloseSprintEndpoint endpoint.Endpoint - - GetSprintEndpoint endpoint.Endpoint -} - -func (e *Endpoints) AddSprint(ctx oldcontext.Context, in *pb.AddSprintRequest) (*pb.AddSprintResponse, error) { - out, err := e.AddSprintEndpoint(ctx, in) - if err != nil { - return &pb.AddSprintResponse{ErrMsg: err.Error()}, err - } - return out.(*pb.AddSprintResponse), err -} - -func (e *Endpoints) CloseSprint(ctx oldcontext.Context, in *pb.CloseSprintRequest) (*pb.CloseSprintResponse, error) { - out, err := e.CloseSprintEndpoint(ctx, in) - if err != nil { - return &pb.CloseSprintResponse{ErrMsg: err.Error()}, err - } - return out.(*pb.CloseSprintResponse), err -} - -func (e *Endpoints) GetSprint(ctx oldcontext.Context, in *pb.GetSprintRequest) (*pb.GetSprintResponse, error) { - out, err := e.GetSprintEndpoint(ctx, in) - if err != nil { - return &pb.GetSprintResponse{ErrMsg: err.Error()}, err - } - return out.(*pb.GetSprintResponse), err -} - -func MakeAddSprintEndpoint(svc pb.SprintServiceServer) endpoint.Endpoint { - return func(ctx context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.AddSprintRequest) - rep, err := svc.AddSprint(ctx, req) - if err != nil { - return &pb.AddSprintResponse{ErrMsg: err.Error()}, err - } - return rep, nil - } -} - -func MakeCloseSprintEndpoint(svc pb.SprintServiceServer) endpoint.Endpoint { - return func(ctx context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.CloseSprintRequest) - rep, err := svc.CloseSprint(ctx, req) - if err != nil { - return &pb.CloseSprintResponse{ErrMsg: err.Error()}, err - } - return rep, nil - } -} - -func MakeGetSprintEndpoint(svc pb.SprintServiceServer) endpoint.Endpoint { - return func(ctx context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.GetSprintRequest) - rep, err := svc.GetSprint(ctx, req) - if err != nil { - return &pb.GetSprintResponse{ErrMsg: err.Error()}, err - } - return rep, nil - } -} - -func MakeEndpoints(svc pb.SprintServiceServer) Endpoints { - return Endpoints{ - - AddSprintEndpoint: MakeAddSprintEndpoint(svc), - - CloseSprintEndpoint: MakeCloseSprintEndpoint(svc), - - GetSprintEndpoint: MakeGetSprintEndpoint(svc), - } -} diff --git a/examples/go-kit/services/sprint/gen/pb/pb.go b/examples/go-kit/services/sprint/gen/pb/pb.go deleted file mode 100644 index 81c7f94..0000000 --- a/examples/go-kit/services/sprint/gen/pb/pb.go +++ /dev/null @@ -1,357 +0,0 @@ -// Code generated by protoc-gen-go. -// source: services/sprint/sprint.proto -// DO NOT EDIT! - -/* -Package sprint is a generated protocol buffer package. - -It is generated from these files: - services/sprint/sprint.proto - -It has these top-level messages: - AddSprintRequest - AddSprintResponse - CloseSprintRequest - CloseSprintResponse - GetSprintRequest - GetSprintResponse - Sprint -*/ -package sprint - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. 
-var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type AddSprintRequest struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` -} - -func (m *AddSprintRequest) Reset() { *m = AddSprintRequest{} } -func (m *AddSprintRequest) String() string { return proto.CompactTextString(m) } -func (*AddSprintRequest) ProtoMessage() {} -func (*AddSprintRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *AddSprintRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -type AddSprintResponse struct { - Sprint *Sprint `protobuf:"bytes,1,opt,name=sprint" json:"sprint,omitempty"` - ErrMsg string `protobuf:"bytes,2,opt,name=err_msg,json=errMsg" json:"err_msg,omitempty"` -} - -func (m *AddSprintResponse) Reset() { *m = AddSprintResponse{} } -func (m *AddSprintResponse) String() string { return proto.CompactTextString(m) } -func (*AddSprintResponse) ProtoMessage() {} -func (*AddSprintResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *AddSprintResponse) GetSprint() *Sprint { - if m != nil { - return m.Sprint - } - return nil -} - -func (m *AddSprintResponse) GetErrMsg() string { - if m != nil { - return m.ErrMsg - } - return "" -} - -type CloseSprintRequest struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` -} - -func (m *CloseSprintRequest) Reset() { *m = CloseSprintRequest{} } -func (m *CloseSprintRequest) String() string { return proto.CompactTextString(m) } -func (*CloseSprintRequest) ProtoMessage() {} -func (*CloseSprintRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } - -func (m *CloseSprintRequest) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type CloseSprintResponse struct { - ErrMsg string `protobuf:"bytes,1,opt,name=err_msg,json=errMsg" json:"err_msg,omitempty"` -} - -func (m *CloseSprintResponse) Reset() { *m = CloseSprintResponse{} } -func (m *CloseSprintResponse) String() string { return proto.CompactTextString(m) } -func (*CloseSprintResponse) ProtoMessage() {} -func (*CloseSprintResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } - -func (m *CloseSprintResponse) GetErrMsg() string { - if m != nil { - return m.ErrMsg - } - return "" -} - -type GetSprintRequest struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` -} - -func (m *GetSprintRequest) Reset() { *m = GetSprintRequest{} } -func (m *GetSprintRequest) String() string { return proto.CompactTextString(m) } -func (*GetSprintRequest) ProtoMessage() {} -func (*GetSprintRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } - -func (m *GetSprintRequest) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type GetSprintResponse struct { - Sprint *Sprint `protobuf:"bytes,1,opt,name=sprint" json:"sprint,omitempty"` - ErrMsg string `protobuf:"bytes,2,opt,name=err_msg,json=errMsg" json:"err_msg,omitempty"` -} - -func (m *GetSprintResponse) Reset() { *m = GetSprintResponse{} } -func (m *GetSprintResponse) String() string { return proto.CompactTextString(m) } -func (*GetSprintResponse) ProtoMessage() {} -func (*GetSprintResponse) Descriptor() ([]byte, 
[]int) { return fileDescriptor0, []int{5} } - -func (m *GetSprintResponse) GetSprint() *Sprint { - if m != nil { - return m.Sprint - } - return nil -} - -func (m *GetSprintResponse) GetErrMsg() string { - if m != nil { - return m.ErrMsg - } - return "" -} - -type Sprint struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` - CreatedAt uint32 `protobuf:"varint,2,opt,name=created_at,json=createdAt" json:"created_at,omitempty"` - Name string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"` -} - -func (m *Sprint) Reset() { *m = Sprint{} } -func (m *Sprint) String() string { return proto.CompactTextString(m) } -func (*Sprint) ProtoMessage() {} -func (*Sprint) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } - -func (m *Sprint) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *Sprint) GetCreatedAt() uint32 { - if m != nil { - return m.CreatedAt - } - return 0 -} - -func (m *Sprint) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func init() { - proto.RegisterType((*AddSprintRequest)(nil), "sprint.AddSprintRequest") - proto.RegisterType((*AddSprintResponse)(nil), "sprint.AddSprintResponse") - proto.RegisterType((*CloseSprintRequest)(nil), "sprint.CloseSprintRequest") - proto.RegisterType((*CloseSprintResponse)(nil), "sprint.CloseSprintResponse") - proto.RegisterType((*GetSprintRequest)(nil), "sprint.GetSprintRequest") - proto.RegisterType((*GetSprintResponse)(nil), "sprint.GetSprintResponse") - proto.RegisterType((*Sprint)(nil), "sprint.Sprint") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// Client API for SprintService service - -type SprintServiceClient interface { - AddSprint(ctx context.Context, in *AddSprintRequest, opts ...grpc.CallOption) (*AddSprintResponse, error) - CloseSprint(ctx context.Context, in *CloseSprintRequest, opts ...grpc.CallOption) (*CloseSprintResponse, error) - GetSprint(ctx context.Context, in *GetSprintRequest, opts ...grpc.CallOption) (*GetSprintResponse, error) -} - -type sprintServiceClient struct { - cc *grpc.ClientConn -} - -func NewSprintServiceClient(cc *grpc.ClientConn) SprintServiceClient { - return &sprintServiceClient{cc} -} - -func (c *sprintServiceClient) AddSprint(ctx context.Context, in *AddSprintRequest, opts ...grpc.CallOption) (*AddSprintResponse, error) { - out := new(AddSprintResponse) - err := grpc.Invoke(ctx, "/sprint.SprintService/AddSprint", in, out, c.cc, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *sprintServiceClient) CloseSprint(ctx context.Context, in *CloseSprintRequest, opts ...grpc.CallOption) (*CloseSprintResponse, error) { - out := new(CloseSprintResponse) - err := grpc.Invoke(ctx, "/sprint.SprintService/CloseSprint", in, out, c.cc, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *sprintServiceClient) GetSprint(ctx context.Context, in *GetSprintRequest, opts ...grpc.CallOption) (*GetSprintResponse, error) { - out := new(GetSprintResponse) - err := grpc.Invoke(ctx, "/sprint.SprintService/GetSprint", in, out, c.cc, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -// Server API for SprintService service - -type SprintServiceServer interface { - AddSprint(context.Context, *AddSprintRequest) (*AddSprintResponse, error) - CloseSprint(context.Context, *CloseSprintRequest) (*CloseSprintResponse, error) - GetSprint(context.Context, *GetSprintRequest) (*GetSprintResponse, error) -} - -func RegisterSprintServiceServer(s *grpc.Server, srv SprintServiceServer) { - s.RegisterService(&_SprintService_serviceDesc, srv) -} - -func _SprintService_AddSprint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AddSprintRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(SprintServiceServer).AddSprint(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/sprint.SprintService/AddSprint", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(SprintServiceServer).AddSprint(ctx, req.(*AddSprintRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _SprintService_CloseSprint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CloseSprintRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(SprintServiceServer).CloseSprint(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/sprint.SprintService/CloseSprint", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(SprintServiceServer).CloseSprint(ctx, req.(*CloseSprintRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _SprintService_GetSprint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetSprintRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(SprintServiceServer).GetSprint(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/sprint.SprintService/GetSprint", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(SprintServiceServer).GetSprint(ctx, req.(*GetSprintRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _SprintService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "sprint.SprintService", - HandlerType: (*SprintServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "AddSprint", - Handler: _SprintService_AddSprint_Handler, - }, - { - MethodName: "CloseSprint", - Handler: _SprintService_CloseSprint_Handler, - }, - { - MethodName: "GetSprint", - Handler: _SprintService_GetSprint_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "services/sprint/sprint.proto", -} - -func init() { proto.RegisterFile("services/sprint/sprint.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 290 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xac, 0x52, 0x4d, 0x4b, 0xc3, 0x40, - 0x10, 0x6d, 0xaa, 0x44, 0x32, 0xa5, 0xa5, 0x1d, 0x0f, 0xc6, 0xa8, 0x20, 0x8b, 0x14, 0x4f, 0x11, - 0xea, 0x2f, 0x68, 0x3d, 0x28, 0x88, 0x97, 0xb4, 0xf7, 0x12, 0xbb, 0x43, 0x09, 0xd8, 0x24, 0xee, - 0xac, 0xfe, 0x5f, 0xff, 0x89, 0xb0, 0xbb, 0xcd, 0x57, 0x8b, 0x27, 0x4f, 0xc9, 0xec, 0xbc, 0x7d, - 0x6f, 0xe6, 0xbd, 
0x85, 0x6b, 0x26, 0xf5, 0x9d, 0x6d, 0x88, 0x1f, 0xb8, 0x54, 0x59, 0xae, 0xdd, - 0x27, 0x2e, 0x55, 0xa1, 0x0b, 0xf4, 0x6d, 0x25, 0xa6, 0x30, 0x9e, 0x4b, 0xb9, 0x34, 0x45, 0x42, - 0x9f, 0x5f, 0xc4, 0x1a, 0x11, 0x4e, 0xf3, 0x74, 0x47, 0xa1, 0x77, 0xeb, 0xdd, 0x07, 0x89, 0xf9, - 0x17, 0x2b, 0x98, 0x34, 0x70, 0x5c, 0x16, 0x39, 0x13, 0x4e, 0xc1, 0xd1, 0x18, 0xe8, 0x60, 0x36, - 0x8a, 0x9d, 0x86, 0xc3, 0xb9, 0x2e, 0x5e, 0xc0, 0x19, 0x29, 0xb5, 0xde, 0xf1, 0x36, 0xec, 0x1b, - 0x4e, 0x9f, 0x94, 0x7a, 0xe3, 0xad, 0xb8, 0x03, 0x7c, 0xfa, 0x28, 0x98, 0xda, 0xfa, 0x23, 0xe8, - 0x67, 0xd2, 0xa9, 0xf7, 0x33, 0x29, 0x62, 0x38, 0x6f, 0xa1, 0x9c, 0x7a, 0x83, 0xd5, 0x6b, 0xb1, - 0x0a, 0x18, 0x3f, 0x93, 0xfe, 0x9b, 0x73, 0x05, 0x93, 0x06, 0xe6, 0xbf, 0xf6, 0x79, 0x05, 0xdf, - 0x42, 0xbb, 0x7a, 0x78, 0x03, 0xb0, 0x51, 0x94, 0x6a, 0x92, 0xeb, 0x54, 0x9b, 0x5b, 0xc3, 0x24, - 0x70, 0x27, 0xf3, 0xda, 0xf2, 0x93, 0xda, 0xf2, 0xd9, 0x8f, 0x07, 0x43, 0xcb, 0xb6, 0xb4, 0x49, - 0xe2, 0x02, 0x82, 0x2a, 0x04, 0x0c, 0xf7, 0xc3, 0x75, 0xf3, 0x8b, 0x2e, 0x8f, 0x74, 0xec, 0x86, - 0xa2, 0x87, 0x2f, 0x30, 0x68, 0x98, 0x89, 0xd1, 0x1e, 0x7b, 0x98, 0x43, 0x74, 0x75, 0xb4, 0x57, - 0x31, 0x2d, 0x20, 0xa8, 0x2c, 0xac, 0xa7, 0xe9, 0x3a, 0x5f, 0x4f, 0x73, 0xe0, 0xb7, 0xe8, 0xbd, - 0xfb, 0xe6, 0x35, 0x3e, 0xfe, 0x06, 0x00, 0x00, 0xff, 0xff, 0xf0, 0x9e, 0xb2, 0x1e, 0xad, 0x02, - 0x00, 0x00, -} diff --git a/examples/go-kit/services/sprint/gen/transports/grpc/grpc.go b/examples/go-kit/services/sprint/gen/transports/grpc/grpc.go deleted file mode 100644 index a301589..0000000 --- a/examples/go-kit/services/sprint/gen/transports/grpc/grpc.go +++ /dev/null @@ -1,109 +0,0 @@ -package sprint_grpctransport - -import ( - context "context" - "fmt" - - grpctransport "github.com/go-kit/kit/transport/grpc" - oldcontext "golang.org/x/net/context" - - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/pb" -) - -// avoid import errors -var _ = fmt.Errorf - -func MakeGRPCServer(endpoints endpoints.Endpoints) pb.SprintServiceServer { - var options []grpctransport.ServerOption - _ = options - return &grpcServer{ - - addsprint: grpctransport.NewServer( - endpoints.AddSprintEndpoint, - decodeRequest, - encodeAddSprintResponse, - options..., - ), - - closesprint: grpctransport.NewServer( - endpoints.CloseSprintEndpoint, - decodeRequest, - encodeCloseSprintResponse, - options..., - ), - - getsprint: grpctransport.NewServer( - endpoints.GetSprintEndpoint, - decodeRequest, - encodeGetSprintResponse, - options..., - ), - } -} - -type grpcServer struct { - addsprint grpctransport.Handler - - closesprint grpctransport.Handler - - getsprint grpctransport.Handler -} - -func (s *grpcServer) AddSprint(ctx oldcontext.Context, req *pb.AddSprintRequest) (*pb.AddSprintResponse, error) { - _, rep, err := s.addsprint.ServeGRPC(ctx, req) - if err != nil { - return nil, err - } - return rep.(*pb.AddSprintResponse), nil -} - -func encodeAddSprintResponse(ctx context.Context, response interface{}) (interface{}, error) { - resp := response.(*pb.AddSprintResponse) - return resp, nil -} - -func (s *grpcServer) CloseSprint(ctx oldcontext.Context, req *pb.CloseSprintRequest) (*pb.CloseSprintResponse, error) { - _, rep, err := s.closesprint.ServeGRPC(ctx, req) - if err != nil { - return nil, err - } - return rep.(*pb.CloseSprintResponse), nil -} - -func encodeCloseSprintResponse(ctx context.Context, response interface{}) (interface{}, error) { - resp := 
response.(*pb.CloseSprintResponse) - return resp, nil -} - -func (s *grpcServer) GetSprint(ctx oldcontext.Context, req *pb.GetSprintRequest) (*pb.GetSprintResponse, error) { - _, rep, err := s.getsprint.ServeGRPC(ctx, req) - if err != nil { - return nil, err - } - return rep.(*pb.GetSprintResponse), nil -} - -func encodeGetSprintResponse(ctx context.Context, response interface{}) (interface{}, error) { - resp := response.(*pb.GetSprintResponse) - return resp, nil -} - -func decodeRequest(ctx context.Context, grpcReq interface{}) (interface{}, error) { - return grpcReq, nil -} - -type streamHandler interface { - Do(server interface{}, req interface{}) (err error) -} - -type server struct { - e endpoints.StreamEndpoint -} - -func (s server) Do(server interface{}, req interface{}) (err error) { - if err := s.e(server, req); err != nil { - return err - } - return nil -} diff --git a/examples/go-kit/services/sprint/gen/transports/http/http.go b/examples/go-kit/services/sprint/gen/transports/http/http.go deleted file mode 100644 index 40259f5..0000000 --- a/examples/go-kit/services/sprint/gen/transports/http/http.go +++ /dev/null @@ -1,86 +0,0 @@ -package sprint_httptransport - -import ( - "context" - "encoding/json" - "log" - "net/http" - - gokit_endpoint "github.com/go-kit/kit/endpoint" - httptransport "github.com/go-kit/kit/transport/http" - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/pb" -) - -var _ = log.Printf -var _ = gokit_endpoint.Chain -var _ = httptransport.NewClient - -func MakeAddSprintHandler(svc pb.SprintServiceServer, endpoint gokit_endpoint.Endpoint) *httptransport.Server { - return httptransport.NewServer( - endpoint, - decodeAddSprintRequest, - encodeResponse, - []httptransport.ServerOption{}..., - ) -} - -func decodeAddSprintRequest(ctx context.Context, r *http.Request) (interface{}, error) { - var req pb.AddSprintRequest - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - return nil, err - } - return &req, nil -} - -func MakeCloseSprintHandler(svc pb.SprintServiceServer, endpoint gokit_endpoint.Endpoint) *httptransport.Server { - return httptransport.NewServer( - endpoint, - decodeCloseSprintRequest, - encodeResponse, - []httptransport.ServerOption{}..., - ) -} - -func decodeCloseSprintRequest(ctx context.Context, r *http.Request) (interface{}, error) { - var req pb.CloseSprintRequest - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - return nil, err - } - return &req, nil -} - -func MakeGetSprintHandler(svc pb.SprintServiceServer, endpoint gokit_endpoint.Endpoint) *httptransport.Server { - return httptransport.NewServer( - endpoint, - decodeGetSprintRequest, - encodeResponse, - []httptransport.ServerOption{}..., - ) -} - -func decodeGetSprintRequest(ctx context.Context, r *http.Request) (interface{}, error) { - var req pb.GetSprintRequest - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - return nil, err - } - return &req, nil -} - -func encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error { - return json.NewEncoder(w).Encode(response) -} - -func RegisterHandlers(svc pb.SprintServiceServer, mux *http.ServeMux, endpoints endpoints.Endpoints) error { - - log.Println("new HTTP endpoint: \"/AddSprint\" (service=Sprint)") - mux.Handle("/AddSprint", MakeAddSprintHandler(svc, endpoints.AddSprintEndpoint)) - - log.Println("new HTTP endpoint: \"/CloseSprint\" (service=Sprint)") - 
mux.Handle("/CloseSprint", MakeCloseSprintHandler(svc, endpoints.CloseSprintEndpoint)) - - log.Println("new HTTP endpoint: \"/GetSprint\" (service=Sprint)") - mux.Handle("/GetSprint", MakeGetSprintHandler(svc, endpoints.GetSprintEndpoint)) - - return nil -} diff --git a/examples/go-kit/services/sprint/service.go b/examples/go-kit/services/sprint/service.go deleted file mode 100644 index 0b3606f..0000000 --- a/examples/go-kit/services/sprint/service.go +++ /dev/null @@ -1,27 +0,0 @@ -package sprintsvc - -import ( - "fmt" - - "golang.org/x/net/context" - - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/sprint/gen/pb" -) - -type Service struct{} - -func New() pb.SprintServiceServer { - return &Service{} -} - -func (svc *Service) AddSprint(ctx context.Context, in *pb.AddSprintRequest) (*pb.AddSprintResponse, error) { - return nil, fmt.Errorf("not implemented") -} - -func (svc *Service) CloseSprint(ctx context.Context, in *pb.CloseSprintRequest) (*pb.CloseSprintResponse, error) { - return nil, fmt.Errorf("not implemented") -} - -func (svc *Service) GetSprint(ctx context.Context, in *pb.GetSprintRequest) (*pb.GetSprintResponse, error) { - return nil, fmt.Errorf("not implemented") -} diff --git a/examples/go-kit/services/sprint/sprint.proto b/examples/go-kit/services/sprint/sprint.proto deleted file mode 100644 index dfb6a33..0000000 --- a/examples/go-kit/services/sprint/sprint.proto +++ /dev/null @@ -1,38 +0,0 @@ -syntax = "proto3"; - -package sprint; - -service SprintService { - rpc AddSprint(AddSprintRequest) returns (AddSprintResponse) {} - rpc CloseSprint(CloseSprintRequest) returns (CloseSprintResponse) {} - rpc GetSprint(GetSprintRequest) returns (GetSprintResponse) {} -} - -message AddSprintRequest { - string name = 1; -} -message AddSprintResponse { - Sprint sprint = 1; - string err_msg = 2; -} - -message CloseSprintRequest { - string id = 1; -} -message CloseSprintResponse { - string err_msg = 1; -} - -message GetSprintRequest { - string id = 1; -} -message GetSprintResponse { - Sprint sprint = 1; - string err_msg = 2; -} - -message Sprint { - string id = 1; - uint32 created_at = 2; - string name = 3; -} \ No newline at end of file diff --git a/examples/go-kit/services/user/gen/README.md b/examples/go-kit/services/user/gen/README.md deleted file mode 100644 index e69de29..0000000 diff --git a/examples/go-kit/services/user/gen/client/grpc/client.go b/examples/go-kit/services/user/gen/client/grpc/client.go deleted file mode 100644 index ea61cb8..0000000 --- a/examples/go-kit/services/user/gen/client/grpc/client.go +++ /dev/null @@ -1,70 +0,0 @@ -package user_clientgrpc - -import ( - context "context" - - jwt "github.com/go-kit/kit/auth/jwt" - "github.com/go-kit/kit/endpoint" - "github.com/go-kit/kit/log" - grpctransport "github.com/go-kit/kit/transport/grpc" - "google.golang.org/grpc" - - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/pb" -) - -func New(conn *grpc.ClientConn, logger log.Logger) pb.UserServiceServer { - - var createuserEndpoint endpoint.Endpoint - { - createuserEndpoint = grpctransport.NewClient( - conn, - "user.UserService", - "CreateUser", - EncodeCreateUserRequest, - DecodeCreateUserResponse, - pb.CreateUserResponse{}, - append([]grpctransport.ClientOption{}, grpctransport.ClientBefore(jwt.FromGRPCContext()))..., - ).Endpoint() - } - - var getuserEndpoint endpoint.Endpoint - { - getuserEndpoint = grpctransport.NewClient( - conn, - "user.UserService", 
- "GetUser", - EncodeGetUserRequest, - DecodeGetUserResponse, - pb.GetUserResponse{}, - append([]grpctransport.ClientOption{}, grpctransport.ClientBefore(jwt.FromGRPCContext()))..., - ).Endpoint() - } - - return &endpoints.Endpoints{ - - CreateUserEndpoint: createuserEndpoint, - - GetUserEndpoint: getuserEndpoint, - } -} - -func EncodeCreateUserRequest(_ context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.CreateUserRequest) - return req, nil -} - -func DecodeCreateUserResponse(_ context.Context, grpcResponse interface{}) (interface{}, error) { - response := grpcResponse.(*pb.CreateUserResponse) - return response, nil -} - -func EncodeGetUserRequest(_ context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.GetUserRequest) - return req, nil -} - -func DecodeGetUserResponse(_ context.Context, grpcResponse interface{}) (interface{}, error) { - response := grpcResponse.(*pb.GetUserResponse) - return response, nil -} diff --git a/examples/go-kit/services/user/gen/endpoints/endpoints.go b/examples/go-kit/services/user/gen/endpoints/endpoints.go deleted file mode 100644 index 7bada77..0000000 --- a/examples/go-kit/services/user/gen/endpoints/endpoints.go +++ /dev/null @@ -1,69 +0,0 @@ -package user_endpoints - -import ( - context "context" - "fmt" - - "github.com/go-kit/kit/endpoint" - oldcontext "golang.org/x/net/context" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/pb" -) - -var _ = endpoint.Chain -var _ = fmt.Errorf -var _ = context.Background - -type StreamEndpoint func(server interface{}, req interface{}) (err error) - -type Endpoints struct { - CreateUserEndpoint endpoint.Endpoint - - GetUserEndpoint endpoint.Endpoint -} - -func (e *Endpoints) CreateUser(ctx oldcontext.Context, in *pb.CreateUserRequest) (*pb.CreateUserResponse, error) { - out, err := e.CreateUserEndpoint(ctx, in) - if err != nil { - return &pb.CreateUserResponse{ErrMsg: err.Error()}, err - } - return out.(*pb.CreateUserResponse), err -} - -func (e *Endpoints) GetUser(ctx oldcontext.Context, in *pb.GetUserRequest) (*pb.GetUserResponse, error) { - out, err := e.GetUserEndpoint(ctx, in) - if err != nil { - return &pb.GetUserResponse{ErrMsg: err.Error()}, err - } - return out.(*pb.GetUserResponse), err -} - -func MakeCreateUserEndpoint(svc pb.UserServiceServer) endpoint.Endpoint { - return func(ctx context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.CreateUserRequest) - rep, err := svc.CreateUser(ctx, req) - if err != nil { - return &pb.CreateUserResponse{ErrMsg: err.Error()}, err - } - return rep, nil - } -} - -func MakeGetUserEndpoint(svc pb.UserServiceServer) endpoint.Endpoint { - return func(ctx context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.GetUserRequest) - rep, err := svc.GetUser(ctx, req) - if err != nil { - return &pb.GetUserResponse{ErrMsg: err.Error()}, err - } - return rep, nil - } -} - -func MakeEndpoints(svc pb.UserServiceServer) Endpoints { - return Endpoints{ - - CreateUserEndpoint: MakeCreateUserEndpoint(svc), - - GetUserEndpoint: MakeGetUserEndpoint(svc), - } -} diff --git a/examples/go-kit/services/user/gen/pb/pb.go b/examples/go-kit/services/user/gen/pb/pb.go deleted file mode 100644 index 1327d40..0000000 --- a/examples/go-kit/services/user/gen/pb/pb.go +++ /dev/null @@ -1,276 +0,0 @@ -// Code generated by protoc-gen-go. -// source: services/user/user.proto -// DO NOT EDIT! - -/* -Package user is a generated protocol buffer package. 
- -It is generated from these files: - services/user/user.proto - -It has these top-level messages: - CreateUserRequest - CreateUserResponse - GetUserRequest - GetUserResponse - User -*/ -package user - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type CreateUserRequest struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` -} - -func (m *CreateUserRequest) Reset() { *m = CreateUserRequest{} } -func (m *CreateUserRequest) String() string { return proto.CompactTextString(m) } -func (*CreateUserRequest) ProtoMessage() {} -func (*CreateUserRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *CreateUserRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -type CreateUserResponse struct { - User *User `protobuf:"bytes,1,opt,name=user" json:"user,omitempty"` - ErrMsg string `protobuf:"bytes,2,opt,name=err_msg,json=errMsg" json:"err_msg,omitempty"` -} - -func (m *CreateUserResponse) Reset() { *m = CreateUserResponse{} } -func (m *CreateUserResponse) String() string { return proto.CompactTextString(m) } -func (*CreateUserResponse) ProtoMessage() {} -func (*CreateUserResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *CreateUserResponse) GetUser() *User { - if m != nil { - return m.User - } - return nil -} - -func (m *CreateUserResponse) GetErrMsg() string { - if m != nil { - return m.ErrMsg - } - return "" -} - -type GetUserRequest struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` -} - -func (m *GetUserRequest) Reset() { *m = GetUserRequest{} } -func (m *GetUserRequest) String() string { return proto.CompactTextString(m) } -func (*GetUserRequest) ProtoMessage() {} -func (*GetUserRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } - -func (m *GetUserRequest) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type GetUserResponse struct { - User *User `protobuf:"bytes,1,opt,name=user" json:"user,omitempty"` - ErrMsg string `protobuf:"bytes,2,opt,name=err_msg,json=errMsg" json:"err_msg,omitempty"` -} - -func (m *GetUserResponse) Reset() { *m = GetUserResponse{} } -func (m *GetUserResponse) String() string { return proto.CompactTextString(m) } -func (*GetUserResponse) ProtoMessage() {} -func (*GetUserResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } - -func (m *GetUserResponse) GetUser() *User { - if m != nil { - return m.User - } - return nil -} - -func (m *GetUserResponse) GetErrMsg() string { - if m != nil { - return m.ErrMsg - } - return "" -} - -type User struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` -} - -func (m *User) Reset() { *m = User{} } -func (m *User) String() string { return proto.CompactTextString(m) } -func (*User) ProtoMessage() {} -func (*User) Descriptor() ([]byte, []int) { return 
fileDescriptor0, []int{4} } - -func (m *User) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *User) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func init() { - proto.RegisterType((*CreateUserRequest)(nil), "user.CreateUserRequest") - proto.RegisterType((*CreateUserResponse)(nil), "user.CreateUserResponse") - proto.RegisterType((*GetUserRequest)(nil), "user.GetUserRequest") - proto.RegisterType((*GetUserResponse)(nil), "user.GetUserResponse") - proto.RegisterType((*User)(nil), "user.User") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// Client API for UserService service - -type UserServiceClient interface { - CreateUser(ctx context.Context, in *CreateUserRequest, opts ...grpc.CallOption) (*CreateUserResponse, error) - GetUser(ctx context.Context, in *GetUserRequest, opts ...grpc.CallOption) (*GetUserResponse, error) -} - -type userServiceClient struct { - cc *grpc.ClientConn -} - -func NewUserServiceClient(cc *grpc.ClientConn) UserServiceClient { - return &userServiceClient{cc} -} - -func (c *userServiceClient) CreateUser(ctx context.Context, in *CreateUserRequest, opts ...grpc.CallOption) (*CreateUserResponse, error) { - out := new(CreateUserResponse) - err := grpc.Invoke(ctx, "/user.UserService/CreateUser", in, out, c.cc, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *userServiceClient) GetUser(ctx context.Context, in *GetUserRequest, opts ...grpc.CallOption) (*GetUserResponse, error) { - out := new(GetUserResponse) - err := grpc.Invoke(ctx, "/user.UserService/GetUser", in, out, c.cc, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -// Server API for UserService service - -type UserServiceServer interface { - CreateUser(context.Context, *CreateUserRequest) (*CreateUserResponse, error) - GetUser(context.Context, *GetUserRequest) (*GetUserResponse, error) -} - -func RegisterUserServiceServer(s *grpc.Server, srv UserServiceServer) { - s.RegisterService(&_UserService_serviceDesc, srv) -} - -func _UserService_CreateUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateUserRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UserServiceServer).CreateUser(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/user.UserService/CreateUser", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UserServiceServer).CreateUser(ctx, req.(*CreateUserRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _UserService_GetUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetUserRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(UserServiceServer).GetUser(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/user.UserService/GetUser", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(UserServiceServer).GetUser(ctx, req.(*GetUserRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _UserService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "user.UserService", - HandlerType: (*UserServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateUser", - Handler: _UserService_CreateUser_Handler, - }, - { - MethodName: "GetUser", - Handler: _UserService_GetUser_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "services/user/user.proto", -} - -func init() { proto.RegisterFile("services/user/user.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 236 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0x92, 0x28, 0x4e, 0x2d, 0x2a, - 0xcb, 0x4c, 0x4e, 0x2d, 0xd6, 0x2f, 0x2d, 0x4e, 0x2d, 0x02, 0x13, 0x7a, 0x05, 0x45, 0xf9, 0x25, - 0xf9, 0x42, 0x2c, 0x20, 0xb6, 0x92, 0x3a, 0x97, 0xa0, 0x73, 0x51, 0x6a, 0x62, 0x49, 0x6a, 0x68, - 0x71, 0x6a, 0x51, 0x50, 0x6a, 0x61, 0x69, 0x6a, 0x71, 0x89, 0x90, 0x10, 0x17, 0x4b, 0x5e, 0x62, - 0x6e, 0xaa, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x67, 0x10, 0x98, 0xad, 0xe4, 0xcb, 0x25, 0x84, 0xac, - 0xb0, 0xb8, 0x20, 0x3f, 0xaf, 0x38, 0x55, 0x48, 0x8e, 0x0b, 0x6c, 0x0c, 0x58, 0x25, 0xb7, 0x11, - 0x97, 0x1e, 0xd8, 0x7c, 0xb0, 0x0a, 0xb0, 0xb8, 0x90, 0x38, 0x17, 0x7b, 0x6a, 0x51, 0x51, 0x7c, - 0x6e, 0x71, 0xba, 0x04, 0x13, 0xd8, 0x30, 0xb6, 0xd4, 0xa2, 0x22, 0xdf, 0xe2, 0x74, 0x25, 0x05, - 0x2e, 0x3e, 0xf7, 0xd4, 0x12, 0x64, 0x4b, 0xf9, 0xb8, 0x98, 0x32, 0x53, 0xa0, 0x56, 0x32, 0x65, - 0xa6, 0x28, 0x79, 0x71, 0xf1, 0xc3, 0x55, 0x50, 0x6a, 0x9b, 0x16, 0x17, 0x0b, 0x48, 0x19, 0xba, - 0x1d, 0x70, 0x8f, 0x32, 0x21, 0x3c, 0x6a, 0xd4, 0xc5, 0xc8, 0xc5, 0x0d, 0x52, 0x1c, 0x0c, 0x09, - 0x38, 0x21, 0x47, 0x2e, 0x2e, 0x84, 0xc7, 0x85, 0xc4, 0x21, 0x96, 0x62, 0x84, 0x99, 0x94, 0x04, - 0xa6, 0x04, 0xc4, 0xd5, 0x4a, 0x0c, 0x42, 0x16, 0x5c, 0xec, 0x50, 0xaf, 0x08, 0x89, 0x40, 0x94, - 0xa1, 0xfa, 0x5d, 0x4a, 
0x14, 0x4d, 0x14, 0xa6, 0x33, 0x89, 0x0d, 0x1c, 0x57, 0xc6, 0x80, 0x00, - 0x00, 0x00, 0xff, 0xff, 0xce, 0xde, 0xa3, 0x2e, 0xc7, 0x01, 0x00, 0x00, -} diff --git a/examples/go-kit/services/user/gen/transports/grpc/grpc.go b/examples/go-kit/services/user/gen/transports/grpc/grpc.go deleted file mode 100644 index 3d225ba..0000000 --- a/examples/go-kit/services/user/gen/transports/grpc/grpc.go +++ /dev/null @@ -1,87 +0,0 @@ -package user_grpctransport - -import ( - context "context" - "fmt" - - grpctransport "github.com/go-kit/kit/transport/grpc" - oldcontext "golang.org/x/net/context" - - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/pb" -) - -// avoid import errors -var _ = fmt.Errorf - -func MakeGRPCServer(endpoints endpoints.Endpoints) pb.UserServiceServer { - var options []grpctransport.ServerOption - _ = options - return &grpcServer{ - - createuser: grpctransport.NewServer( - endpoints.CreateUserEndpoint, - decodeRequest, - encodeCreateUserResponse, - options..., - ), - - getuser: grpctransport.NewServer( - endpoints.GetUserEndpoint, - decodeRequest, - encodeGetUserResponse, - options..., - ), - } -} - -type grpcServer struct { - createuser grpctransport.Handler - - getuser grpctransport.Handler -} - -func (s *grpcServer) CreateUser(ctx oldcontext.Context, req *pb.CreateUserRequest) (*pb.CreateUserResponse, error) { - _, rep, err := s.createuser.ServeGRPC(ctx, req) - if err != nil { - return nil, err - } - return rep.(*pb.CreateUserResponse), nil -} - -func encodeCreateUserResponse(ctx context.Context, response interface{}) (interface{}, error) { - resp := response.(*pb.CreateUserResponse) - return resp, nil -} - -func (s *grpcServer) GetUser(ctx oldcontext.Context, req *pb.GetUserRequest) (*pb.GetUserResponse, error) { - _, rep, err := s.getuser.ServeGRPC(ctx, req) - if err != nil { - return nil, err - } - return rep.(*pb.GetUserResponse), nil -} - -func encodeGetUserResponse(ctx context.Context, response interface{}) (interface{}, error) { - resp := response.(*pb.GetUserResponse) - return resp, nil -} - -func decodeRequest(ctx context.Context, grpcReq interface{}) (interface{}, error) { - return grpcReq, nil -} - -type streamHandler interface { - Do(server interface{}, req interface{}) (err error) -} - -type server struct { - e endpoints.StreamEndpoint -} - -func (s server) Do(server interface{}, req interface{}) (err error) { - if err := s.e(server, req); err != nil { - return err - } - return nil -} diff --git a/examples/go-kit/services/user/gen/transports/http/http.go b/examples/go-kit/services/user/gen/transports/http/http.go deleted file mode 100644 index 498cd5b..0000000 --- a/examples/go-kit/services/user/gen/transports/http/http.go +++ /dev/null @@ -1,66 +0,0 @@ -package user_httptransport - -import ( - "context" - "encoding/json" - "log" - "net/http" - - gokit_endpoint "github.com/go-kit/kit/endpoint" - httptransport "github.com/go-kit/kit/transport/http" - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/pb" -) - -var _ = log.Printf -var _ = gokit_endpoint.Chain -var _ = httptransport.NewClient - -func MakeCreateUserHandler(svc pb.UserServiceServer, endpoint gokit_endpoint.Endpoint) *httptransport.Server { - return httptransport.NewServer( - endpoint, - decodeCreateUserRequest, - encodeResponse, - []httptransport.ServerOption{}..., - ) -} - -func 
decodeCreateUserRequest(ctx context.Context, r *http.Request) (interface{}, error) { - var req pb.CreateUserRequest - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - return nil, err - } - return &req, nil -} - -func MakeGetUserHandler(svc pb.UserServiceServer, endpoint gokit_endpoint.Endpoint) *httptransport.Server { - return httptransport.NewServer( - endpoint, - decodeGetUserRequest, - encodeResponse, - []httptransport.ServerOption{}..., - ) -} - -func decodeGetUserRequest(ctx context.Context, r *http.Request) (interface{}, error) { - var req pb.GetUserRequest - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - return nil, err - } - return &req, nil -} - -func encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error { - return json.NewEncoder(w).Encode(response) -} - -func RegisterHandlers(svc pb.UserServiceServer, mux *http.ServeMux, endpoints endpoints.Endpoints) error { - - log.Println("new HTTP endpoint: \"/CreateUser\" (service=User)") - mux.Handle("/CreateUser", MakeCreateUserHandler(svc, endpoints.CreateUserEndpoint)) - - log.Println("new HTTP endpoint: \"/GetUser\" (service=User)") - mux.Handle("/GetUser", MakeGetUserHandler(svc, endpoints.GetUserEndpoint)) - - return nil -} diff --git a/examples/go-kit/services/user/service.go b/examples/go-kit/services/user/service.go deleted file mode 100644 index 253bf5c..0000000 --- a/examples/go-kit/services/user/service.go +++ /dev/null @@ -1,21 +0,0 @@ -package usersvc - -import ( - "fmt" - - "golang.org/x/net/context" - - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/pb" -) - -type Service struct{} - -func New() pb.UserServiceServer { return &Service{} } - -func (svc *Service) CreateUser(ctx context.Context, in *pb.CreateUserRequest) (*pb.CreateUserResponse, error) { - return nil, fmt.Errorf("not implemented") -} - -func (svc *Service) GetUser(ctx context.Context, in *pb.GetUserRequest) (*pb.GetUserResponse, error) { - return nil, fmt.Errorf("not implemented") -} diff --git a/examples/go-kit/services/user/user.proto b/examples/go-kit/services/user/user.proto deleted file mode 100644 index f74c299..0000000 --- a/examples/go-kit/services/user/user.proto +++ /dev/null @@ -1,29 +0,0 @@ -syntax = "proto3"; - -package user; - -service UserService { - rpc CreateUser(CreateUserRequest) returns (CreateUserResponse) {} - rpc GetUser(GetUserRequest) returns (GetUserResponse) {} -} - -message CreateUserRequest { - string name = 1; -} -message CreateUserResponse { - User user = 1; - string err_msg = 2; -} - -message GetUserRequest { - string id = 1; -} -message GetUserResponse { - User user = 1; - string err_msg = 2; -} - -message User { - string id = 1; - string name = 2; -} \ No newline at end of file diff --git a/examples/go-kit/templates/{{.File.Package}}/gen/README.md.tmpl b/examples/go-kit/templates/{{.File.Package}}/gen/README.md.tmpl deleted file mode 100644 index e69de29..0000000 diff --git a/examples/go-kit/templates/{{.File.Package}}/gen/client/grpc/client.go.tmpl b/examples/go-kit/templates/{{.File.Package}}/gen/client/grpc/client.go.tmpl deleted file mode 100644 index 6300d2e..0000000 --- a/examples/go-kit/templates/{{.File.Package}}/gen/client/grpc/client.go.tmpl +++ /dev/null @@ -1,57 +0,0 @@ -package {{.File.Package}}_clientgrpc - -import ( - context "context" - - "github.com/go-kit/kit/log" - "google.golang.org/grpc" - grpctransport "github.com/go-kit/kit/transport/grpc" - "github.com/go-kit/kit/endpoint" - jwt "github.com/go-kit/kit/auth/jwt" - - pb 
"{{cat .GoPWD "/" .DestinationDir | nospace | clean}}/pb" - endpoints "{{cat .GoPWD "/" .DestinationDir | nospace | clean}}/endpoints" -) - -{{$file:=.File}} - -func New(conn *grpc.ClientConn, logger log.Logger) pb.{{.File.Package | title}}ServiceServer { - {{range .Service.Method}} - {{if and (not .ServerStreaming) (not .ClientStreaming)}} - var {{.Name | lower}}Endpoint endpoint.Endpoint - { - {{.Name | lower}}Endpoint = grpctransport.NewClient( - conn, - "{{$file.Package}}.{{$file.Package | title}}Service", - "{{.Name}}", - Encode{{.Name}}Request, - Decode{{.Name}}Response, - pb.{{.Name}}Response{}, - append([]grpctransport.ClientOption{}, grpctransport.ClientBefore(jwt.FromGRPCContext()))..., - ).Endpoint() - } - {{end}} - {{end}} - - return &endpoints.Endpoints { - {{range .Service.Method}} - {{if and (not .ServerStreaming) (not .ClientStreaming)}} - {{.Name | title}}Endpoint: {{.Name | lower}}Endpoint, - {{end}} - {{end}} - } -} - -{{range .Service.Method}} - {{if and (not .ServerStreaming) (not .ClientStreaming)}} - func Encode{{.Name}}Request(_ context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.{{.Name}}Request) - return req, nil - } - - func Decode{{.Name}}Response(_ context.Context, grpcResponse interface{}) (interface{}, error) { - response := grpcResponse.(*pb.{{.Name}}Response) - return response, nil - } - {{end}} -{{end}} diff --git a/examples/go-kit/templates/{{.File.Package}}/gen/endpoints/endpoints.go.tmpl b/examples/go-kit/templates/{{.File.Package}}/gen/endpoints/endpoints.go.tmpl deleted file mode 100644 index 0cc4e17..0000000 --- a/examples/go-kit/templates/{{.File.Package}}/gen/endpoints/endpoints.go.tmpl +++ /dev/null @@ -1,89 +0,0 @@ -package {{.File.Package}}_endpoints - -{{$file := .File}} - -import ( - context "context" - "fmt" - - oldcontext "golang.org/x/net/context" - pb "{{cat .GoPWD "/" .DestinationDir | nospace | clean}}/pb" - "github.com/go-kit/kit/endpoint" -) - -var _ = endpoint.Chain -var _ = fmt.Errorf -var _ = context.Background - -type StreamEndpoint func(server interface{}, req interface{}) (err error) - -type Endpoints struct { - {{range .Service.Method}} - {{if or (.ClientStreaming) (.ServerStreaming)}} - {{.Name}}Endpoint StreamEndpoint - {{else}} - {{.Name}}Endpoint endpoint.Endpoint - {{end}} - {{end}} -} - -{{range .Service.Method}} - {{if .ServerStreaming}} - {{if .ClientStreaming}} - func (e *Endpoints){{.Name}}(server pb.{{$file.Package | title}}Service_{{.Name}}Server) error { - return fmt.Errorf("not implemented") - } - {{else}} - func (e *Endpoints){{.Name}}(in *pb.{{.Name}}Request, server pb.{{$file.Package | title}}Service_{{.Name}}Server) error { - return fmt.Errorf("not implemented") - } - {{end}} - {{else}} - {{if .ClientStreaming}} - func (e *Endpoints){{.Name}}(server pb.{{$file.Package | title}}Service_{{.Name}}Server) error { - return fmt.Errorf("not implemented") - } - {{else}} - func (e *Endpoints){{.Name}}(ctx oldcontext.Context, in *pb.{{.InputType | splitArray "." | last}}) (*pb.{{.OutputType | splitArray "." | last}}, error) { - out, err := e.{{.Name}}Endpoint(ctx, in) - if err != nil { - return &pb.{{.OutputType | splitArray "." | last}}{ErrMsg: err.Error()}, err - } - return out.(*pb.{{.OutputType | splitArray "." 
| last}}), err - } - {{end}} - {{end}} -{{end}} - -{{range .Service.Method}} - {{if or (.ServerStreaming) (.ClientStreaming)}} - func Make{{.Name}}Endpoint(svc pb.{{$file.Package | title}}ServiceServer) StreamEndpoint { - return func(server interface{}, request interface{}) error { - {{if .ClientStreaming}} - return svc.{{.Name}}(server.(pb.{{$file.Package | title}}Service_{{.Name}}Server)) - {{else}} - return svc.{{.Name}}(request.(*pb.{{.Name}}Request), server.(pb.{{$file.Package | title}}Service_{{.Name}}Server)) - {{end}} - } - } - {{else}} - func Make{{.Name}}Endpoint(svc pb.{{$file.Package | title}}ServiceServer) endpoint.Endpoint { - return func(ctx context.Context, request interface{}) (interface{}, error) { - req := request.(*pb.{{.InputType | splitArray "." | last}}) - rep, err := svc.{{.Name}}(ctx, req) - if err != nil { - return &pb.{{.OutputType | splitArray "." | last}}{ErrMsg: err.Error()}, err - } - return rep, nil - } - } - {{end}} -{{end}} - -func MakeEndpoints(svc pb.{{.File.Package | title}}ServiceServer) Endpoints { - return Endpoints{ - {{range .Service.Method}} - {{.Name}}Endpoint: Make{{.Name}}Endpoint(svc), - {{end}} - } -} diff --git a/examples/go-kit/templates/{{.File.Package}}/gen/transports/grpc/grpc.go.tmpl b/examples/go-kit/templates/{{.File.Package}}/gen/transports/grpc/grpc.go.tmpl deleted file mode 100644 index a10ee70..0000000 --- a/examples/go-kit/templates/{{.File.Package}}/gen/transports/grpc/grpc.go.tmpl +++ /dev/null @@ -1,92 +0,0 @@ -package {{.File.Package}}_grpctransport - -{{$file := .File}} - -import ( - context "context" - "fmt" - - oldcontext "golang.org/x/net/context" - grpctransport "github.com/go-kit/kit/transport/grpc" - - pb "{{cat .GoPWD "/" .DestinationDir | nospace | clean}}/pb" - endpoints "{{cat .GoPWD "/" .DestinationDir | nospace | clean}}/endpoints" -) - -// avoid import errors -var _ = fmt.Errorf - -func MakeGRPCServer(endpoints endpoints.Endpoints) pb.{{.File.Package | title}}ServiceServer { - var options []grpctransport.ServerOption - _ = options - return &grpcServer{ - {{range .Service.Method}} - {{if or (.ClientStreaming) (.ServerStreaming)}} - {{.Name | lower}}: &server{ - e: endpoints.{{.Name}}Endpoint, - }, - {{else}} - {{.Name | lower}}: grpctransport.NewServer( - endpoints.{{.Name}}Endpoint, - decodeRequest, - encode{{.Name}}Response, - options..., - ), - {{end}} - {{end}} - } -} - -type grpcServer struct { - {{range .Service.Method}} - {{if or (.ClientStreaming) (.ServerStreaming)}} - {{.Name | lower}} streamHandler - {{else}} - {{.Name | lower}} grpctransport.Handler - {{end}} - {{end}} -} - -{{range .Service.Method}} - {{if .ClientStreaming}} - func (s *grpcServer) {{.Name}}(server pb.{{$file.Package | title}}Service_{{.Name}}Server) error { - return s.{{.Name | lower}}.Do(server, nil) - } - {{else if .ServerStreaming}} - func (s *grpcServer) {{.Name}}(req *pb.{{.Name}}Request, server pb.{{$file.Package | title}}Service_{{.Name}}Server) error { - return s.{{.Name | lower}}.Do(server, req) - } - {{else}} - func (s *grpcServer) {{.Name}}(ctx oldcontext.Context, req *pb.{{.InputType | splitArray "." | last}}) (*pb.{{.OutputType | splitArray "." | last}}, error) { - _, rep, err := s.{{.Name | lower}}.ServeGRPC(ctx, req) - if err != nil { - return nil, err - } - return rep.(*pb.{{.OutputType | splitArray "." | last}}), nil - } - - func encode{{.Name}}Response(ctx context.Context, response interface{}) (interface{}, error) { - resp := response.(*pb.{{.OutputType | splitArray "." 
| last}}) - return resp, nil - } - {{end}} -{{end}} - -func decodeRequest(ctx context.Context, grpcReq interface{}) (interface{}, error) { - return grpcReq, nil -} - -type streamHandler interface{ - Do(server interface{}, req interface{}) (err error) -} - -type server struct { - e endpoints.StreamEndpoint -} - -func (s server) Do(server interface{}, req interface{}) (err error) { - if err := s.e(server, req); err != nil { - return err - } - return nil -} diff --git a/examples/go-kit/templates/{{.File.Package}}/gen/transports/http/http.go.tmpl b/examples/go-kit/templates/{{.File.Package}}/gen/transports/http/http.go.tmpl deleted file mode 100644 index a5b577d..0000000 --- a/examples/go-kit/templates/{{.File.Package}}/gen/transports/http/http.go.tmpl +++ /dev/null @@ -1,55 +0,0 @@ -package {{.File.Package}}_httptransport - -{{$file := .File}} - -import ( - "log" - "net/http" - "encoding/json" - "context" - - pb "{{cat .GoPWD "/" .DestinationDir | nospace | clean}}/pb" - gokit_endpoint "github.com/go-kit/kit/endpoint" - httptransport "github.com/go-kit/kit/transport/http" - endpoints "{{cat .GoPWD "/" .DestinationDir | nospace | clean}}/endpoints" -) - -var _ = log.Printf -var _ = gokit_endpoint.Chain -var _ = httptransport.NewClient - - -{{range .Service.Method}} - {{if and (not .ServerStreaming) (not .ClientStreaming)}} - func Make{{.Name}}Handler(svc pb.{{$file.Package | title}}ServiceServer, endpoint gokit_endpoint.Endpoint) *httptransport.Server { - return httptransport.NewServer( - endpoint, - decode{{.Name}}Request, - encodeResponse, - []httptransport.ServerOption{}..., - ) - } - - func decode{{.Name}}Request(ctx context.Context, r *http.Request) (interface{}, error) { - var req pb.{{.InputType | splitArray "." | last}} - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - return nil, err - } - return &req, nil - } - {{end}} -{{end}} - -func encodeResponse(ctx context.Context, w http.ResponseWriter, response interface{}) error { - return json.NewEncoder(w).Encode(response) -} - -func RegisterHandlers(svc pb.{{$file.Package | title}}ServiceServer, mux *http.ServeMux, endpoints endpoints.Endpoints) error { - {{range .Service.Method}} - {{if and (not .ServerStreaming) (not .ClientStreaming)}} - log.Println("new HTTP endpoint: \"/{{.Name}}\" (service={{$file.Package | title}})") - mux.Handle("/{{.Name}}", Make{{.Name}}Handler(svc, endpoints.{{.Name}}Endpoint)) - {{end}} - {{end}} - return nil -} diff --git a/examples/go.mod b/examples/go.mod deleted file mode 100644 index e69de29..0000000 diff --git a/examples/helpers/Makefile b/examples/helpers/Makefile deleted file mode 100644 index 99d4be8..0000000 --- a/examples/helpers/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.PHONY: build -build: - mkdir -p output - protoc -I. --gotemplate_out=template_dir=.,debug=true:. *.proto - - -.PHONY: re -re: clean build - - -.PHONY: clean -clean: - rm -rf output diff --git a/examples/helpers/example.txt b/examples/helpers/example.txt deleted file mode 100644 index 0224f48..0000000 --- a/examples/helpers/example.txt +++ /dev/null @@ -1,58 +0,0 @@ -# Common variables -{{.File.Name}}: helpers.proto -{{.File.Name | upper}}: HELPERS.PROTO -{{.File.Package | base | replace "." "-"}} dummy -{{$packageDir := .File.Name | dir}}{{$packageDir}} . -{{$packageName := .File.Name | base | replace ".proto" ""}}{{$packageName}} helpers -{{$packageImport := .File.Package | replace "." 
"_"}}{{$packageImport}} dummy -{{$namespacedPackage := .File.Package}}{{$namespacedPackage}} dummy -{{$currentFile := .File.Name | getProtoFile}}{{$currentFile}} -{{- /*{{- $currentPackageName := $currentFile.GoPkg.Name}}{{$currentPackageName}}*/}} -# TODO: more variables - -# Sprig: strings -{{trim " hello "}}: hello -{{trimAll "$" "$5.00"}}: 5.00 -{{trimSuffix "-" "hello-"}}: hello -{{upper "hello"}}: HELLO -{{lower "HELLO"}}: hello -{{title "hello world"}}: Hello World -{{untitle "Hello World"}}: hello world -{{repeat 3 "hello"}}: hellohellohello -{{substr 0 5 "hello world"}}: hello -{{nospace "hello w o r l d"}}: helloworld -{{trunc 5 "hello world"}}: hello -{{abbrev 5 "hello world"}}: he... -{{abbrevboth 5 10 "1234 5678 9123"}}: ...5678... -{{initials "First Try"}}: FT -{{randNumeric 3}}: 528 -{{- /*{{wrap 80 $someText}}*/}}: -{{wrapWith 5 "\t" "Hello World"}}: Hello World -{{contains "cat" "catch"}}: true -{{hasPrefix "cat" "catch"}}: true -{{cat "hello" "beautiful" "world"}}: hello beautiful world -{{- /*{{indent 4 $lots_of_text}}*/}}: -{{- /*{{indent 4 $lots_of_text}}*/}}: -{{"I Am Henry VIII" | replace " " "-"}}: I-Am-Henry-VIII -{{len .Service.Method | plural "one anchovy" "many anchovies"}}: many anchovies -{{snakecase "FirstName"}}: first_name -{{camelcase "http_server"}}: HttpServer -{{shuffle "hello"}}: holle -{{regexMatch "[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}" "test@acme.com"}}: true -{{- /*{{regexFindAll "[2,4,6,8]" "123456789"}}*/}}: -{{regexFind "[a-zA-Z][1-9]" "abcd1234"}}: d1 -{{regexReplaceAll "a(x*)b" "-ab-axxb-" "${1}W"}}: -W-xxW- -{{regexReplaceAllLiteral "a(x*)b" "-ab-axxb-" "${1}"}}: -${1}-${1}- -{{regexSplit "z+" "pizza" -1}}: [pi a] - -# Get one specific method on array method using index -{{ index .Service.Method 1 }}: name:"Iii" input_type:".dummy.Dummy2" output_type:".dummy.Dummy1" options:<> - -# Sprig: advanced -{{if contains "cat" "catch"}}yes{{else}}no{{end}}: yes -{{1 | plural "one anchovy" "many anchovies"}}: one anchovy -{{2 | plural "one anchovy" "many anchovies"}}: many anchovies -{{3 | plural "one anchovy" "many anchovies"}}: many anchovies - -# TODO: more sprig examples -# TODO: all built-in examples \ No newline at end of file diff --git a/examples/helpers/example.txt.tmpl b/examples/helpers/example.txt.tmpl deleted file mode 100644 index 21ed2ca..0000000 --- a/examples/helpers/example.txt.tmpl +++ /dev/null @@ -1,59 +0,0 @@ -# Common variables -{{`{{.File.Name}}`}}: {{.File.Name}} -{{`{{.File.Name | upper}}`}}: {{.File.Name | upper}} -{{`{{.File.Package | base | replace "." "-"}}`}} {{.File.Package | base | replace "." "-"}} -{{- /*{{`{{$file := .File}}{{$file}}`}} {{$file := .File}}{{$file}}*/}} -{{`{{$packageDir := .File.Name | dir}}{{$packageDir}}`}} {{$packageDir := .File.Name | dir}}{{$packageDir}} -{{`{{$packageName := .File.Name | base | replace ".proto" ""}}{{$packageName}}`}} {{$packageName := .File.Name | base | replace ".proto" ""}}{{$packageName}} -{{`{{$packageImport := .File.Package | replace "." "_"}}{{$packageImport}}`}} {{$packageImport := .File.Package | replace "." 
"_"}}{{$packageImport}} -{{`{{$namespacedPackage := .File.Package}}{{$namespacedPackage}}`}} {{$namespacedPackage := .File.Package}}{{$namespacedPackage}} -{{`{{$currentFile := .File.Name | getProtoFile}}{{$currentFile}}`}} {{$currentFile := .File.Name | getProtoFile}}{{$currentFile}} -{{`{{- /*{{- $currentPackageName := $currentFile.GoPkg.Name}}{{$currentPackageName}}*/}}`}} {{- /*{{- $currentPackageName := $currentFile.GoPkg.Name}}{{$currentPackageName}}*/}} -# TODO: more variables - -# Sprig: strings -{{`{{trim " hello "}}`}}: {{trim " hello "}} -{{`{{trimAll "$" "$5.00"}}`}}: {{trimAll "$" "$5.00"}} -{{`{{trimSuffix "-" "hello-"}}`}}: {{trimSuffix "-" "hello-"}} -{{`{{upper "hello"}}`}}: {{upper "hello"}} -{{`{{lower "HELLO"}}`}}: {{lower "HELLO"}} -{{`{{title "hello world"}}`}}: {{title "hello world"}} -{{`{{untitle "Hello World"}}`}}: {{untitle "Hello World"}} -{{`{{repeat 3 "hello"}}`}}: {{repeat 3 "hello"}} -{{`{{substr 0 5 "hello world"}}`}}: {{substr 0 5 "hello world"}} -{{`{{nospace "hello w o r l d"}}`}}: {{nospace "hello w o r l d"}} -{{`{{trunc 5 "hello world"}}`}}: {{trunc 5 "hello world"}} -{{`{{abbrev 5 "hello world"}}`}}: {{abbrev 5 "hello world"}} -{{`{{abbrevboth 5 10 "1234 5678 9123"}}`}}: {{abbrevboth 5 10 "1234 5678 9123"}} -{{`{{initials "First Try"}}`}}: {{initials "First Try"}} -{{`{{randNumeric 3}}`}}: {{randNumeric 3}} -{{`{{- /*{{wrap 80 $someText}}*/}}`}}: {{- /*{{wrap 80 $someText}}*/}} -{{`{{wrapWith 5 "\t" "Hello World"}}`}}: {{wrapWith 5 "\t" "Hello World"}} -{{`{{contains "cat" "catch"}}`}}: {{contains "cat" "catch"}} -{{`{{hasPrefix "cat" "catch"}}`}}: {{hasPrefix "cat" "catch"}} -{{`{{cat "hello" "beautiful" "world"}}`}}: {{cat "hello" "beautiful" "world"}} -{{`{{- /*{{indent 4 $lots_of_text}}*/}}`}}: {{- /*{{indent 4 $lots_of_text}}*/}} -{{`{{- /*{{indent 4 $lots_of_text}}*/}}`}}: {{- /*{{indent 4 $lots_of_text}}*/}} -{{`{{"I Am Henry VIII" | replace " " "-"}}`}}: {{"I Am Henry VIII" | replace " " "-"}} -{{`{{len .Service.Method | plural "one anchovy" "many anchovies"}}`}}: {{len .Service.Method | plural "one anchovy" "many anchovies"}} -{{`{{snakecase "FirstName"}}`}}: {{snakecase "FirstName"}} -{{`{{camelcase "http_server"}}`}}: {{camelcase "http_server"}} -{{`{{shuffle "hello"}}`}}: {{shuffle "hello"}} -{{`{{regexMatch "[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}" "test@acme.com"}}`}}: {{regexMatch "[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}" "test@acme.com"}} -{{`{{- /*{{regexFindAll "[2,4,6,8]" "123456789"}}*/}}`}}: {{- /*{{regexFindAll "[2,4,6,8]" "123456789"}}*/}} -{{`{{regexFind "[a-zA-Z][1-9]" "abcd1234"}}`}}: {{regexFind "[a-zA-Z][1-9]" "abcd1234"}} -{{`{{regexReplaceAll "a(x*)b" "-ab-axxb-" "${1}W"}}`}}: {{regexReplaceAll "a(x*)b" "-ab-axxb-" "${1}W"}} -{{`{{regexReplaceAllLiteral "a(x*)b" "-ab-axxb-" "${1}"}}`}}: {{regexReplaceAllLiteral "a(x*)b" "-ab-axxb-" "${1}"}} -{{`{{regexSplit "z+" "pizza" -1}}`}}: {{regexSplit "z+" "pizza" -1}} - -# Get one specific method on array method using index -{{`{{ index .Service.Method 1 }}`}}: {{ index .Service.Method 1 }} - -# Sprig: advanced -{{`{{if contains "cat" "catch"}}yes{{else}}no{{end}}`}}: {{if contains "cat" "catch"}}yes{{else}}no{{end}} -{{`{{1 | plural "one anchovy" "many anchovies"}}`}}: {{1 | plural "one anchovy" "many anchovies"}} -{{`{{2 | plural "one anchovy" "many anchovies"}}`}}: {{2 | plural "one anchovy" "many anchovies"}} -{{`{{3 | plural "one anchovy" "many anchovies"}}`}}: {{3 | plural "one anchovy" "many anchovies"}} - -# TODO: more sprig examples -# TODO: all built-in 
examples \ No newline at end of file diff --git a/examples/helpers/helpers.proto b/examples/helpers/helpers.proto deleted file mode 100644 index 6dbf122..0000000 --- a/examples/helpers/helpers.proto +++ /dev/null @@ -1,25 +0,0 @@ -syntax = "proto3"; - -package dummy; - -option go_package = "moul.io/protoc-gen-gotemplate/examples/helpers"; - -message Dummy1 { - float aaa = 1; - string bbb = 2; - int32 ccc = 3; - int64 ddd = 4; - repeated string eee = 5; -} - -message Dummy2 { - float fff = 1; - Dummy1 ggg = 2; -} - -message Dummy3 {} - -service DummyService { - rpc Hhh(Dummy1) returns (Dummy2) {} - rpc Iii(Dummy2) returns (Dummy1) {} -} diff --git a/examples/import/Makefile b/examples/import/Makefile deleted file mode 100644 index 9cf5779..0000000 --- a/examples/import/Makefile +++ /dev/null @@ -1,18 +0,0 @@ -.PHONY: build -build: - mkdir -p output - - # generate pb.go inluding imported proto - protoc --go_out=Mproto/common.proto=moul.io/protoc-gen-gotemplate/examples/import/output/models/common:./output proto/article.proto - protoc --go_out=,plugins=grpc:./output proto/common.proto - - # build our go file based on our template - protoc -I. --gotemplate_out=template_dir=templates,debug=true:output proto/article.proto - -.PHONY: re -re: clean build - - -.PHONY: clean -clean: - rm -rf output diff --git a/examples/import/output/models/article/article.pb.go b/examples/import/output/models/article/article.pb.go deleted file mode 100644 index 16b2651..0000000 --- a/examples/import/output/models/article/article.pb.go +++ /dev/null @@ -1,141 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: proto/article.proto - -/* -Package article is a generated protocol buffer package. - -It is generated from these files: - proto/article.proto - -It has these top-level messages: - GetArticleRequest - GetArticleResponse - Article -*/ -package article - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import common "moul.io/protoc-gen-gotemplate/examples/import/output/models/common" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type GetArticleRequest struct { - Getarticle *common.GetArticle `protobuf:"bytes,1,opt,name=getarticle" json:"getarticle,omitempty"` -} - -func (m *GetArticleRequest) Reset() { *m = GetArticleRequest{} } -func (m *GetArticleRequest) String() string { return proto.CompactTextString(m) } -func (*GetArticleRequest) ProtoMessage() {} -func (*GetArticleRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *GetArticleRequest) GetGetarticle() *common.GetArticle { - if m != nil { - return m.Getarticle - } - return nil -} - -type GetArticleResponse struct { - Article *Article `protobuf:"bytes,1,opt,name=article" json:"article,omitempty"` - // The generated output should write []*GetArticleResponse_Storage.Storage for this field. 
- Storages []*GetArticleResponse_Storage `protobuf:"bytes,2,rep,name=storages" json:"storages,omitempty"` -} - -func (m *GetArticleResponse) Reset() { *m = GetArticleResponse{} } -func (m *GetArticleResponse) String() string { return proto.CompactTextString(m) } -func (*GetArticleResponse) ProtoMessage() {} -func (*GetArticleResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *GetArticleResponse) GetArticle() *Article { - if m != nil { - return m.Article - } - return nil -} - -func (m *GetArticleResponse) GetStorages() []*GetArticleResponse_Storage { - if m != nil { - return m.Storages - } - return nil -} - -type GetArticleResponse_Storage struct { - Code string `protobuf:"bytes,1,opt,name=code" json:"code,omitempty"` -} - -func (m *GetArticleResponse_Storage) Reset() { *m = GetArticleResponse_Storage{} } -func (m *GetArticleResponse_Storage) String() string { return proto.CompactTextString(m) } -func (*GetArticleResponse_Storage) ProtoMessage() {} -func (*GetArticleResponse_Storage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1, 0} } - -func (m *GetArticleResponse_Storage) GetCode() string { - if m != nil { - return m.Code - } - return "" -} - -type Article struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` -} - -func (m *Article) Reset() { *m = Article{} } -func (m *Article) String() string { return proto.CompactTextString(m) } -func (*Article) ProtoMessage() {} -func (*Article) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } - -func (m *Article) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *Article) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func init() { - proto.RegisterType((*GetArticleRequest)(nil), "company.GetArticleRequest") - proto.RegisterType((*GetArticleResponse)(nil), "company.GetArticleResponse") - proto.RegisterType((*GetArticleResponse_Storage)(nil), "company.GetArticleResponse.Storage") - proto.RegisterType((*Article)(nil), "company.Article") -} - -func init() { proto.RegisterFile("proto/article.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 256 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x41, 0x4b, 0xc4, 0x30, - 0x10, 0x85, 0x6d, 0x15, 0xab, 0xb3, 0x20, 0x3a, 0x82, 0x94, 0x8a, 0xb0, 0xd4, 0xcb, 0x22, 0x18, - 0xa1, 0x1e, 0x3d, 0x88, 0x5e, 0x7a, 0xaf, 0x78, 0xf1, 0x56, 0xd3, 0x61, 0x29, 0x6c, 0x3a, 0xb5, - 0x89, 0x82, 0xff, 0xc6, 0x9f, 0x2a, 0x9b, 0x4c, 0xd7, 0x8a, 0xb2, 0xa7, 0x4e, 0xf3, 0xbe, 0xf7, - 0xf2, 0xc8, 0xc0, 0x69, 0x3f, 0xb0, 0xe3, 0x9b, 0x7a, 0x70, 0xad, 0x5e, 0x91, 0xf2, 0x7f, 0x98, - 0x68, 0x36, 0x7d, 0xdd, 0x7d, 0x66, 0x18, 0x54, 0xcd, 0xc6, 0x70, 0x17, 0xc4, 0xbc, 0x84, 0x93, - 0x92, 0xdc, 0x43, 0x30, 0x54, 0xf4, 0xf6, 0x4e, 0xd6, 0x61, 0x01, 0xb0, 0x24, 0x27, 0x29, 0x69, - 0x34, 0x8f, 0x16, 0xb3, 0x02, 0x95, 0xf8, 0x26, 0xf8, 0x84, 0xca, 0xbf, 0x22, 0xc0, 0x69, 0x92, - 0xed, 0xb9, 0xb3, 0x84, 0x57, 0x90, 0xfc, 0xce, 0x39, 0x56, 0x52, 0x47, 0x8d, 0xe8, 0x08, 0xe0, - 0x3d, 0x1c, 0x58, 0xc7, 0x43, 0xbd, 0x24, 0x9b, 0xc6, 0xf3, 0xdd, 0xc5, 0xac, 0xb8, 0xdc, 0xc0, - 0x7f, 0xa3, 0xd5, 0x53, 0x60, 0xab, 0x8d, 0x29, 0xbb, 0x80, 0x44, 0x0e, 0x11, 0x61, 0x4f, 0x73, - 0x13, 0x2e, 0x3d, 0xac, 0xfc, 0x9c, 0x5f, 0x43, 0x22, 0x19, 0x78, 0x04, 0x71, 0xdb, 0x88, 0x18, - 0xb7, 0xcd, 0x1a, 0xef, 0x6a, 0x43, 0x69, 0x1c, 0xf0, 0xf5, 0x5c, 0x3c, 0x03, 0x48, 0x33, 0xfb, 
- 0xa1, 0xb1, 0x04, 0xf8, 0xe9, 0x80, 0xd9, 0xbf, 0xc5, 0xfc, 0xeb, 0x65, 0xe7, 0x5b, 0x4a, 0xe7, - 0x3b, 0x8f, 0xe9, 0xcb, 0x99, 0xe1, 0x86, 0x56, 0x76, 0x5c, 0xd3, 0x9d, 0x7c, 0x5f, 0xf7, 0xfd, - 0x4a, 0x6e, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x93, 0xb5, 0x4a, 0x95, 0xc6, 0x01, 0x00, 0x00, -} diff --git a/examples/import/output/models/common/common.pb.go b/examples/import/output/models/common/common.pb.go deleted file mode 100644 index 6cfb9ba..0000000 --- a/examples/import/output/models/common/common.pb.go +++ /dev/null @@ -1,69 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: proto/common.proto - -/* -Package common is a generated protocol buffer package. - -It is generated from these files: - proto/common.proto - -It has these top-level messages: - GetArticle -*/ -package common - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type GetArticle struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` - Tenant string `protobuf:"bytes,2,opt,name=tenant" json:"tenant,omitempty"` -} - -func (m *GetArticle) Reset() { *m = GetArticle{} } -func (m *GetArticle) String() string { return proto.CompactTextString(m) } -func (*GetArticle) ProtoMessage() {} -func (*GetArticle) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *GetArticle) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -func (m *GetArticle) GetTenant() string { - if m != nil { - return m.Tenant - } - return "" -} - -func init() { - proto.RegisterType((*GetArticle)(nil), "common.GetArticle") -} - -func init() { proto.RegisterFile("proto/common.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 110 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x2a, 0x28, 0xca, 0x2f, - 0xc9, 0xd7, 0x4f, 0xce, 0xcf, 0xcd, 0xcd, 0xcf, 0xd3, 0x03, 0x73, 0x84, 0xd8, 0x20, 0x3c, 0x25, - 0x13, 0x2e, 0x2e, 0xf7, 0xd4, 0x12, 0xc7, 0xa2, 0x92, 0xcc, 0xe4, 0x9c, 0x54, 0x21, 0x3e, 0x2e, - 0xa6, 0xcc, 0x14, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0xa6, 0xcc, 0x14, 0x21, 0x31, 0x2e, - 0xb6, 0x92, 0xd4, 0xbc, 0xc4, 0xbc, 0x12, 0x09, 0x26, 0xb0, 0x18, 0x94, 0xe7, 0x24, 0x16, 0x25, - 0x92, 0x9b, 0x9f, 0x92, 0x9a, 0x53, 0x0c, 0x35, 0xd4, 0x1a, 0x42, 0x25, 0xb1, 0x81, 0x0d, 0x37, - 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x89, 0x5a, 0x1d, 0x73, 0x72, 0x00, 0x00, 0x00, -} diff --git a/examples/import/output/output.go b/examples/import/output/output.go deleted file mode 100644 index e651cda..0000000 --- a/examples/import/output/output.go +++ /dev/null @@ -1,26 +0,0 @@ -// Code generated by protoc-gen-gotemplate -package company - -import ( - "context" - - "moul.io/protoc-gen-gotemplate/examples/import/output/models/article" - "moul.io/protoc-gen-gotemplate/examples/import/output/models/common" -) - -type Repository interface { - GetArticle(getarticle *common.GetArticle) (*company.Article, []*company.Storage, error) -} - -// ------------------------- Public SDK ----------------------------- - -// GetArticle : 
proto: missing extension proto: missing extension -func (sdk *Sdk) GetArticle(ctx context.Context, - getarticle *article.GetArticle, token, requestID string) (article *article.Article, storages []*article.GetArticleResponse_Storage, err error) { - - out := &pb.GetArticleResponse{} - _ = out - - return out.Article, out.Storages, nil - -} diff --git a/examples/import/proto/article.proto b/examples/import/proto/article.proto deleted file mode 100644 index 2c8858f..0000000 --- a/examples/import/proto/article.proto +++ /dev/null @@ -1,31 +0,0 @@ -syntax = "proto3"; - -package company; - -option go_package = "models/article;article"; - -import "proto/common.proto"; - -message GetArticleRequest { - common.GetArticle getarticle = 1; -} - - -message GetArticleResponse { - Article article = 1; - - message Storage { - string code = 1; - } - // The generated output should write []*GetArticleResponse_Storage.Storage for this field. - repeated Storage storages = 2; -} - -message Article{ - string id = 1; - string name = 2; -} - -service articlesvc { - rpc GetArticle (GetArticleRequest) returns (GetArticleResponse){} -} \ No newline at end of file diff --git a/examples/import/proto/common.proto b/examples/import/proto/common.proto deleted file mode 100644 index 4603417..0000000 --- a/examples/import/proto/common.proto +++ /dev/null @@ -1,10 +0,0 @@ -syntax = "proto3"; - -package common; - -option go_package = "models/common;common"; - -message GetArticle{ - string id = 1; - string tenant = 2; -} diff --git a/examples/import/templates/output.go.tmpl b/examples/import/templates/output.go.tmpl deleted file mode 100644 index 219a3cd..0000000 --- a/examples/import/templates/output.go.tmpl +++ /dev/null @@ -1,41 +0,0 @@ -// Code generated by protoc-gen-gotemplate -{{- $file := .File}} -package {{.File.Package}} - -import ( - "moul.io/protoc-gen-gotemplate/examples/import/output/models/article" - "moul.io/protoc-gen-gotemplate/examples/import/output/models/common" -) - -type Repository interface { - {{range $m := .Service.Method}}{{with $t := $m.InputType | getMessageType $.File}} {{$m.Name}}({{range $f := $t.Field}}{{$f.Name|lowerCamelCase}} {{$f| goTypeWithPackage }} {{end}}) ({{with $out := $m.OutputType | getMessageType $.File}}{{range $f := $out.Field}}{{$f | goTypeWithPackage}}, {{end}}{{end}} error){{end}}{{end}} -} - - - - - - - -// ------------------------- Public SDK ----------------------------- -{{$pkg := "pb"}} - - -{{range $m := .Service.Method}} -{{with $t := $m.InputType | getMessageType $.File}} -{{if and (not $m.ServerStreaming) (not $m.ClientStreaming)}} -{{/* ----------------------------- nominal case ---------------------------- */}} -// {{$m.Name}} : {{$m | httpVerb}} {{$m | httpPath}} -func (sdk *Sdk) {{$m.Name}}(ctx context.Context, {{if $t.OneofDecl}} req *{{$pkg}}.{{$m.Name}}Request,{{else}}{{range $f := $t.Field}} - {{$f.Name|lowerCamelCase}} {{$f| goTypeWithGoPackage $.File}},{{end}}{{end}} token, requestID string)({{with $out := $m.OutputType | getMessageType $.File}}{{range $f := $out.Field}}{{$f.Name|lowerCamelCase}} {{$f | goTypeWithGoPackage $.File}}, {{end}}{{end}}err error) { - - out := &{{$pkg}}.{{$m.Name}}Response{} - _ = out - -{{with $out := $m.OutputType | getMessageType $.File}} - return {{range $f := $out.Field}}out.{{$f.Name|camelCase}}, {{end}}nil -{{end}} {{/* with */}} -} - -{{end}} {{/* streaming ifs */}} -{{end}}{{end}} {{/* range with */}} diff --git a/examples/k8s/Makefile b/examples/k8s/Makefile deleted file mode 100644 index e9af61d..0000000 --- 
a/examples/k8s/Makefile +++ /dev/null @@ -1,10 +0,0 @@ -SOURCES := $(shell find . -name "*.proto" -not -path ./vendor/\*) - -TARGETS_K8S := $(foreach source, $(SOURCES), $(source)_k8s) - -.PHONY: build -build: $(TARGETS_K8S) - -$(TARGETS_K8S): %_k8s: - @mkdir -p $(dir $*)gen - protoc $(PROTOC_OPTS) --gotemplate_out=debug=true,template_dir=./templates:$(dir $*)gen "$*" \ No newline at end of file diff --git a/examples/k8s/gen/nginx/dpl.yaml b/examples/k8s/gen/nginx/dpl.yaml deleted file mode 100644 index 342d1b0..0000000 --- a/examples/k8s/gen/nginx/dpl.yaml +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: nginx - labels: - method: Hello-World-nginx -spec: - replicas: 3 - template: - metadata: - labels: - app: nginx - spec: - containers: - - name: nginx - image: nginx - ports: - - containerPort: 80 \ No newline at end of file diff --git a/examples/k8s/gen/nginx/svc.yaml b/examples/k8s/gen/nginx/svc.yaml deleted file mode 100644 index 485e0e7..0000000 --- a/examples/k8s/gen/nginx/svc.yaml +++ /dev/null @@ -1,10 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: nginx -spec: - selector: - app: nginx - ports: - - name: http - port: 80 \ No newline at end of file diff --git a/examples/k8s/nginx.proto b/examples/k8s/nginx.proto deleted file mode 100644 index 2b2d776..0000000 --- a/examples/k8s/nginx.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package nginx; - -message Empty { -} - -service Nginx { - rpc Hello(Empty) returns (Empty) {} - rpc World(Empty) returns (Empty) {} -} \ No newline at end of file diff --git a/examples/k8s/templates/{{.File.Package}}/dpl.yaml.tmpl b/examples/k8s/templates/{{.File.Package}}/dpl.yaml.tmpl deleted file mode 100644 index 946526c..0000000 --- a/examples/k8s/templates/{{.File.Package}}/dpl.yaml.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: {{.File.Package}} - labels: - method: {{range .Service.Method}}{{.Name}}-{{end}}{{.File.Package}} -spec: - replicas: 3 - template: - metadata: - labels: - app: {{.File.Package}} - spec: - containers: - - name: nginx - image: nginx - ports: - - containerPort: 80 \ No newline at end of file diff --git a/examples/k8s/templates/{{.File.Package}}/svc.yaml.tmpl b/examples/k8s/templates/{{.File.Package}}/svc.yaml.tmpl deleted file mode 100644 index 1fc1d86..0000000 --- a/examples/k8s/templates/{{.File.Package}}/svc.yaml.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: {{.File.Package}} -spec: - selector: - app: {{.File.Package}} - ports: - - name: http - port: 80 \ No newline at end of file diff --git a/examples/single-package-mode/Makefile b/examples/single-package-mode/Makefile deleted file mode 100644 index d5df807..0000000 --- a/examples/single-package-mode/Makefile +++ /dev/null @@ -1,26 +0,0 @@ -.PHONY: re -re: clean build test - -.PHONY: build -build: - @mkdir -p output - - @# proto-gen-go - protoc -I./proto --go_out=plugins=grpc:output proto/aaa/aaa.proto - protoc -I./proto --go_out=plugins=grpc:output proto/bbb/bbb.proto - @rm -rf output/aaa output/bbb - @mv output/moul.io/protoc-gen-gotemplate/examples/single-package-mode/output/* output/ - @rm -rf output/github.com - - @# protoc-gen-gotemplate - protoc -I./proto --gotemplate_out=template_dir=templates,single-package-mode=true:output proto/bbb/bbb.proto - gofmt -w . - -.PHONY: test -test: - go test -i ./output/... - go test -v ./output/... 
- -.PHONY: clean -clean: - rm -rf output diff --git a/examples/single-package-mode/output/aaa/aaa.pb.go b/examples/single-package-mode/output/aaa/aaa.pb.go deleted file mode 100644 index 1db6c9f..0000000 --- a/examples/single-package-mode/output/aaa/aaa.pb.go +++ /dev/null @@ -1,84 +0,0 @@ -// Code generated by protoc-gen-go. -// source: aaa/aaa.proto -// DO NOT EDIT! - -/* -Package aaa is a generated protocol buffer package. - -It is generated from these files: - aaa/aaa.proto - -It has these top-level messages: - AaaRequest - AaaReply -*/ -package aaa - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type AaaRequest struct { - Blah string `protobuf:"bytes,1,opt,name=blah" json:"blah,omitempty"` -} - -func (m *AaaRequest) Reset() { *m = AaaRequest{} } -func (m *AaaRequest) String() string { return proto.CompactTextString(m) } -func (*AaaRequest) ProtoMessage() {} -func (*AaaRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *AaaRequest) GetBlah() string { - if m != nil { - return m.Blah - } - return "" -} - -type AaaReply struct { - Error string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` -} - -func (m *AaaReply) Reset() { *m = AaaReply{} } -func (m *AaaReply) String() string { return proto.CompactTextString(m) } -func (*AaaReply) ProtoMessage() {} -func (*AaaReply) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *AaaReply) GetError() string { - if m != nil { - return m.Error - } - return "" -} - -func init() { - proto.RegisterType((*AaaRequest)(nil), "the.aaa.package.AaaRequest") - proto.RegisterType((*AaaReply)(nil), "the.aaa.package.AaaReply") -} - -func init() { proto.RegisterFile("aaa/aaa.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 172 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x2c, 0x8e, 0x31, 0x0b, 0xc2, 0x30, - 0x10, 0x46, 0x29, 0xa8, 0x68, 0x40, 0x84, 0xe0, 0xe0, 0x58, 0x3a, 0xb9, 0xa4, 0x19, 0xfc, 0x05, - 0xba, 0x8b, 0xd0, 0xd1, 0xed, 0x5a, 0x8f, 0xb4, 0x78, 0xe9, 0xc5, 0xf4, 0x02, 0xfa, 0xef, 0xc5, - 0xd8, 0xed, 0x1e, 0xbc, 0xe3, 0x7b, 0x6a, 0x0b, 0x00, 0x16, 0x00, 0xea, 0x10, 0x59, 0x58, 0xef, - 0xa4, 0xc7, 0x3a, 0x23, 0x74, 0x4f, 0x70, 0x58, 0x95, 0x4a, 0x9d, 0x01, 0x1a, 0x7c, 0x25, 0x9c, - 0x44, 0x6b, 0xb5, 0x68, 0x09, 0xfa, 0x43, 0x51, 0x16, 0xc7, 0x4d, 0x93, 0xef, 0xaa, 0x54, 0xeb, - 0x6c, 0x04, 0xfa, 0xe8, 0xbd, 0x5a, 0x62, 0x8c, 0x1c, 0x67, 0xe1, 0x0f, 0x97, 0xdb, 0xfd, 0xea, - 0x06, 0xe9, 0x53, 0x5b, 0x77, 0xec, 0xad, 0xe7, 0x44, 0x36, 0xaf, 0x75, 0xc6, 0xe1, 0x68, 0x1c, - 0x0b, 0xfa, 0x40, 0x20, 0x68, 0xf1, 0x0d, 0x3e, 0x10, 0x4e, 0x76, 0x1a, 0x46, 0x47, 0x68, 0xe6, - 0x08, 0xe3, 0xf9, 0x81, 0x96, 0x93, 0x84, 0x24, 0xbf, 0xd6, 0x76, 0x95, 0xdf, 0x4f, 0xdf, 0x00, - 0x00, 0x00, 0xff, 0xff, 0x93, 0x24, 0x48, 0x9c, 0xbd, 0x00, 0x00, 0x00, -} diff --git a/examples/single-package-mode/output/bbb/bbb.pb.go b/examples/single-package-mode/output/bbb/bbb.pb.go deleted file mode 100644 index 333920f..0000000 --- 
a/examples/single-package-mode/output/bbb/bbb.pb.go +++ /dev/null @@ -1,200 +0,0 @@ -// Code generated by protoc-gen-go. -// source: bbb/bbb.proto -// DO NOT EDIT! - -/* -Package bbb is a generated protocol buffer package. - -It is generated from these files: - bbb/bbb.proto - -It has these top-level messages: - BbbRequest - BbbReply -*/ -package bbb - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import the_aaa_package "moul.io/protoc-gen-gotemplate/examples/single-package-mode/output/aaa" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type BbbRequest struct { - Enable bool `protobuf:"varint,1,opt,name=enable" json:"enable,omitempty"` -} - -func (m *BbbRequest) Reset() { *m = BbbRequest{} } -func (m *BbbRequest) String() string { return proto.CompactTextString(m) } -func (*BbbRequest) ProtoMessage() {} -func (*BbbRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *BbbRequest) GetEnable() bool { - if m != nil { - return m.Enable - } - return false -} - -type BbbReply struct { - Done bool `protobuf:"varint,1,opt,name=done" json:"done,omitempty"` -} - -func (m *BbbReply) Reset() { *m = BbbReply{} } -func (m *BbbReply) String() string { return proto.CompactTextString(m) } -func (*BbbReply) ProtoMessage() {} -func (*BbbReply) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *BbbReply) GetDone() bool { - if m != nil { - return m.Done - } - return false -} - -func init() { - proto.RegisterType((*BbbRequest)(nil), "the.bbb.package.BbbRequest") - proto.RegisterType((*BbbReply)(nil), "the.bbb.package.BbbReply") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// Client API for BbbService service - -type BbbServiceClient interface { - Aaa(ctx context.Context, in *the_aaa_package.AaaRequest, opts ...grpc.CallOption) (*the_aaa_package.AaaReply, error) - Bbb(ctx context.Context, in *BbbRequest, opts ...grpc.CallOption) (*BbbReply, error) -} - -type bbbServiceClient struct { - cc *grpc.ClientConn -} - -func NewBbbServiceClient(cc *grpc.ClientConn) BbbServiceClient { - return &bbbServiceClient{cc} -} - -func (c *bbbServiceClient) Aaa(ctx context.Context, in *the_aaa_package.AaaRequest, opts ...grpc.CallOption) (*the_aaa_package.AaaReply, error) { - out := new(the_aaa_package.AaaReply) - err := grpc.Invoke(ctx, "/the.bbb.package.BbbService/Aaa", in, out, c.cc, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *bbbServiceClient) Bbb(ctx context.Context, in *BbbRequest, opts ...grpc.CallOption) (*BbbReply, error) { - out := new(BbbReply) - err := grpc.Invoke(ctx, "/the.bbb.package.BbbService/Bbb", in, out, c.cc, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -// Server API for BbbService service - -type BbbServiceServer interface { - Aaa(context.Context, *the_aaa_package.AaaRequest) (*the_aaa_package.AaaReply, error) - Bbb(context.Context, *BbbRequest) (*BbbReply, error) -} - -func RegisterBbbServiceServer(s *grpc.Server, srv BbbServiceServer) { - s.RegisterService(&_BbbService_serviceDesc, srv) -} - -func _BbbService_Aaa_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(the_aaa_package.AaaRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(BbbServiceServer).Aaa(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/the.bbb.package.BbbService/Aaa", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(BbbServiceServer).Aaa(ctx, req.(*the_aaa_package.AaaRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _BbbService_Bbb_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(BbbRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(BbbServiceServer).Bbb(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/the.bbb.package.BbbService/Bbb", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(BbbServiceServer).Bbb(ctx, req.(*BbbRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _BbbService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "the.bbb.package.BbbService", - HandlerType: (*BbbServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Aaa", - Handler: _BbbService_Aaa_Handler, - }, - { - MethodName: "Bbb", - Handler: _BbbService_Bbb_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "bbb/bbb.proto", -} - -func init() { proto.RegisterFile("bbb/bbb.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 242 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0x41, 0x4b, 0x03, 0x31, - 0x10, 0x85, 0x59, 0x2a, 0xa5, 0x04, 0x8a, 0x90, 0x83, 0x68, 0x05, 0x91, 0xe2, 0xc1, 0xcb, 0x26, - 0xa0, 0xe7, 0x1e, 0xba, 0x77, 0x11, 0xea, 0xcd, 0xdb, 0xcc, 0x76, 0x48, 0x17, 0x93, 0x9d, 0xd8, - 0x9d, 0x88, 0xfd, 0x0b, 0xfe, 0x6a, 0xd9, 0x34, 0xa2, 0x60, 0x6f, 0x19, 0xde, 0xbc, 0xf7, 0xbd, - 0x8c, 0x9a, 0x23, 0xa2, 0x45, 0x44, 0x13, 0xf7, 0x2c, 0xac, 0xcf, 0x65, 0x47, 0x26, 0x8f, 0xd0, - 0xbe, 0x81, 0xa3, 0xc5, 0x1c, 0x00, 0x2c, 0x00, 0x1c, 0xf5, 0xe5, 0x9d, 0x52, 0x0d, 0xe2, 0x86, - 0xde, 0x13, 0x0d, 0xa2, 0x2f, 0xd4, 0x94, 0x7a, 0x40, 0x4f, 0x97, 0xd5, 0x6d, 0x75, 0x3f, 0xdb, - 0x94, 0x69, 0x79, 0xa3, 0x66, 0x79, 0x2b, 0xfa, 0x83, 0xd6, 0xea, 0x6c, 0xcb, 0xfd, 0xcf, 0x46, - 0x7e, 0x3f, 0x7c, 0x55, 0x39, 0xe6, 0x85, 0xf6, 0x1f, 0x5d, 0x4b, 0x7a, 0xa5, 0x26, 0x6b, 0x00, - 0x7d, 0x6d, 0x46, 0x78, 0x66, 0x1d, 0xe1, 0x66, 0x0d, 0x50, 0x50, 0x8b, 0xab, 0xd3, 0xe2, 0x48, - 0x58, 0xa9, 0x49, 0x83, 0x58, 0xec, 0x7f, 0xba, 0x9b, 0xdf, 0xa6, 0xc5, 0xfe, 0x4f, 0x8c, 0xfe, - 0xd0, 0x3c, 0xbf, 0x3e, 0xb9, 0x4e, 0x76, 0x09, 0x4d, 0xcb, 0xc1, 0x06, 0x4e, 0xde, 0xe6, 0xbf, - 0xb6, 0xb5, 0xa3, 0xbe, 0x76, 0x2c, 0x14, 0xa2, 0x07, 0x21, 0x4b, 0x9f, 0x10, 0xa2, 0xa7, 0xc1, - 0x0e, 0x5d, 0xef, 0x3c, 0xd5, 0x25, 0xa9, 0x0e, 0xbc, 0x25, 0xcb, 0x49, 0x62, 0x92, 0xf1, 0x92, 
- 0x38, 0xcd, 0xf6, 0xc7, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe0, 0xe1, 0x26, 0xcb, 0x5b, 0x01, - 0x00, 0x00, -} diff --git a/examples/single-package-mode/output/bbb/service.go b/examples/single-package-mode/output/bbb/service.go deleted file mode 100644 index e5f956d..0000000 --- a/examples/single-package-mode/output/bbb/service.go +++ /dev/null @@ -1,19 +0,0 @@ -// file generated with protoc-gen-gotemplate -package bbb - -import ( - "fmt" - - "golang.org/x/net/context" - "moul.io/protoc-gen-gotemplate/examples/single-package-mode/output/aaa" -) - -type Service struct{} - -func (service Service) Aaa(ctx context.Context, input *aaa.AaaRequest) (*aaa.AaaReply, error) { - return nil, fmt.Errorf("method not implemented") -} - -func (service Service) Bbb(ctx context.Context, input *BbbRequest) (*BbbReply, error) { - return nil, fmt.Errorf("method not implemented") -} diff --git a/examples/single-package-mode/proto/aaa/aaa.proto b/examples/single-package-mode/proto/aaa/aaa.proto deleted file mode 100644 index 2bc1230..0000000 --- a/examples/single-package-mode/proto/aaa/aaa.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -option go_package = "moul.io/protoc-gen-gotemplate/examples/single-package-mode/output/aaa"; - -package the.aaa.package; - -message AaaRequest { - string blah = 1; -} -message AaaReply { - string error = 1; -} diff --git a/examples/single-package-mode/proto/bbb/bbb.proto b/examples/single-package-mode/proto/bbb/bbb.proto deleted file mode 100644 index 2da408b..0000000 --- a/examples/single-package-mode/proto/bbb/bbb.proto +++ /dev/null @@ -1,19 +0,0 @@ -syntax = "proto3"; - -package the.bbb.package; - -option go_package = "moul.io/protoc-gen-gotemplate/examples/single-package-mode/output/bbb"; - -import "aaa/aaa.proto"; - -service BbbService { - rpc Aaa(the.aaa.package.AaaRequest) returns (the.aaa.package.AaaReply); - rpc Bbb(BbbRequest) returns (BbbReply); -} - -message BbbRequest { - bool enable = 1; -} -message BbbReply { - bool done = 1; -} diff --git a/examples/single-package-mode/templates/%7B%7B.File.Name%7Cdir%7D%7D/service.go.tmpl b/examples/single-package-mode/templates/%7B%7B.File.Name%7Cdir%7D%7D/service.go.tmpl deleted file mode 100644 index 22ea037..0000000 --- a/examples/single-package-mode/templates/%7B%7B.File.Name%7Cdir%7D%7D/service.go.tmpl +++ /dev/null @@ -1,26 +0,0 @@ -// file generated with protoc-gen-gotemplate -package {{.File.Name | dir}} - -{{- $file := .File}} -{{- $currentFile := $file.Name | getProtoFile}} - -import ( - "fmt" - - "golang.org/x/net/context" - - {{- range .File.Dependency}} - {{- $dependency := . | getProtoFile}} - {{$dependency.GoPkg}} - {{end}} -) - -type Service struct {} - -{{- range .Service.Method}} -{{- $in := .InputType | getMessageType $file}} -{{- $out := .OutputType | getMessageType $file}} -func (service Service) {{.Name}}(ctx context.Context, input *{{$in.GoType $currentFile.GoPkg.Path}}) (*{{$out.GoType $currentFile.GoPkg.Path}}, error) { - return nil, fmt.Errorf("method not implemented") -} -{{end}} diff --git a/examples/sitemap/Makefile b/examples/sitemap/Makefile deleted file mode 100644 index d9d5264..0000000 --- a/examples/sitemap/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -.PHONY: build -build: - protoc -I. --gotemplate_out=template_dir=.:. 
sitemap.proto diff --git a/examples/sitemap/sitemap.proto b/examples/sitemap/sitemap.proto deleted file mode 100644 index af5e9e7..0000000 --- a/examples/sitemap/sitemap.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -package sitemap; - -service DummyService { - rpc Posts(Request) returns (Response) {} - rpc Authors(Request) returns (Response) {} - rpc Comments(Request) returns (Response) {} -} - -message Request {} -message Response {} \ No newline at end of file diff --git a/examples/sitemap/sitemap.xml b/examples/sitemap/sitemap.xml deleted file mode 100644 index 9c6bc27..0000000 --- a/examples/sitemap/sitemap.xml +++ /dev/null @@ -1,21 +0,0 @@ -<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> - - <url> - <loc>/posts</loc> - <priority>0.5</priority> - <changefreq>monthly</changefreq> - <lastmod>2017-05-19</lastmod> - </url> - <url> - <loc>/authors</loc> - <priority>0.5</priority> - <changefreq>monthly</changefreq> - <lastmod>2017-05-19</lastmod> - </url> - <url> - <loc>/comments</loc> - <priority>0.5</priority> - <changefreq>monthly</changefreq> - <lastmod>2017-05-19</lastmod> - </url> -</urlset> diff --git a/examples/sitemap/sitemap.xml.tmpl b/examples/sitemap/sitemap.xml.tmpl deleted file mode 100644 index 8bb4511..0000000 --- a/examples/sitemap/sitemap.xml.tmpl +++ /dev/null @@ -1,9 +0,0 @@ -<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> -{{range .Service.Method}} - <url> - <loc>/{{.Name | lower}}</loc> - <priority>0.5</priority> - <changefreq>monthly</changefreq> - <lastmod>{{now | date "2006-01-02"}}</lastmod> - </url>{{end}} -</urlset> diff --git a/examples/time/Makefile b/examples/time/Makefile deleted file mode 100644 index b0f98f5..0000000 --- a/examples/time/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -.PHONY: build -build: - mkdir -p output - protoc -I. --gotemplate_out=template_dir=templates,debug=true:output proto/*.proto - - -.PHONY: re -re: clean build - - -.PHONY: clean -clean: - rm -rf output diff --git a/examples/time/output/time.go b/examples/time/output/time.go deleted file mode 100644 index 56b6386..0000000 --- a/examples/time/output/time.go +++ /dev/null @@ -1,10 +0,0 @@ -// Code generated by protoc-gen-gotemplate -package foo - -import ( - "github.com/golang/protobuf/ptypes/timestamp" -) - -type Repository interface { - GetFoo(timestamp *timestamp.Timestamp) (*timestamp.Timestamp, error) -} diff --git a/examples/time/proto/time.proto b/examples/time/proto/time.proto deleted file mode 100644 index e5d1d6a..0000000 --- a/examples/time/proto/time.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -package foo; - -import "google/protobuf/timestamp.proto"; - -message GetFooRequest { google.protobuf.Timestamp timestamp = 1;} -message GetFooResponse { string result = 1;} - -service foosvc { - rpc GetFoo (GetFooRequest) returns (GetFooResponse){} -} \ No newline at end of file diff --git a/examples/time/templates/time.go.tmpl b/examples/time/templates/time.go.tmpl deleted file mode 100644 index 1a52a96..0000000 --- a/examples/time/templates/time.go.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -// Code generated by protoc-gen-gotemplate -package {{.File.Package}} - -import ( - "github.com/golang/protobuf/ptypes/timestamp" -) - -type Repository interface { - {{range $m := .Service.Method}}{{with $t := $m.InputType | getMessageType $.File}} {{$m.Name}}({{range $f := $t.Field}}{{$f.Name|lowerCamelCase}} {{$f| goTypeWithPackage }} {{end}}) ({{with $out := $m.OutputType | getMessageType $.File}}{{range $f := $out.Field}}{{$f | goTypeWithPackage}}, {{end}}{{end}} error){{end}}{{end}} -} \ No newline at end of file diff --git a/generate.go b/generate.go new file mode 100644 index 0000000..f7fe84c --- /dev/null +++ b/generate.go @@ -0,0 +1,3 @@ +package main + +//go:generate go run -tags dev assets.go diff --git a/go.mod b/go.mod index a3cdbf4..f51d6a9 100644 --- a/go.mod +++ b/go.mod @@ -1,21 +1,17 @@ -module moul.io/protoc-gen-gotemplate +module github.com/unistack-org/protoc-gen-micro + +go 1.15 require ( - 
github.com/Masterminds/goutils v1.1.0 // indirect - github.com/Masterminds/semver v1.5.0 // indirect - github.com/Masterminds/sprig v2.22.0+incompatible + github.com/Masterminds/sprig/v3 v3.2.0 github.com/go-git/go-git/v5 v5.2.0 - github.com/gobuffalo/packr/v2 v2.8.0 - github.com/golang/protobuf v1.3.2 - github.com/google/uuid v1.1.1 // indirect - github.com/gorilla/handlers v1.4.2 - github.com/gorilla/mux v1.7.3 + github.com/golang/protobuf v1.4.3 github.com/grpc-ecosystem/grpc-gateway v1.12.1 - github.com/huandu/xstrings v1.3.0 - github.com/mitchellh/copystructure v1.0.0 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.1.0 + github.com/huandu/xstrings v1.3.1 github.com/mitchellh/reflectwalk v1.0.1 // indirect - google.golang.org/genproto v0.0.0-20200113173426-e1de0a7b01eb - gopkg.in/yaml.v2 v2.2.7 // indirect + github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 // indirect + github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546 // indirect + google.golang.org/genproto v0.0.0-20210106152847-07624b53cd92 + google.golang.org/protobuf v1.25.0 ) - -go 1.13 diff --git a/go.sum b/go.sum index a4aff9a..5654545 100644 --- a/go.sum +++ b/go.sum @@ -1,44 +1,73 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod 
h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/Masterminds/goutils v1.1.0 h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RPBiVg= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= -github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Masterminds/sprig v2.22.0+incompatible h1:z4yfnGrZ7netVz+0EDJ0Wi+5VZCSYp4Z0m2dk6cEM60= -github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/sprig/v3 v3.2.0 h1:P1ekkbuU73Ui/wS0nK1HOM37hh4xdfZo485UPf8rc+Y= +github.com/Masterminds/sprig/v3 v3.2.0/go.mod h1:tWhwTbUTndesPNeF0C900vKoq283u6zp4APT9vaF3SI= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q= -github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/chzyer/logex v1.1.10/go.mod 
h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg= github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0= @@ -51,63 +80,85 @@ github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12 h1:PbK github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw= github.com/go-git/go-git/v5 v5.2.0 h1:YPBLG/3UK1we1ohRkncLjaXWLW+HKp5QNM/jTli2JgI= github.com/go-git/go-git/v5 v5.2.0/go.mod h1:kh02eMX+wdqqxgNMEyq8YgwlIOsDOa9homkUq1PoTMs= 
-github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/gobuffalo/logger v1.0.3 h1:YaXOTHNPCvkqqA7w05A4v0k2tCdpr+sgFlgINbQ6gqc= -github.com/gobuffalo/logger v1.0.3/go.mod h1:SoeejUwldiS7ZsyCBphOGURmWdwUFXs0J7TCjEhjKxM= -github.com/gobuffalo/packd v1.0.0 h1:6ERZvJHfe24rfFmA9OaoKBdC7+c9sydrytMg8SdFGBM= -github.com/gobuffalo/packd v1.0.0/go.mod h1:6VTc4htmJRFB7u1m/4LeMTWjFoYrUiBkU9Fdec9hrhI= -github.com/gobuffalo/packr/v2 v2.8.0 h1:IULGd15bQL59ijXLxEvA5wlMxsmx/ZkQv9T282zNVIY= -github.com/gobuffalo/packr/v2 v2.8.0/go.mod h1:PDk2k3vGevNE3SwVyVRgQCCXETC9SaONCNSXT1Q8M1g= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod 
h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/gorilla/handlers v1.4.2 h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg= -github.com/gorilla/handlers v1.4.2/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= -github.com/gorilla/mux v1.7.3 h1:gnP5JzjVOuiZD07fKKToCAOjS0yOpj/qPETTXCCS6hw= -github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod 
h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= -github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= -github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/grpc-ecosystem/grpc-gateway v1.12.1 h1:zCy2xE9ablevUOrUZc3Dl72Dt+ya2FNAvC2yLYMHzi4= github.com/grpc-ecosystem/grpc-gateway v1.12.1/go.mod h1:8XEsbTttt/W+VvjtQhLACqCisSPWTxCZ7sBRjU6iH9c= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/huandu/xstrings v1.3.0 h1:gvV6jG9dTgFEncxo+AF7PH6MZXi/vZl25owA/8Dg8Wo= -github.com/huandu/xstrings v1.3.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.1.0 h1:EhTvIsn53GrBLl45YVHk25cUHQHwlJfq2y8b7W5IpVY= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.1.0/go.mod h1:ly5QWKtiqC7tGfzgXYtpoZYmEWx5Z82/b18ASEL+yGc= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/huandu/xstrings v1.3.1 h1:4jgBlKK6tLKFvO8u5pmYjG91cqytmDCDvGh7ECVFfFs= +github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg= github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA= +github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/karrick/godirwalk v1.15.3 h1:0a2pXOgtB16CqIqXTiT7+K9L73f74n/aNQUnH6Ortew= -github.com/karrick/godirwalk v1.15.3/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY= github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= -github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= 
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -115,149 +166,298 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/markbates/errx v1.1.0 h1:QDFeR+UP95dO12JgW+tgi2UVfo0V8YBHiUIOaeBPiEI= -github.com/markbates/errx v1.1.0/go.mod h1:PLa46Oex9KNbVDZhKel8v1OT7hD5JZ2eI7AHhA0wswc= -github.com/markbates/oncer v1.0.0 h1:E83IaVAHygyndzPimgUYJjbshhDTALZyXxvk9FOlQRY= -github.com/markbates/oncer v1.0.0/go.mod h1:Z59JA581E9GP6w96jai+TGqafHPW+cPfRxz2aSZ0mcI= -github.com/markbates/safe v1.0.1 h1:yjZkbvRM6IzKj9tlu/zMJLS0n/V351OZWRnF3QfaUxI= -github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.1 h1:FVzMWA5RllMAKIdUSC8mdWo3XtwoecrH79BY70sEEpE= github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= -github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 
-github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.5.2 h1:qLvObTrvO/XRCqmkKxUlOBc48bI3efyDuAZe25QiF0w= -github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v0.0.6/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= -github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= +github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 h1:bUGsEnyNbVPw06Bs80sCeARAlK8lhwqGyi6UT8ymuGk= +github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= +github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546 h1:pXY9qYc/MP5zdvqWEUH6SjNiu7VhSjuVFTFiTcphaLU= +github.com/shurcooL/vfsgen 
v0.0.0-20200824052919-0d455de96546/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= +github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/xanzy/ssh-agent v0.2.1 h1:TCbipTQL2JiiCprBWx9frJ2eJlCYT00NmctrHxVAr70= github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= -github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191122220453-ac88ee75c92c/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073 h1:xMPOj6Pz6UipU1wXLkrtqpHbR0AVFnyPEQq/wRWz9lM= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= 
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191002035440-2ec189313ef0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b h1:0mm1VjtFUOIlE1SbDlwjYaDxZVDP2S5ou6y0gSgXHu8= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a h1:GuSPYbZzB5/dcLNCwLQLsg3obCJtX9IJhpXkvY7kzk0= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202 h1:VvcQYSHwXgi7W+TpUR6A9g6Up98WAHf3f/ulnJ62IyA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/oauth2 
v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208 h1:qwRHBd0NqMbJxfbotnDhm2ByMI1Shq4Y6oRJo21SGJA= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894 h1:Cz4ceDQGXuKRnVBDTS23GTn/pU5OE2C0WrNTOYK1Uuc= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642 h1:B6caxRw+hozq68X2MY7jEpZh/cr4/aHLv9xU8Kkadrw= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod 
h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200308013534-11ec41452d41/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod 
h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod 
h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20190927181202-20e1ac93f88c/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20200113173426-e1de0a7b01eb h1:EsMpWw4S8DM2QYm5idfmmWsv2N57GWi2tx3p96Gpja4= -google.golang.org/genproto v0.0.0-20200113173426-e1de0a7b01eb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210106152847-07624b53cd92 h1:jOTk2Z6KYaWoptUFqZ167cS8peoUPjFEXrsqfVkkCGc= +google.golang.org/genproto v0.0.0-20210106152847-07624b53cd92/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod 
h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.0.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= @@ -266,16 +466,21 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= -gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 
gopkg.in/yaml.v2 v2.2.3 h1:fvjTMHxHEw/mxHbtzPi3JCcKXQRAnQTBRo6YCJSVHKI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo= -gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/helpers/helpers.go b/helpers/helpers.go index 798401d..75babed 100644 --- a/helpers/helpers.go +++ b/helpers/helpers.go @@ -9,12 +9,14 @@ import ( "sync" "text/template" - "github.com/Masterminds/sprig" + "github.com/Masterminds/sprig/v3" "github.com/golang/protobuf/proto" "github.com/golang/protobuf/protoc-gen-go/descriptor" ggdescriptor "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor" + openapi_options "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" "github.com/huandu/xstrings" options "google.golang.org/genproto/googleapis/api/annotations" + // "google.golang.org/protobuf/proto" ) var jsReservedRe = regexp.MustCompile(`(^|[^A-Za-z])(do|if|in|for|let|new|try|var|case|else|enum|eval|false|null|this|true|void|with|break|catch|class|const|super|throw|while|yield|delete|export|import|public|return|static|switch|typeof|default|extends|finally|package|private|continue|debugger|function|arguments|interface|protected|implements|instanceof)($|[^A-Za-z])`) @@ -23,6 +25,12 @@ var ( registry *ggdescriptor.Registry // some helpers need access to registry ) +type HttpOption struct { + Path []string + Method string + Body string +} + var ProtoHelpersFuncMap = template.FuncMap{ "string": func(i interface { String() string @@ -65,6 +73,9 @@ var ProtoHelpersFuncMap = template.FuncMap{ "join": func(sep string, a ...string) string { return strings.Join(a, sep) }, + "joinQuotes": func(a []string) string { + return fmt.Sprintf(`"%s"`, strings.Join(a, `", "`)) + }, "upperFirst": func(s string) string { return strings.ToUpper(s[:1]) + s[1:] }, @@ -138,6 +149,8 @@ var ProtoHelpersFuncMap = template.FuncMap{ "jsType": jsType, "jsSuffixReserved": jsSuffixReservedKeyword, "namespacedFlowType": namespacedFlowType, + "openapiOption": openapiOption, + "httpOption": httpOption, "httpVerb": httpVerb, "httpPath": httpPath, "httpPathsAdditionalBindings": httpPathsAdditionalBindings, @@ -1318,6 +1331,34 @@ func httpPathsAdditionalBindings(m *descriptor.MethodDescriptorProto) []string { return 
httpPaths } +func openapiOption(m *descriptor.MethodDescriptorProto) *openapi_options.Operation { + + ext, err := proto.GetExtension(m.Options, openapi_options.E_Openapiv2Operation) + if err != nil { + panic(err.Error()) + } + opts, ok := ext.(*openapi_options.Operation) + if !ok { + panic(fmt.Sprintf("extension is %T; want an Openapiv2Operation", ext)) + } + + return opts +} + +func httpOption(m *descriptor.MethodDescriptorProto) *HttpOption { + + opt := &HttpOption{Method: httpVerb(m), Body: httpBody(m)} + if path := httpPath(m); path != "" { + opt.Path = append(opt.Path, path) + } + + for _, path := range httpPathsAdditionalBindings(m) { + opt.Path = append(opt.Path, path) + } + + return opt +} + func httpVerb(m *descriptor.MethodDescriptorProto) string { ext, err := proto.GetExtension(m.Options, options.E_Http) @@ -1360,15 +1401,41 @@ func httpBody(m *descriptor.MethodDescriptorProto) string { return opts.Body } +func urlVarsFields(path string, d *ggdescriptor.Message) []*descriptor.FieldDescriptorProto { + var vars []*descriptor.FieldDescriptorProto + for _, field := range d.Field { + if !isFieldMessage(field) { + if strings.Contains(path, fmt.Sprintf("{%s}", *field.Name)) { + vars = append(vars, field) + continue + } + // JSON name field is checked as fallback. The value set by the protocol compiler. + // If the user has set a "json_name" option on a field, that option's value + // will be used in this check. By default value in this property will be field name in + // camelCase format. + if strings.Contains(path, fmt.Sprintf("{%s}", *field.JsonName)) { + vars = append(vars, field) + } + } + } + return vars +} + func urlHasVarsFromMessage(path string, d *ggdescriptor.Message) bool { for _, field := range d.Field { if !isFieldMessage(field) { if strings.Contains(path, fmt.Sprintf("{%s}", *field.Name)) { return true } + // JSON name field is checked as fallback. The value set by the protocol compiler. + // If the user has set a "json_name" option on a field, that option's value + // will be used in this check. By default value in this property will be field name in + // camelCase format. + if strings.Contains(path, fmt.Sprintf("{%s}", *field.JsonName)) { + return true + } } } - return false } @@ -1416,11 +1483,11 @@ func formatID(base string, formatted string) string { func replaceDict(src string, dict map[string]interface{}) string { for old, v := range dict { - new, ok := v.(string) + n, ok := v.(string) if !ok { continue } - src = strings.Replace(src, old, new, -1) + src = strings.Replace(src, old, n, -1) } return src } diff --git a/main.go b/main.go index ea79f97..ccfe38f 100644 --- a/main.go +++ b/main.go @@ -1,18 +1,20 @@ -package main // import "moul.io/protoc-gen-gotemplate" +package main import ( + "io" "io/ioutil" "log" "net/url" "os" + "path/filepath" "strings" - "github.com/golang/protobuf/proto" "github.com/golang/protobuf/protoc-gen-go/generator" plugin_go "github.com/golang/protobuf/protoc-gen-go/plugin" ggdescriptor "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor" - - pgghelpers "moul.io/protoc-gen-gotemplate/helpers" + "github.com/unistack-org/protoc-gen-micro/assets" + pgghelpers "github.com/unistack-org/protoc-gen-micro/helpers" + "google.golang.org/protobuf/proto" ) var ( @@ -44,13 +46,13 @@ func main() { // Parse parameters var ( - templateDir = "./templates" + templateDir = "" + templateRepo = "" destinationDir = "." 
debug = false all = false singlePackageMode = false fileMode = false - templateRepo = "" ) if parameter := g.Request.GetParameter(); parameter != "" { for _, param := range strings.Split(parameter, ",") { @@ -98,6 +100,8 @@ func main() { log.Printf("Err: invalid value for template_repo: %q", parts[1]) } templateRepo = parts[1] + case "paths": + // TODO: handle paths=source_relative default: log.Printf("Err: unknown parameter: %q", param) } @@ -122,7 +126,7 @@ func main() { } } - if templateRepo != "" { + if templateDir == "" || templateRepo != "" { if templateDir, err = ioutil.TempDir("", "gen-*"); err != nil { g.Error(err, "failed to create tmp dir") } @@ -132,8 +136,51 @@ func main() { } }() - if err = clone(templateRepo, templateDir); err != nil { - g.Error(err, "failed to clone repo") + if templateRepo != "" { + if err = clone(templateRepo, templateDir); err != nil { + g.Error(err, "failed to clone repo") + } + } else { + dir, err := assets.Assets.Open("/") + if err != nil { + g.Error(err, "failed to open assets dir") + } + fi, err := dir.Readdir(-1) + if err != nil { + g.Error(err, "failed to get assets files") + } + + for _, f := range fi { + if debug { + log.Printf("copy template %s", f.Name()) + } + fpath := filepath.Join(templateDir, f.Name()) + if err = os.MkdirAll(filepath.Dir(fpath), os.FileMode(0755)); err != nil { + g.Error(err, "failed to create nested dir") + } + if f.IsDir() { + continue + } + fd, err := os.OpenFile(fpath, os.O_CREATE|os.O_TRUNC|os.O_RDWR, f.Mode()) + if err != nil { + g.Error(err, "failed to create template file") + } + fs, err := assets.Assets.Open(f.Name()) + if err != nil { + g.Error(err, "failed to open template file") + } + if _, err = io.Copy(fd, fs); err != nil { + fd.Close() + fs.Close() + g.Error(err, "failed to copy template file") + } + if err = fd.Close(); err != nil { + g.Error(err, "failed to flush template file") + } + if err = fs.Close(); err != nil { + g.Error(err, "failed to flush template file") + } + } } } diff --git a/rules.mk b/rules.mk deleted file mode 100644 index 020d7aa..0000000 --- a/rules.mk +++ /dev/null @@ -1,270 +0,0 @@ -# +--------------------------------------------------------------+ -# | * * * moul.io/rules.mk | -# +--------------------------------------------------------------+ -# | | -# | ++ ______________________________________ | -# | ++++ / \ | -# | ++++ | | | -# | ++++++++++ | https://moul.io/rules.mk is a set | | -# | +++ | | of common Makefile rules that can | | -# | ++ | | be configured from the Makefile | | -# | + -== ==| | or with environment variables. 
| | -# | ( <*> <*> | | | -# | | | /| Manfred Touron | | -# | | _) / | manfred.life | | -# | | +++ / \______________________________________/ | -# | \ =+ / | -# | \ + | -# | |\++++++ | -# | | ++++ ||// | -# | ___| |___ _||/__ __| -# | / --- \ \| ||| __ _ ___ __ __/ /| -# |/ | | \ \ / / ' \/ _ \/ // / / | -# || | | | | | /_/_/_/\___/\_,_/_/ | -# +--------------------------------------------------------------+ - -all: help - -## -## Common helpers -## - -rwildcard = $(foreach d,$(wildcard $1*),$(call rwildcard,$d/,$2) $(filter $(subst *,%,$2),$d)) - -## -## rules.mk -## -ifneq ($(wildcard rules.mk),) -.PHONY: rulesmk.bumpdeps -rulesmk.bumpdeps: - wget -O rules.mk https://raw.githubusercontent.com/moul/rules.mk/master/rules.mk -BUMPDEPS_STEPS += rulesmk.bumpdeps -endif - -## -## Maintainer -## - -ifneq ($(wildcard .git/HEAD),) -.PHONY: generate.authors -generate.authors: AUTHORS -AUTHORS: .git/ - echo "# This file lists all individuals having contributed content to the repository." > AUTHORS - echo "# For how it is generated, see 'https://github.com/moul/rules.mk'" >> AUTHORS - echo >> AUTHORS - git log --format='%aN <%aE>' | LC_ALL=C.UTF-8 sort -uf >> AUTHORS -GENERATE_STEPS += generate.authors -endif - -## -## Golang -## - -ifndef GOPKG -ifneq ($(wildcard go.mod),) -GOPKG = $(shell sed '/module/!d;s/^omdule\ //' go.mod) -endif -endif -ifdef GOPKG -GO ?= go -GOPATH ?= $(HOME)/go -GOLIBS ?= $(shell find . -type f -name "go.mod" | grep -v /vendor/ | sed 's@/[^/]*$$@@' | sort | uniq) -GO_INSTALL_OPTS ?= -GO_TEST_OPTS ?= -test.timeout=30s - -ifdef GOBINS -.PHONY: go.install -go.install: - @set -e; for dir in $(GOBINS); do ( set -xe; \ - cd $$dir; \ - $(GO) install $(GO_INSTALL_OPTS) .; \ - ); done -INSTALL_STEPS += go.install - -.PHONY: go.release -go.release: - goreleaser --snapshot --skip-publish --rm-dist - @echo -n "Do you want to release? [y/N] " && read ans && \ - if [ $${ans:-N} = y ]; then set -xe; goreleaser --rm-dist; fi -RELEASE_STEPS += go.release -endif - -.PHONY: go.unittest coverage.txt -go.unittest: coverage.txt -coverage.txt: - @rm -f /tmp/coverage.txt - @touch /tmp/coverage.txt - @set -e; for dir in $(GOLIBS); do ( set -xe; \ - cd $$dir; \ - $(GO) test $(GO_TEST_OPTS) -cover -coverprofile=/tmp/profile.out -covermode=atomic -race ./...; \ - if [ -f /tmp/profile.out ]; then \ - cat /tmp/profile.out >> /tmp/coverage.txt; \ - rm -f /tmp/profile.out; \ - fi); done - mv /tmp/coverage.txt . - -.PHONY: go.coverfunc -go.coverfunc: coverage.txt - go tool cover -func=./coverage.txt | grep -v .pb.go: | grep -v .pb.gw.go: - -.PHONY: go.lint -go.lint: - @set -e; for dir in $(GOLIBS); do ( set -xe; \ - cd $$dir; \ - golangci-lint run --verbose ./...; \ - ); done - -.PHONY: go.tidy -go.tidy: - @set -e; for dir in $(GOLIBS); do ( set -xe; \ - cd $$dir; \ - $(GO) mod tidy; \ - ); done - -.PHONY: go.build -go.build: - @set -e; for dir in $(GOLIBS); do ( set -xe; \ - cd $$dir; \ - $(GO) build ./...; \ - ); done - -.PHONY: go.bump-deps -go.bumpdeps: - @set -e; for dir in $(GOLIBS); do ( set -xe; \ - cd $$dir; \ - $(GO) get -u ./...; \ - ); done - -.PHONY: go.bump-deps -go.fmt: - if ! command -v goimports &>/dev/null; then GO111MODULE=off go get golang.org/x/tools/cmd/goimports; fi - @set -e; for dir in $(GOLIBS); do ( set -xe; \ - cd $$dir; \ - goimports -w . 
\ - ); done - -BUILD_STEPS += go.build -BUMPDEPS_STEPS += go.bumpdeps -TIDY_STEPS += go.tidy -LINT_STEPS += go.lint -UNITTEST_STEPS += go.unittest -FMT_STEPS += go.fmt -endif - -## -## Node -## - -ifndef NPM_PACKAGES -ifneq ($(wildcard package.json),) -NPM_PACKAGES = . -endif -endif -ifdef NPM_PACKAGES -.PHONY: npm.publish -npm.publish: - @echo -n "Do you want to npm publish? [y/N] " && read ans && \ - @if [ $${ans:-N} = y ]; then \ - set -e; for dir in $(NPM_PACKAGES); do ( set -xe; \ - cd $$dir; \ - npm publish --access=public; \ - ); done; \ - fi -RELEASE_STEPS += npm.publish -endif - -## -## Docker -## - -ifndef DOCKER_IMAGE -ifneq ($(wildcard Dockerfile),) -DOCKER_IMAGE = $(notdir $(PWD)) -endif -endif -ifdef DOCKER_IMAGE -ifneq ($(DOCKER_IMAGE),none) -.PHONY: docker.build -docker.build: - docker build \ - --build-arg VCS_REF=`git rev-parse --short HEAD` \ - --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` \ - --build-arg VERSION=`git describe --tags --always` \ - -t $(DOCKER_IMAGE) . - -BUILD_STEPS += docker.build -endif -endif - -## -## Common -## - -TEST_STEPS += $(UNITTEST_STEPS) -TEST_STEPS += $(LINT_STEPS) -TEST_STEPS += $(TIDY_STEPS) - -ifneq ($(strip $(TEST_STEPS)),) -.PHONY: test -test: $(PRE_TEST_STEPS) $(TEST_STEPS) -endif - -ifdef INSTALL_STEPS -.PHONY: install -install: $(PRE_INSTALL_STEPS) $(INSTALL_STEPS) -endif - -ifdef UNITTEST_STEPS -.PHONY: unittest -unittest: $(PRE_UNITTEST_STEPS) $(UNITTEST_STEPS) -endif - -ifdef LINT_STEPS -.PHONY: lint -lint: $(PRE_LINT_STEPS) $(FMT_STEPS) $(LINT_STEPS) -endif - -ifdef TIDY_STEPS -.PHONY: tidy -tidy: $(PRE_TIDY_STEPS) $(TIDY_STEPS) -endif - -ifdef BUILD_STEPS -.PHONY: build -build: $(PRE_BUILD_STEPS) $(BUILD_STEPS) -endif - -ifdef RELEASE_STEPS -.PHONY: release -release: $(PRE_RELEASE_STEPS) $(RELEASE_STEPS) -endif - -ifdef BUMPDEPS_STEPS -.PHONY: bumpdeps -bumpdeps: $(PRE_BUMDEPS_STEPS) $(BUMPDEPS_STEPS) -endif - -ifdef FMT_STEPS -.PHONY: fmt -fmt: $(PRE_FMT_STEPS) $(FMT_STEPS) -endif - -ifdef GENERATE_STEPS -.PHONY: generate -generate: $(PRE_GENERATE_STEPS) $(GENERATE_STEPS) -endif - -.PHONY: help -help: - @echo "General commands:" - @[ "$(BUILD_STEPS)" != "" ] && echo " build" || true - @[ "$(BUMPDEPS_STEPS)" != "" ] && echo " bumpdeps" || true - @[ "$(FMT_STEPS)" != "" ] && echo " fmt" || true - @[ "$(GENERATE_STEPS)" != "" ] && echo " generate" || true - @[ "$(INSTALL_STEPS)" != "" ] && echo " install" || true - @[ "$(LINT_STEPS)" != "" ] && echo " lint" || true - @[ "$(RELEASE_STEPS)" != "" ] && echo " release" || true - @[ "$(TEST_STEPS)" != "" ] && echo " test" || true - @[ "$(TIDY_STEPS)" != "" ] && echo " tidy" || true - @[ "$(UNITTEST_STEPS)" != "" ] && echo " unittest" || true - @# FIXME: list other commands diff --git a/slides/README.md b/slides/README.md deleted file mode 100644 index 8fa2682..0000000 --- a/slides/README.md +++ /dev/null @@ -1,247 +0,0 @@ -# [fit] Protobuf & Code Generation - -### 2016, by Manfred Touron (@moul) - ---- - -# overview - -* go-kit is an amazing framework to develop strong micro services -* but it requires a lot of boilerplate code -* return on experience on go-kit boilerplate code generation - ---- - -# protobuf? - -* limited to exchanges (methods and models) -* extendable with plugins -* contract-based -* universal - ---- - -# code generation? 
- -* the good old ./generate.sh bash script -* go:generate -* make -* protobuf + [protoc-gen-gotemplate](https://moul.io/protoc-gen-gotemplate) - ---- - -# go-kit - -* protobuf-first, rpc-first service framework in Golang -* abstract services, endpoints, transports -* requires a lot of boilerplate code in multiple packages - ---- - -# example: `session.proto` - -```protobuf -syntax = "proto3"; -package session; - -service SessionService { - rpc Login(LoginRequest) returns (LoginResponse) {} -} - -message LoginRequest { - string username = 1; - string password = 2; -} - -message LoginResponse { - string token = 1; - string err_msg = 2; -} -``` - ---- - -# example: `session.go` - -```go -package sessionsvc - -import ( - "fmt" - "golang.org/x/net/context" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/session/gen/pb" -) - -type Service struct{} - -func New() pb.SessionServiceServer { - return &Service{} -} - -func (svc *Service) Login(ctx context.Context, in *pb.LoginRequest) (*pb.LoginResponse, error) { - // custon code here - return nil, fmt.Errorf("not implemented") -} -``` - ---- - -##### example: `{{.File.Package}}/gen/transports/http/http.go.tmpl` - -```go -// source: templates/{{.File.Package}}/gen/transports/http/http.go.tmpl -package {{.File.Package}}_httptransport -import ( - gokit_endpoint "github.com/go-kit/kit/endpoint" - httptransport "github.com/go-kit/kit/transport/http" - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/{{.File.Package}}/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/{{.File.Package}}/gen/pb" -) -``` - -```go -// result: services/user/gen/transports/http/http.go -package user_httptransport -import ( - gokit_endpoint "github.com/go-kit/kit/endpoint" - httptransport "github.com/go-kit/kit/transport/http" - endpoints "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/endpoints" - pb "moul.io/protoc-gen-gotemplate/examples/go-kit/services/user/gen/pb" -) -``` - ---- - -##### example: `{{.File.Package}}/gen/transports/http/http.go.tmpl` - -```go -// source: templates/{{.File.Package}}/gen/transports/http/http.go.tmpl -{{range .Service.Method}} -func Make{{.Name}}Handler(ctx context.Context, svc pb.{{$file.Package | title}}ServiceServer, endpoint gokit_endpoint.Endpoint) *httptransport.Server { - return httptransport.NewServer( - ctx, - endpoint, - decode{{.Name}}Request, - encode{{.Name}}Response, - []httptransport.ServerOption{}..., - ) -} -{{end}} -``` - -```go -// result: services/user/gen/transports/http/http.go -func MakeGetUserHandler(ctx context.Context, svc pb.UserServiceServer, endpoint gokit_endpoint.Endpoint) *httptransport.Server { - return httptransport.NewServer( - ctx, - endpoint, - decodeGetUserRequest, - encodeGetUserResponse, - []httptransport.ServerOption{}..., - ) -} -``` - ---- - -##### example: `{{.File.Package}}/gen/transports/http/http.go.tmpl` - -```go -// source: templates/{{.File.Package}}/gen/transports/http/http.go.tmpl -func RegisterHandlers(ctx context.Context, svc pb.{{$file.Package | title}}ServiceServer, mux *http.ServeMux, endpoints endpoints.Endpoints) error { - {{range .Service.Method}} - log.Println("new HTTP endpoint: \"/{{.Name}}\" (service={{$file.Package | title}})") - mux.Handle("/{{.Name}}", Make{{.Name}}Handler(ctx, svc, endpoints.{{.Name}}Endpoint)) - {{end}} - return nil -} -``` - -```go -// result: services/user/gen/transports/http/http.go -func RegisterHandlers(ctx context.Context, svc pb.UserServiceServer, mux *http.ServeMux, endpoints 
endpoints.Endpoints) error { - - log.Println("new HTTP endpoint: \"/CreateUser\" (service=User)") - mux.Handle("/CreateUser", MakeCreateUserHandler(ctx, svc, endpoints.CreateUserEndpoint)) - - log.Println("new HTTP endpoint: \"/GetUser\" (service=User)") - mux.Handle("/GetUser", MakeGetUserHandler(ctx, svc, endpoints.GetUserEndpoint)) - - return nil -} -``` - ---- - -#### `protoc --gogo_out=plugins=grpc:. ./services/*/*.proto` - ---- - -#### `protoc --gotemplate_out=template_dir=./templates:services ./services/*/*.proto` - ---- - -![fit](assets/session-wc.png) - ---- - -![right fit](assets/wc.png) - -## 3 services -## 6 methods -## 149 custom lines -## 1429 generated lines -## business focus - ---- - -# generation usages - -* go-kit boilerplate (see [examples/go-kit](https://moul.io/protoc-gen-gotemplate/tree/master/examples/go-kit)) -* k8s configuration -* Dockerfile -* documentation -* unit-tests -* fun - ---- - -# pros - -* small custom codebase -* templates shipped with code -* hardly typed, no reflects -* genericity -* contrat terms (protobuf) respected -* not limited to a language - ---- - -# cons - -* the author needs to write its own templates -* sometimes difficult to generate valid code -* not enough helpers around the code generation yet - ---- - -# improvement ideas - -* Support protobufs extensions (i.e, annotations.probo) -* Generate one file from multiple services -* Add more helpers around the code generation - ---- - -# conclusion - -* Useful to keep everything standard -* The awesomeness of go-kit without the hassle of writing boilerplate code -* Always up-to-date with the contracts - ---- - -# questions? - -### moul.io/protoc-gen-gotemplate -### @moul diff --git a/slides/assets/session-wc.png b/slides/assets/session-wc.png deleted file mode 100644 index f4532d9..0000000 Binary files a/slides/assets/session-wc.png and /dev/null differ diff --git a/slides/assets/wc.png b/slides/assets/wc.png deleted file mode 100644 index 2b69269..0000000 Binary files a/slides/assets/wc.png and /dev/null differ diff --git a/slides/slides.pdf b/slides/slides.pdf deleted file mode 100644 index e629414..0000000 Binary files a/slides/slides.pdf and /dev/null differ diff --git a/templates/{{.File.Package}}_micro.pb.go.tmpl b/templates/{{.File.Package}}_micro.pb.go.tmpl new file mode 100644 index 0000000..2ce2e31 --- /dev/null +++ b/templates/{{.File.Package}}_micro.pb.go.tmpl @@ -0,0 +1,142 @@ +// Code generated by protoc-gen-micro +// source: {{.File.Name}} +package {{goPkgLastElement .File | splitArray ";" | last}} + +import ( + "context" + + micro_api "github.com/unistack-org/micro/v3/api" + micro_client "github.com/unistack-org/micro/v3/client" + micro_server "github.com/unistack-org/micro/v3/server" +) + +{{- $ServiceName := .Service.Name | trimSuffix "Service" }} + +// New{{$ServiceName}}Endpoints provides api endpoints metdata for {{$ServiceName}} service +func New{{$ServiceName}}Endpoints() []*micro_api.Endpoint { + var endpoints []*micro_api.Endpoint + + {{- range .Service.Method}} + {{- if ne (httpVerb .) "" }} + endpoint := µ_api.Endpoint{ + Name: "{{$ServiceName}}.{{.Name}}", + Path: []string{"{{httpPath .}}"}, + Method: []string{"{{httpVerb .}}"}, + {{- if ne (httpBody .) "" }} + Body: "{{httpBody .}}", + {{- end}} + {{- if or (.ClientStreaming) (.ServerStreaming)}} + Stream: true, + {{- end}} + Handler: "rpc", + } + {{- range httpPathsAdditionalBindings . 
}} + endpoint.Path = append(endpoint.Path, "{{.}}") + {{- end}} + endpoints = append(endpoints, endpoint) + {{- end}} + {{- end}} + return endpoints +} + +// {{$ServiceName}}Service interface +type {{$ServiceName}}Service interface { + {{- range .Service.Method}} + {{- $reqMethod := .InputType | splitArray "." | last }} + {{- $rspMethod := .OutputType | splitArray "." | last }} + {{- if or (.ServerStreaming) (.ClientStreaming)}} + {{- if and (.ServerStreaming) (not .ClientStreaming)}} + {{.Name}}(context.Context, *{{$reqMethod}}, ...micro_client.CallOption) ({{$ServiceName}}_{{.Name}}Service, error) + {{- else}} + {{.Name}}(context.Context, ...micro_client.CallOption) ({{$ServiceName}}_{{.Name}}Service, error) + {{- end}} + {{- else}} + {{.Name}}(context.Context, *{{$reqMethod}}, ...micro_client.CallOption) (*{{$rspMethod}}, error) + {{- end}} + {{- end}} +} + +{{range .Service.Method}} +{{- $reqMethod := .InputType | splitArray "." | last}} +{{- $rspMethod := .OutputType | splitArray "." | last}} +{{if or (.ServerStreaming) (.ClientStreaming)}} +type {{$ServiceName}}_{{.Name}}Service interface { + Context() context.Context + SendMsg(interface{}) error + RecvMsg(interface{}) error + {{- if and (.ClientStreaming) (not .ServerStreaming)}} + RecvAndClose() (*{{$rspMethod}}, error) + {{- end}} + Close() error + {{- if .ClientStreaming}} + Send(*{{$reqMethod}}) error + {{- end}} + {{- if .ServerStreaming}} + Recv() (*{{$rspMethod}}, error) + {{- end}} +} +{{- end}} +{{- end}} +// Micro server stuff + +// {{$ServiceName}}Handler server handler +type {{$ServiceName}}Handler interface { + {{- range .Service.Method}} +{{- $reqMethod := .InputType | splitArray "." | last}} +{{- $rspMethod := .OutputType | splitArray "." | last}} +{{- if or (.ServerStreaming) (.ClientStreaming)}} + {{- if not .ClientStreaming}} + {{.Name}}(context.Context, *{{$reqMethod}}, {{$ServiceName}}_{{.Name}}Stream) error + {{- else}} + {{.Name}}(context.Context, {{$ServiceName}}_{{.Name}}Stream) error + {{- end}} +{{- else}} + {{.Name}}(context.Context, *{{$reqMethod}}, *{{$rspMethod}}) error +{{- end}} +{{- end}} +} + +// Register{{$ServiceName}}Handler registers server handler +func Register{{$ServiceName}}Handler(s micro_server.Server, sh {{$ServiceName}}Handler, opts ...micro_server.HandlerOption) error { + type {{$ServiceName | lowerFirst}} interface { + {{- range .Service.Method}} +{{- $reqMethod := .InputType | splitArray "." | last}} +{{- $rspMethod := .OutputType | splitArray "." | last}} +{{- if or (.ServerStreaming) (.ClientStreaming)}} + {{.Name}}(context.Context, micro_server.Stream) error +{{- else}} + {{.Name}}(context.Context, *{{$reqMethod}}, *{{$rspMethod}}) error + {{- end}} + {{- end}} + } + type {{$ServiceName}} struct { + {{$ServiceName | lowerFirst}} + } + h := &{{$ServiceName | lowerFirst}}Handler{sh} + for _, endpoint := range New{{$ServiceName}}Endpoints() { + opts = append(opts, micro_api.WithEndpoint(endpoint)) + } + return s.Handle(s.NewHandler(&{{$ServiceName}}{h}, opts...)) +} + +{{- range .Service.Method}} +{{- $reqMethod := .InputType | splitArray "." | last}} +{{- $rspMethod := .OutputType | splitArray "." 
| last}} +{{if or (.ServerStreaming) (.ClientStreaming)}} +type {{$ServiceName}}_{{.Name}}Stream interface { + Context() context.Context + SendMsg(interface{}) error + RecvMsg(interface{}) error + {{- if and (.ClientStreaming) (not .ServerStreaming)}} + SendAndClose(*{{$rspMethod}}) error + {{- end}} + Close() error + {{- if .ServerStreaming}} + Send(*{{$rspMethod}}) error + {{- end}} + {{- if .ClientStreaming}} + Recv() (*{{$reqMethod}}, error) + {{- end}} +} +{{- end}} +{{- end}} diff --git a/templates/{{.File.Package}}_micro_grpc.pb.go.tmpl b/templates/{{.File.Package}}_micro_grpc.pb.go.tmpl new file mode 100644 index 0000000..244b16c --- /dev/null +++ b/templates/{{.File.Package}}_micro_grpc.pb.go.tmpl @@ -0,0 +1,219 @@ +// Code generated by protoc-gen-micro +// source: {{.File.Name}} +package {{goPkgLastElement .File | splitArray ";" | last}} + +import ( + "context" + + micro_client "github.com/unistack-org/micro/v3/client" + micro_server "github.com/unistack-org/micro/v3/server" +) + +var ( + _ micro_server.Option + _ micro_client.Option +) + +{{- $File := .File }} +{{- $ServiceName := .Service.Name | trimSuffix "Service" }} + +type {{$ServiceName | lowerFirst}}Service struct { + c micro_client.Client + name string +} + +// Micro client stuff + +// New{{$ServiceName}}Service create new service client +func New{{$ServiceName}}Service(name string, c micro_client.Client) {{$ServiceName}}Service { + return &{{$ServiceName | lowerFirst}}Service{c: c, name: name} +} + +{{range .Service.Method}} +{{- $reqMethod := .InputType | splitArray "." | last}} +{{- $rspMethod := .OutputType | splitArray "." | last}} +{{- if and (not .ClientStreaming) (.ServerStreaming) -}} +func (c *{{$ServiceName | lowerFirst}}Service) {{.Name}}(ctx context.Context, req *{{$reqMethod}}, opts ...micro_client.CallOption) ({{$ServiceName}}_{{.Name}}Service, error) { + {{- else if .ClientStreaming -}} +func (c *{{$ServiceName | lowerFirst}}Service) {{.Name}}(ctx context.Context, opts ...micro_client.CallOption) ({{$ServiceName}}_{{.Name}}Service, error) { + {{- else -}} +func (c *{{$ServiceName | lowerFirst}}Service) {{.Name}}(ctx context.Context, req *{{$reqMethod}}, opts ...micro_client.CallOption) (*{{$rspMethod}}, error) { + {{- end -}} + {{- if or (.ServerStreaming) (.ClientStreaming)}} + stream, err := c.c.Stream(ctx, c.c.NewRequest(c.name, "{{$ServiceName}}.{{.Name}}", &{{$reqMethod}}{}), opts...) + if err != nil { + return nil, err + } + {{- if not .ClientStreaming }} + if err := stream.Send(req); err != nil { + return nil, err + } + {{- end}} + return &{{$ServiceName | lowerFirst}}Service{{.Name}}{stream}, nil + {{- else}} + rsp := &{{$rspMethod}}{} + err := c.c.Call(ctx, c.c.NewRequest(c.name, "{{$ServiceName}}.{{.Name}}", req), rsp, opts...) 
+ if err != nil { + return nil, err + } + return rsp, nil + {{- end}} +} +{{if or (.ServerStreaming) (.ClientStreaming)}} +type {{$ServiceName}}_{{.Name}}Service interface { + Context() context.Context + SendMsg(interface{}) error + RecvMsg(interface{}) error + {{- if and (.ClientStreaming) (not .ServerStreaming)}} + RecvAndClose() (*{{$rspMethod}}, error) + {{- end}} + Close() error + {{- if .ClientStreaming}} + Send(*{{$reqMethod}}) error + {{- end}} + {{- if .ServerStreaming}} + Recv() (*{{$rspMethod}}, error) + {{- end}} +} + +type {{$ServiceName | lowerFirst}}Service{{.Name}} struct { + stream micro_client.Stream +} +{{if and (.ClientStreaming) (not .ServerStreaming)}} +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) RecvAndClose() (*{{$rspMethod}}, error) { + m := &{{$rspMethod}}{} + err := x.RecvMsg(m) + if err == nil { + err = x.Close() + } + + if err != nil { + return nil, err + } + + return m, nil +} +{{- end}} + +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) Close() error { + return x.stream.Close() +} + +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) Context() context.Context { + return x.stream.Context() +} + +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) SendMsg(m interface{}) error { + return x.stream.Send(m) +} + +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) RecvMsg(m interface{}) error { + return x.stream.Recv(m) +} + +{{ if .ClientStreaming -}} +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) Send(m *{{$reqMethod}}) error { + return x.stream.Send(m) +} +{{- end}} + +{{ if .ServerStreaming -}} +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) Recv() (*{{$rspMethod}}, error) { + m := &{{$rspMethod}}{} + if err := x.stream.Recv(m); err != nil { + return nil, err + } + return m, nil +} +{{- end }} + +{{- end}} +{{- end -}} + +// Micro server stuff + +type {{$ServiceName | lowerFirst}}Handler struct { + {{$ServiceName}}Handler +} +{{range .Service.Method }} +{{- $reqMethod := .InputType | splitArray "." | last}} +{{- $rspMethod := .OutputType | splitArray "." 
| last}} +{{- if or (.ServerStreaming) (.ClientStreaming)}} +func (h *{{$ServiceName | lowerFirst}}Handler) {{.Name}}(ctx context.Context, stream micro_server.Stream) error { +{{- if not .ClientStreaming}} + m := &{{$reqMethod}}{} + if err := stream.Recv(m); err != nil { + return err + } + return h.{{$ServiceName}}Handler.{{.Name}}(ctx, m, &{{$ServiceName | lowerFirst}}{{.Name}}Stream{stream}) +{{- else}} + return h.{{$ServiceName}}Handler.{{.Name}}(ctx, &{{$ServiceName | lowerFirst}}{{.Name}}Stream{stream}) +{{- end}} +} +{{- else}} +func (h *{{$ServiceName | lowerFirst}}Handler) {{.Name}}(ctx context.Context, req *{{$reqMethod}}, rsp *{{$rspMethod}}) error { + return h.{{$ServiceName}}Handler.{{.Name}}(ctx, req, rsp) +} +{{- end}} +{{if or (.ServerStreaming) (.ClientStreaming)}} +type {{$ServiceName}}_{{.Name}}Stream interface { + Context() context.Context + SendMsg(interface{}) error + RecvMsg(interface{}) error + {{- if and (.ClientStreaming) (not .ServerStreaming)}} + SendAndClose(*{{$rspMethod}}) error + {{- end}} + Close() error + {{- if .ServerStreaming}} + Send(*{{$rspMethod}}) error + {{- end}} + {{- if .ClientStreaming}} + Recv() (*{{$reqMethod}}, error) + {{- end}} +} + +type {{$ServiceName | lowerFirst}}{{.Name}}Stream struct { + stream micro_server.Stream +} +{{if and (.ClientStreaming) (not .ServerStreaming)}} +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) SendAndClose(m *{{$rspMethod}}) error { + err := x.SendMsg(m) + if err == nil { + err = x.stream.Close() + } + return err +} +{{- end}} + +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) Close() error { + return x.stream.Close() +} + +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) Context() context.Context { + return x.stream.Context() +} + +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) SendMsg(m interface{}) error { + return x.stream.Send(m) +} + +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) RecvMsg(m interface{}) error { + return x.stream.Recv(m) +} +{{- if .ServerStreaming}} +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) Send(m *{{$rspMethod}}) error { + return x.stream.Send(m) +} +{{- end}} +{{if .ClientStreaming}} +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) Recv() (*{{$reqMethod}}, error) { + m := &{{$reqMethod}}{} + if err := x.stream.Recv(m); err != nil { + return nil, err + } + return m, nil +} +{{- end}} + +{{- end}} +{{- end}} diff --git a/templates/{{.File.Package}}_micro_http.pb.go.tmpl b/templates/{{.File.Package}}_micro_http.pb.go.tmpl new file mode 100644 index 0000000..a22f48c --- /dev/null +++ b/templates/{{.File.Package}}_micro_http.pb.go.tmpl @@ -0,0 +1,218 @@ +// Code generated by protoc-gen-micro +// source: {{.File.Name}} +package {{goPkgLastElement .File | splitArray ";" | last}} + +import ( + "context" + + micro_client "github.com/unistack-org/micro/v3/client" + micro_server "github.com/unistack-org/micro/v3/server" + micro_client_http "github.com/unistack-org/micro-client-http" +) + +var ( + _ micro_server.Option + _ micro_client.Option +) + +{{- $File := .File }} +{{- $ServiceName := .Service.Name | trimSuffix "Service" }} + +type {{$ServiceName | lowerFirst}}Service struct { + c micro_client.Client + name string +} + +// Micro client stuff + +// New{{$ServiceName}}Service create new service client +func New{{$ServiceName}}Service(name string, c micro_client.Client) {{$ServiceName}}Service { + return &{{$ServiceName | lowerFirst}}Service{c: c, name: name} +} + +{{range .Service.Method}} +{{- $reqMethod := .InputType | splitArray "." 
| last}} +{{- $rspMethod := .OutputType | splitArray "." | last}} +{{- if and (not .ClientStreaming) (.ServerStreaming) -}} +func (c *{{$ServiceName | lowerFirst}}Service) {{.Name}}(ctx context.Context, req *{{$reqMethod}}, opts ...micro_client.CallOption) ({{$ServiceName}}_{{.Name}}Service, error) { + {{- else if .ClientStreaming -}} +func (c *{{$ServiceName | lowerFirst}}Service) {{.Name}}(ctx context.Context, opts ...micro_client.CallOption) ({{$ServiceName}}_{{.Name}}Service, error) { + {{- else -}} +func (c *{{$ServiceName | lowerFirst}}Service) {{.Name}}(ctx context.Context, req *{{$reqMethod}}, opts ...micro_client.CallOption) (*{{$rspMethod}}, error) { + {{- end }} + {{- if (openapiOption .).Responses }} + errmap := make(map[string]interface{}, {{ len (openapiOption .).Responses}}) + {{- range $k, $v := (openapiOption .).Responses }} + errmap["{{$k}}"] = &{{- (getMessageType $File $v.Schema.JsonSchema.Ref).Name }}{} + {{- end }} + {{- end }} + nopts := append(opts, + micro_client_http.Method("{{httpVerb .}}"), + micro_client_http.Path("{{httpPath .}}"), + {{- if (openapiOption .).Responses }} + micro_client_http.ErrorMap(errmap), + {{- end }} + ) + {{- if or (.ServerStreaming) (.ClientStreaming)}} + stream, err := c.c.Stream(ctx, c.c.NewRequest(c.name, "{{$ServiceName}}.{{.Name}}", &{{$reqMethod}}{}), nopts...) + if err != nil { + return nil, err + } + {{- if not .ClientStreaming }} + if err := stream.Send(req); err != nil { + return nil, err + } + {{- end}} + return &{{$ServiceName | lowerFirst}}Service{{.Name}}{stream}, nil + {{- else}} + rsp := &{{$rspMethod}}{} + err := c.c.Call(ctx, c.c.NewRequest(c.name, "{{$ServiceName}}.{{.Name}}", req), rsp, nopts...) + if err != nil { + return nil, err + } + return rsp, nil + {{- end}} +} + +{{if or (.ServerStreaming) (.ClientStreaming)}} +type {{$ServiceName | lowerFirst}}Service{{.Name}} struct { + stream micro_client.Stream +} +{{if and (.ClientStreaming) (not .ServerStreaming)}} +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) RecvAndClose() (*{{$rspMethod}}, error) { + m := &{{$rspMethod}}{} + err := x.RecvMsg(m) + if err == nil { + err = x.Close() + } + + if err != nil { + return nil, err + } + + return m, nil +} +{{- end}} + +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) Close() error { + return x.stream.Close() +} + +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) Context() context.Context { + return x.stream.Context() +} + +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) SendMsg(m interface{}) error { + return x.stream.Send(m) +} + +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) RecvMsg(m interface{}) error { + return x.stream.Recv(m) +} + +{{ if .ClientStreaming -}} +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) Send(m *{{$reqMethod}}) error { + return x.stream.Send(m) +} +{{- end}} + +{{ if .ServerStreaming -}} +func (x *{{$ServiceName | lowerFirst}}Service{{.Name}}) Recv() (*{{$rspMethod}}, error) { + m := &{{$rspMethod}}{} + if err := x.stream.Recv(m); err != nil { + return nil, err + } + return m, nil +} +{{- end }} + +{{- end}} +{{- end -}} + +// Micro server stuff + +type {{$ServiceName | lowerFirst}}Handler struct { + {{$ServiceName}}Handler +} +{{range .Service.Method }} +{{- $reqMethod := .InputType | splitArray "." | last}} +{{- $rspMethod := .OutputType | splitArray "." 
| last}} +{{- if or (.ServerStreaming) (.ClientStreaming)}} +func (h *{{$ServiceName | lowerFirst}}Handler) {{.Name}}(ctx context.Context, stream micro_server.Stream) error { +{{- if not .ClientStreaming}} + m := &{{$reqMethod}}{} + if err := stream.Recv(m); err != nil { + return err + } + return h.{{$ServiceName}}Handler.{{.Name}}(ctx, m, &{{$ServiceName | lowerFirst}}{{.Name}}Stream{stream}) +{{- else}} + return h.{{$ServiceName}}Handler.{{.Name}}(ctx, &{{$ServiceName | lowerFirst}}{{.Name}}Stream{stream}) +{{- end}} +} +{{- else}} +func (h *{{$ServiceName | lowerFirst}}Handler) {{.Name}}(ctx context.Context, req *{{$reqMethod}}, rsp *{{$rspMethod}}) error { + return h.{{$ServiceName}}Handler.{{.Name}}(ctx, req, rsp) +} +{{- end}} +{{if or (.ServerStreaming) (.ClientStreaming)}} +type {{$ServiceName}}_{{.Name}}Stream interface { + Context() context.Context + SendMsg(interface{}) error + RecvMsg(interface{}) error + {{- if and (.ClientStreaming) (not .ServerStreaming)}} + SendAndClose(*{{$rspMethod}}) error + {{- end}} + Close() error + {{- if .ServerStreaming}} + Send(*{{$rspMethod}}) error + {{- end}} + {{- if .ClientStreaming}} + Recv() (*{{$reqMethod}}, error) + {{- end}} +} + +type {{$ServiceName | lowerFirst}}{{.Name}}Stream struct { + stream micro_server.Stream +} +{{if and (.ClientStreaming) (not .ServerStreaming)}} +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) SendAndClose(m *{{$rspMethod}}) error { + err := x.SendMsg(m) + if err == nil { + err = x.stream.Close() + } + return err +} +{{- end}} + +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) Close() error { + return x.stream.Close() +} + +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) Context() context.Context { + return x.stream.Context() +} + +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) SendMsg(m interface{}) error { + return x.stream.Send(m) +} + +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) RecvMsg(m interface{}) error { + return x.stream.Recv(m) +} +{{- if .ServerStreaming}} +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) Send(m *{{$rspMethod}}) error { + return x.stream.Send(m) +} +{{- end}} +{{if .ClientStreaming}} +func (x *{{$ServiceName | lowerFirst}}{{.Name}}Stream) Recv() (*{{$reqMethod}}, error) { + m := &{{$reqMethod}}{} + if err := x.stream.Recv(m); err != nil { + return nil, err + } + return m, nil +} +{{- end}} + +{{- end}} +{{- end}} diff --git a/vendor/github.com/Masterminds/semver/CHANGELOG.md b/vendor/github.com/Masterminds/semver/CHANGELOG.md deleted file mode 100644 index 237d53a..0000000 --- a/vendor/github.com/Masterminds/semver/CHANGELOG.md +++ /dev/null @@ -1,45 +0,0 @@ -# Release 1.2.2 (2016-12-13) - -## Fixed -- #34: Fixed issue where hyphen range was not working with pre-release parsing. - -# Release 1.2.1 (2016-11-28) - -## Fixed -- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha" - properly. - -# Release 1.2.0 (2016-11-04) - -## Added -- #20: Added MustParse function for versions (thanks @adamreese) -- #15: Added increment methods on versions (thanks @mh-cbon) - -## Fixed -- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and - might not satisfy the intended compatibility. The change here ignores pre-releases - on constraint checks (e.g., ~ or ^) when a pre-release is not part of the - constraint. For example, `^1.2.3` will ignore pre-releases while - `^1.2.3-alpha` will include them. 
- -# Release 1.1.1 (2016-06-30) - -## Changed -- Issue #9: Speed up version comparison performance (thanks @sdboyer) -- Issue #8: Added benchmarks (thanks @sdboyer) -- Updated Go Report Card URL to new location -- Updated Readme to add code snippet formatting (thanks @mh-cbon) -- Updating tagging to v[SemVer] structure for compatibility with other tools. - -# Release 1.1.0 (2016-03-11) - -- Issue #2: Implemented validation to provide reasons a versions failed a - constraint. - -# Release 1.0.1 (2015-12-31) - -- Fixed #1: * constraint failing on valid versions. - -# Release 1.0.0 (2015-10-20) - -- Initial release diff --git a/vendor/github.com/Masterminds/semver/LICENSE.txt b/vendor/github.com/Masterminds/semver/LICENSE.txt deleted file mode 100644 index 0da4aea..0000000 --- a/vendor/github.com/Masterminds/semver/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -The Masterminds -Copyright (C) 2014-2015, Matt Butcher and Matt Farina - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/Masterminds/semver/Makefile b/vendor/github.com/Masterminds/semver/Makefile deleted file mode 100644 index a7a1b4e..0000000 --- a/vendor/github.com/Masterminds/semver/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -.PHONY: setup -setup: - go get -u gopkg.in/alecthomas/gometalinter.v1 - gometalinter.v1 --install - -.PHONY: test -test: validate lint - @echo "==> Running tests" - go test -v - -.PHONY: validate -validate: - @echo "==> Running static validations" - @gometalinter.v1 \ - --disable-all \ - --enable deadcode \ - --severity deadcode:error \ - --enable gofmt \ - --enable gosimple \ - --enable ineffassign \ - --enable misspell \ - --enable vet \ - --tests \ - --vendor \ - --deadline 60s \ - ./... || exit_code=1 - -.PHONY: lint -lint: - @echo "==> Running linters" - @gometalinter.v1 \ - --disable-all \ - --enable golint \ - --vendor \ - --deadline 60s \ - ./... || : diff --git a/vendor/github.com/Masterminds/semver/README.md b/vendor/github.com/Masterminds/semver/README.md deleted file mode 100644 index 9a9ed45..0000000 --- a/vendor/github.com/Masterminds/semver/README.md +++ /dev/null @@ -1,163 +0,0 @@ -# SemVer - -The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. 
Specifically it provides the ability to: - -* Parse semantic versions -* Sort semantic versions -* Check if a semantic version fits within a set of constraints -* Optionally work with a `v` prefix - -[![Build Status](https://travis-ci.org/Masterminds/semver.svg)](https://travis-ci.org/Masterminds/semver) [![Build status](https://ci.appveyor.com/api/projects/status/jfk66lib7hb985k8/branch/master?svg=true&passingText=windows%20build%20passing&failingText=windows%20build%20failing)](https://ci.appveyor.com/project/mattfarina/semver/branch/master) [![GoDoc](https://godoc.org/github.com/Masterminds/semver?status.png)](https://godoc.org/github.com/Masterminds/semver) [![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver) - -## Parsing Semantic Versions - -To parse a semantic version use the `NewVersion` function. For example, - -```go - v, err := semver.NewVersion("1.2.3-beta.1+build345") -``` - -If there is an error the version wasn't parseable. The version object has methods -to get the parts of the version, compare it to other versions, convert the -version back into a string, and get the original string. For more details -please see the [documentation](https://godoc.org/github.com/Masterminds/semver). - -## Sorting Semantic Versions - -A set of versions can be sorted using the [`sort`](https://golang.org/pkg/sort/) -package from the standard library. For example, - -```go - raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} - vs := make([]*semver.Version, len(raw)) - for i, r := range raw { - v, err := semver.NewVersion(r) - if err != nil { - t.Errorf("Error parsing version: %s", err) - } - - vs[i] = v - } - - sort.Sort(semver.Collection(vs)) -``` - -## Checking Version Constraints - -Checking a version against version constraints is one of the most featureful -parts of the package. - -```go - c, err := semver.NewConstraint(">= 1.2.3") - if err != nil { - // Handle constraint not being parseable. - } - - v, _ := semver.NewVersion("1.3") - if err != nil { - // Handle version not being parseable. - } - // Check if the version meets the constraints. The a variable will be true. - a := c.Check(v) -``` - -## Basic Comparisons - -There are two elements to the comparisons. First, a comparison string is a list -of comma separated and comparisons. These are then separated by || separated or -comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a -comparison that's greater than or equal to 1.2 and less than 3.0.0 or is -greater than or equal to 4.2.3. - -The basic comparisons are: - -* `=`: equal (aliased to no operator) -* `!=`: not equal -* `>`: greater than -* `<`: less than -* `>=`: greater than or equal to -* `<=`: less than or equal to - -_Note, according to the Semantic Version specification pre-releases may not be -API compliant with their release counterpart. It says,_ - -> _A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version._ - -_SemVer comparisons without a pre-release value will skip pre-release versions. -For example, `>1.2.3` will skip pre-releases when looking at a list of values -while `>1.2.3-alpha.1` will evaluate pre-releases._ - -## Hyphen Range Comparisons - -There are multiple methods to handle ranges and the first is hyphens ranges. 
-These look like: - -* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` -* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` - -## Wildcards In Comparisons - -The `x`, `X`, and `*` characters can be used as a wildcard character. This works -for all comparison operators. When used on the `=` operator it falls -back to the pack level comparison (see tilde below). For example, - -* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` -* `>= 1.2.x` is equivalent to `>= 1.2.0` -* `<= 2.x` is equivalent to `<= 3` -* `*` is equivalent to `>= 0.0.0` - -## Tilde Range Comparisons (Patch) - -The tilde (`~`) comparison operator is for patch level ranges when a minor -version is specified and major level changes when the minor number is missing. -For example, - -* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` -* `~1` is equivalent to `>= 1, < 2` -* `~2.3` is equivalent to `>= 2.3, < 2.4` -* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` -* `~1.x` is equivalent to `>= 1, < 2` - -## Caret Range Comparisons (Major) - -The caret (`^`) comparison operator is for major level changes. This is useful -when comparisons of API versions as a major change is API breaking. For example, - -* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` -* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` -* `^2.3` is equivalent to `>= 2.3, < 3` -* `^2.x` is equivalent to `>= 2.0.0, < 3` - -# Validation - -In addition to testing a version against a constraint, a version can be validated -against a constraint. When validation fails a slice of errors containing why a -version didn't meet the constraint is returned. For example, - -```go - c, err := semver.NewConstraint("<= 1.2.3, >= 1.4") - if err != nil { - // Handle constraint not being parseable. - } - - v, _ := semver.NewVersion("1.3") - if err != nil { - // Handle version not being parseable. - } - - // Validate a version against a constraint. - a, msgs := c.Validate(v) - // a is false - for _, m := range msgs { - fmt.Println(m) - - // Loops over the errors which would read - // "1.3 is greater than 1.2.3" - // "1.3 is less than 1.4" - } -``` - -# Contribute - -If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues) -or [create a pull request](https://github.com/Masterminds/semver/pulls). diff --git a/vendor/github.com/Masterminds/semver/appveyor.yml b/vendor/github.com/Masterminds/semver/appveyor.yml deleted file mode 100644 index b2778df..0000000 --- a/vendor/github.com/Masterminds/semver/appveyor.yml +++ /dev/null @@ -1,44 +0,0 @@ -version: build-{build}.{branch} - -clone_folder: C:\gopath\src\github.com\Masterminds\semver -shallow_clone: true - -environment: - GOPATH: C:\gopath - -platform: - - x64 - -install: - - go version - - go env - - go get -u gopkg.in/alecthomas/gometalinter.v1 - - set PATH=%PATH%;%GOPATH%\bin - - gometalinter.v1.exe --install - -build_script: - - go install -v ./... - -test_script: - - "gometalinter.v1 \ - --disable-all \ - --enable deadcode \ - --severity deadcode:error \ - --enable gofmt \ - --enable gosimple \ - --enable ineffassign \ - --enable misspell \ - --enable vet \ - --tests \ - --vendor \ - --deadline 60s \ - ./... || exit_code=1" - - "gometalinter.v1 \ - --disable-all \ - --enable golint \ - --vendor \ - --deadline 60s \ - ./... 
|| :" - - go test -v - -deploy: off diff --git a/vendor/github.com/Masterminds/semver/collection.go b/vendor/github.com/Masterminds/semver/collection.go deleted file mode 100644 index a782358..0000000 --- a/vendor/github.com/Masterminds/semver/collection.go +++ /dev/null @@ -1,24 +0,0 @@ -package semver - -// Collection is a collection of Version instances and implements the sort -// interface. See the sort package for more details. -// https://golang.org/pkg/sort/ -type Collection []*Version - -// Len returns the length of a collection. The number of Version instances -// on the slice. -func (c Collection) Len() int { - return len(c) -} - -// Less is needed for the sort interface to compare two Version objects on the -// slice. If checks if one is less than the other. -func (c Collection) Less(i, j int) bool { - return c[i].LessThan(c[j]) -} - -// Swap is needed for the sort interface to replace the Version objects -// at two different positions in the slice. -func (c Collection) Swap(i, j int) { - c[i], c[j] = c[j], c[i] -} diff --git a/vendor/github.com/Masterminds/semver/constraints.go b/vendor/github.com/Masterminds/semver/constraints.go deleted file mode 100644 index 4c4fac3..0000000 --- a/vendor/github.com/Masterminds/semver/constraints.go +++ /dev/null @@ -1,421 +0,0 @@ -package semver - -import ( - "errors" - "fmt" - "regexp" - "strings" -) - -// Constraints is one or more constraint that a semantic version can be -// checked against. -type Constraints struct { - constraints [][]*constraint -} - -// NewConstraint returns a Constraints instance that a Version instance can -// be checked against. If there is a parse error it will be returned. -func NewConstraint(c string) (*Constraints, error) { - - // Rewrite - ranges into a comparison operation. - c = rewriteRange(c) - - ors := strings.Split(c, "||") - or := make([][]*constraint, len(ors)) - for k, v := range ors { - cs := strings.Split(v, ",") - result := make([]*constraint, len(cs)) - for i, s := range cs { - pc, err := parseConstraint(s) - if err != nil { - return nil, err - } - - result[i] = pc - } - or[k] = result - } - - o := &Constraints{constraints: or} - return o, nil -} - -// Check tests if a version satisfies the constraints. -func (cs Constraints) Check(v *Version) bool { - // loop over the ORs and check the inner ANDs - for _, o := range cs.constraints { - joy := true - for _, c := range o { - if !c.check(v) { - joy = false - break - } - } - - if joy { - return true - } - } - - return false -} - -// Validate checks if a version satisfies a constraint. If not a slice of -// reasons for the failure are returned in addition to a bool. 
-func (cs Constraints) Validate(v *Version) (bool, []error) { - // loop over the ORs and check the inner ANDs - var e []error - for _, o := range cs.constraints { - joy := true - for _, c := range o { - if !c.check(v) { - em := fmt.Errorf(c.msg, v, c.orig) - e = append(e, em) - joy = false - } - } - - if joy { - return true, []error{} - } - } - - return false, e -} - -var constraintOps map[string]cfunc -var constraintMsg map[string]string -var constraintRegex *regexp.Regexp - -func init() { - constraintOps = map[string]cfunc{ - "": constraintTildeOrEqual, - "=": constraintTildeOrEqual, - "!=": constraintNotEqual, - ">": constraintGreaterThan, - "<": constraintLessThan, - ">=": constraintGreaterThanEqual, - "=>": constraintGreaterThanEqual, - "<=": constraintLessThanEqual, - "=<": constraintLessThanEqual, - "~": constraintTilde, - "~>": constraintTilde, - "^": constraintCaret, - } - - constraintMsg = map[string]string{ - "": "%s is not equal to %s", - "=": "%s is not equal to %s", - "!=": "%s is equal to %s", - ">": "%s is less than or equal to %s", - "<": "%s is greater than or equal to %s", - ">=": "%s is less than %s", - "=>": "%s is less than %s", - "<=": "%s is greater than %s", - "=<": "%s is greater than %s", - "~": "%s does not have same major and minor version as %s", - "~>": "%s does not have same major and minor version as %s", - "^": "%s does not have same major version as %s", - } - - ops := make([]string, 0, len(constraintOps)) - for k := range constraintOps { - ops = append(ops, regexp.QuoteMeta(k)) - } - - constraintRegex = regexp.MustCompile(fmt.Sprintf( - `^\s*(%s)\s*(%s)\s*$`, - strings.Join(ops, "|"), - cvRegex)) - - constraintRangeRegex = regexp.MustCompile(fmt.Sprintf( - `\s*(%s)\s+-\s+(%s)\s*`, - cvRegex, cvRegex)) -} - -// An individual constraint -type constraint struct { - // The callback function for the restraint. It performs the logic for - // the constraint. - function cfunc - - msg string - - // The version used in the constraint check. For example, if a constraint - // is '<= 2.0.0' the con a version instance representing 2.0.0. - con *Version - - // The original parsed version (e.g., 4.x from != 4.x) - orig string - - // When an x is used as part of the version (e.g., 1.x) - minorDirty bool - dirty bool -} - -// Check if a version meets the constraint -func (c *constraint) check(v *Version) bool { - return c.function(v, c) -} - -type cfunc func(v *Version, c *constraint) bool - -func parseConstraint(c string) (*constraint, error) { - m := constraintRegex.FindStringSubmatch(c) - if m == nil { - return nil, fmt.Errorf("improper constraint: %s", c) - } - - ver := m[2] - orig := ver - minorDirty := false - dirty := false - if isX(m[3]) { - ver = "0.0.0" - dirty = true - } else if isX(strings.TrimPrefix(m[4], ".")) { - minorDirty = true - dirty = true - ver = fmt.Sprintf("%s.0.0%s", m[3], m[6]) - } else if isX(strings.TrimPrefix(m[5], ".")) { - dirty = true - ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6]) - } - - con, err := NewVersion(ver) - if err != nil { - - // The constraintRegex should catch any regex parsing errors. So, - // we should never get here. 
- return nil, errors.New("constraint Parser Error") - } - - cs := &constraint{ - function: constraintOps[m[1]], - msg: constraintMsg[m[1]], - con: con, - orig: orig, - minorDirty: minorDirty, - dirty: dirty, - } - return cs, nil -} - -// Constraint functions -func constraintNotEqual(v *Version, c *constraint) bool { - if c.dirty { - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if c.con.Major() != v.Major() { - return true - } - if c.con.Minor() != v.Minor() && !c.minorDirty { - return true - } else if c.minorDirty { - return false - } - - return false - } - - return !v.Equal(c.con) -} - -func constraintGreaterThan(v *Version, c *constraint) bool { - - // An edge case the constraint is 0.0.0 and the version is 0.0.0-someprerelease - // exists. This that case. - if !isNonZero(c.con) && isNonZero(v) { - return true - } - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - return v.Compare(c.con) == 1 -} - -func constraintLessThan(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if !c.dirty { - return v.Compare(c.con) < 0 - } - - if v.Major() > c.con.Major() { - return false - } else if v.Minor() > c.con.Minor() && !c.minorDirty { - return false - } - - return true -} - -func constraintGreaterThanEqual(v *Version, c *constraint) bool { - // An edge case the constraint is 0.0.0 and the version is 0.0.0-someprerelease - // exists. This that case. - if !isNonZero(c.con) && isNonZero(v) { - return true - } - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - return v.Compare(c.con) >= 0 -} - -func constraintLessThanEqual(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if !c.dirty { - return v.Compare(c.con) <= 0 - } - - if v.Major() > c.con.Major() { - return false - } else if v.Minor() > c.con.Minor() && !c.minorDirty { - return false - } - - return true -} - -// ~*, ~>* --> >= 0.0.0 (any) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0 -func constraintTilde(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. 
- if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if v.LessThan(c.con) { - return false - } - - // ~0.0.0 is a special case where all constraints are accepted. It's - // equivalent to >= 0.0.0. - if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 { - return true - } - - if v.Major() != c.con.Major() { - return false - } - - if v.Minor() != c.con.Minor() && !c.minorDirty { - return false - } - - return true -} - -// When there is a .x (dirty) status it automatically opts in to ~. Otherwise -// it's a straight = -func constraintTildeOrEqual(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if c.dirty { - c.msg = constraintMsg["~"] - return constraintTilde(v, c) - } - - return v.Equal(c.con) -} - -// ^* --> (any) -// ^2, ^2.x, ^2.x.x --> >=2.0.0, <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0, <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0, <2.0.0 -// ^1.2.3 --> >=1.2.3, <2.0.0 -// ^1.2.0 --> >=1.2.0, <2.0.0 -func constraintCaret(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if v.LessThan(c.con) { - return false - } - - if v.Major() != c.con.Major() { - return false - } - - return true -} - -var constraintRangeRegex *regexp.Regexp - -const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` + - `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + - `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` - -func isX(x string) bool { - switch x { - case "x", "*", "X": - return true - default: - return false - } -} - -func rewriteRange(i string) string { - m := constraintRangeRegex.FindAllStringSubmatch(i, -1) - if m == nil { - return i - } - o := i - for _, v := range m { - t := fmt.Sprintf(">= %s, <= %s", v[1], v[11]) - o = strings.Replace(o, v[0], t, 1) - } - - return o -} - -// Detect if a version is not zero (0.0.0) -func isNonZero(v *Version) bool { - if v.Major() != 0 || v.Minor() != 0 || v.Patch() != 0 || v.Prerelease() != "" { - return true - } - - return false -} diff --git a/vendor/github.com/Masterminds/semver/doc.go b/vendor/github.com/Masterminds/semver/doc.go deleted file mode 100644 index e00f65e..0000000 --- a/vendor/github.com/Masterminds/semver/doc.go +++ /dev/null @@ -1,115 +0,0 @@ -/* -Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go. - -Specifically it provides the ability to: - - * Parse semantic versions - * Sort semantic versions - * Check if a semantic version fits within a set of constraints - * Optionally work with a `v` prefix - -Parsing Semantic Versions - -To parse a semantic version use the `NewVersion` function. For example, - - v, err := semver.NewVersion("1.2.3-beta.1+build345") - -If there is an error the version wasn't parseable. The version object has methods -to get the parts of the version, compare it to other versions, convert the -version back into a string, and get the original string. For more details -please see the documentation at https://godoc.org/github.com/Masterminds/semver. - -Sorting Semantic Versions - -A set of versions can be sorted using the `sort` package from the standard library. 
-For example, - - raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} - vs := make([]*semver.Version, len(raw)) - for i, r := range raw { - v, err := semver.NewVersion(r) - if err != nil { - t.Errorf("Error parsing version: %s", err) - } - - vs[i] = v - } - - sort.Sort(semver.Collection(vs)) - -Checking Version Constraints - -Checking a version against version constraints is one of the most featureful -parts of the package. - - c, err := semver.NewConstraint(">= 1.2.3") - if err != nil { - // Handle constraint not being parseable. - } - - v, _ := semver.NewVersion("1.3") - if err != nil { - // Handle version not being parseable. - } - // Check if the version meets the constraints. The a variable will be true. - a := c.Check(v) - -Basic Comparisons - -There are two elements to the comparisons. First, a comparison string is a list -of comma separated and comparisons. These are then separated by || separated or -comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a -comparison that's greater than or equal to 1.2 and less than 3.0.0 or is -greater than or equal to 4.2.3. - -The basic comparisons are: - - * `=`: equal (aliased to no operator) - * `!=`: not equal - * `>`: greater than - * `<`: less than - * `>=`: greater than or equal to - * `<=`: less than or equal to - -Hyphen Range Comparisons - -There are multiple methods to handle ranges and the first is hyphens ranges. -These look like: - - * `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` - * `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` - -Wildcards In Comparisons - -The `x`, `X`, and `*` characters can be used as a wildcard character. This works -for all comparison operators. When used on the `=` operator it falls -back to the pack level comparison (see tilde below). For example, - - * `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` - * `>= 1.2.x` is equivalent to `>= 1.2.0` - * `<= 2.x` is equivalent to `<= 3` - * `*` is equivalent to `>= 0.0.0` - -Tilde Range Comparisons (Patch) - -The tilde (`~`) comparison operator is for patch level ranges when a minor -version is specified and major level changes when the minor number is missing. -For example, - - * `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` - * `~1` is equivalent to `>= 1, < 2` - * `~2.3` is equivalent to `>= 2.3, < 2.4` - * `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` - * `~1.x` is equivalent to `>= 1, < 2` - -Caret Range Comparisons (Major) - -The caret (`^`) comparison operator is for major level changes. This is useful -when comparisons of API versions as a major change is API breaking. For example, - - * `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` - * `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` - * `^2.3` is equivalent to `>= 2.3, < 3` - * `^2.x` is equivalent to `>= 2.0.0, < 3` -*/ -package semver diff --git a/vendor/github.com/Masterminds/semver/version.go b/vendor/github.com/Masterminds/semver/version.go deleted file mode 100644 index 28b26ac..0000000 --- a/vendor/github.com/Masterminds/semver/version.go +++ /dev/null @@ -1,375 +0,0 @@ -package semver - -import ( - "bytes" - "errors" - "fmt" - "regexp" - "strconv" - "strings" -) - -// The compiled version of the regex created at init() is cached here so it -// only needs to be created once. -var versionRegex *regexp.Regexp -var validPrereleaseRegex *regexp.Regexp - -var ( - // ErrInvalidSemVer is returned a version is found to be invalid when - // being parsed. 
- ErrInvalidSemVer = errors.New("Invalid Semantic Version") - - // ErrInvalidMetadata is returned when the metadata is an invalid format - ErrInvalidMetadata = errors.New("Invalid Metadata string") - - // ErrInvalidPrerelease is returned when the pre-release is an invalid format - ErrInvalidPrerelease = errors.New("Invalid Prerelease string") -) - -// SemVerRegex is the regular expression used to parse a semantic version. -const SemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` + - `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + - `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` - -// ValidPrerelease is the regular expression which validates -// both prerelease and metadata values. -const ValidPrerelease string = `^([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*)` - -// Version represents a single semantic version. -type Version struct { - major, minor, patch int64 - pre string - metadata string - original string -} - -func init() { - versionRegex = regexp.MustCompile("^" + SemVerRegex + "$") - validPrereleaseRegex = regexp.MustCompile(ValidPrerelease) -} - -// NewVersion parses a given version and returns an instance of Version or -// an error if unable to parse the version. -func NewVersion(v string) (*Version, error) { - m := versionRegex.FindStringSubmatch(v) - if m == nil { - return nil, ErrInvalidSemVer - } - - sv := &Version{ - metadata: m[8], - pre: m[5], - original: v, - } - - var temp int64 - temp, err := strconv.ParseInt(m[1], 10, 32) - if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) - } - sv.major = temp - - if m[2] != "" { - temp, err = strconv.ParseInt(strings.TrimPrefix(m[2], "."), 10, 32) - if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) - } - sv.minor = temp - } else { - sv.minor = 0 - } - - if m[3] != "" { - temp, err = strconv.ParseInt(strings.TrimPrefix(m[3], "."), 10, 32) - if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) - } - sv.patch = temp - } else { - sv.patch = 0 - } - - return sv, nil -} - -// MustParse parses a given version and panics on error. -func MustParse(v string) *Version { - sv, err := NewVersion(v) - if err != nil { - panic(err) - } - return sv -} - -// String converts a Version object to a string. -// Note, if the original version contained a leading v this version will not. -// See the Original() method to retrieve the original value. Semantic Versions -// don't contain a leading v per the spec. Instead it's optional on -// impelementation. -func (v *Version) String() string { - var buf bytes.Buffer - - fmt.Fprintf(&buf, "%d.%d.%d", v.major, v.minor, v.patch) - if v.pre != "" { - fmt.Fprintf(&buf, "-%s", v.pre) - } - if v.metadata != "" { - fmt.Fprintf(&buf, "+%s", v.metadata) - } - - return buf.String() -} - -// Original returns the original value passed in to be parsed. -func (v *Version) Original() string { - return v.original -} - -// Major returns the major version. -func (v *Version) Major() int64 { - return v.major -} - -// Minor returns the minor version. -func (v *Version) Minor() int64 { - return v.minor -} - -// Patch returns the patch version. -func (v *Version) Patch() int64 { - return v.patch -} - -// Prerelease returns the pre-release version. -func (v *Version) Prerelease() string { - return v.pre -} - -// Metadata returns the metadata on the version. -func (v *Version) Metadata() string { - return v.metadata -} - -// originalVPrefix returns the original 'v' prefix if any. 
-func (v *Version) originalVPrefix() string { - - // Note, only lowercase v is supported as a prefix by the parser. - if v.original != "" && v.original[:1] == "v" { - return v.original[:1] - } - return "" -} - -// IncPatch produces the next patch version. -// If the current version does not have prerelease/metadata information, -// it unsets metadata and prerelease values, increments patch number. -// If the current version has any of prerelease or metadata information, -// it unsets both values and keeps curent patch value -func (v Version) IncPatch() Version { - vNext := v - // according to http://semver.org/#spec-item-9 - // Pre-release versions have a lower precedence than the associated normal version. - // according to http://semver.org/#spec-item-10 - // Build metadata SHOULD be ignored when determining version precedence. - if v.pre != "" { - vNext.metadata = "" - vNext.pre = "" - } else { - vNext.metadata = "" - vNext.pre = "" - vNext.patch = v.patch + 1 - } - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext -} - -// IncMinor produces the next minor version. -// Sets patch to 0. -// Increments minor number. -// Unsets metadata. -// Unsets prerelease status. -func (v Version) IncMinor() Version { - vNext := v - vNext.metadata = "" - vNext.pre = "" - vNext.patch = 0 - vNext.minor = v.minor + 1 - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext -} - -// IncMajor produces the next major version. -// Sets patch to 0. -// Sets minor to 0. -// Increments major number. -// Unsets metadata. -// Unsets prerelease status. -func (v Version) IncMajor() Version { - vNext := v - vNext.metadata = "" - vNext.pre = "" - vNext.patch = 0 - vNext.minor = 0 - vNext.major = v.major + 1 - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext -} - -// SetPrerelease defines the prerelease value. -// Value must not include the required 'hypen' prefix. -func (v Version) SetPrerelease(prerelease string) (Version, error) { - vNext := v - if len(prerelease) > 0 && !validPrereleaseRegex.MatchString(prerelease) { - return vNext, ErrInvalidPrerelease - } - vNext.pre = prerelease - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext, nil -} - -// SetMetadata defines metadata value. -// Value must not include the required 'plus' prefix. -func (v Version) SetMetadata(metadata string) (Version, error) { - vNext := v - if len(metadata) > 0 && !validPrereleaseRegex.MatchString(metadata) { - return vNext, ErrInvalidMetadata - } - vNext.metadata = metadata - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext, nil -} - -// LessThan tests if one version is less than another one. -func (v *Version) LessThan(o *Version) bool { - return v.Compare(o) < 0 -} - -// GreaterThan tests if one version is greater than another one. -func (v *Version) GreaterThan(o *Version) bool { - return v.Compare(o) > 0 -} - -// Equal tests if two versions are equal to each other. -// Note, versions can be equal with different metadata since metadata -// is not considered part of the comparable version. -func (v *Version) Equal(o *Version) bool { - return v.Compare(o) == 0 -} - -// Compare compares this version to another one. It returns -1, 0, or 1 if -// the version smaller, equal, or larger than the other version. -// -// Versions are compared by X.Y.Z. Build metadata is ignored. Prerelease is -// lower than the version without a prerelease. 
-func (v *Version) Compare(o *Version) int { - // Compare the major, minor, and patch version for differences. If a - // difference is found return the comparison. - if d := compareSegment(v.Major(), o.Major()); d != 0 { - return d - } - if d := compareSegment(v.Minor(), o.Minor()); d != 0 { - return d - } - if d := compareSegment(v.Patch(), o.Patch()); d != 0 { - return d - } - - // At this point the major, minor, and patch versions are the same. - ps := v.pre - po := o.Prerelease() - - if ps == "" && po == "" { - return 0 - } - if ps == "" { - return 1 - } - if po == "" { - return -1 - } - - return comparePrerelease(ps, po) -} - -func compareSegment(v, o int64) int { - if v < o { - return -1 - } - if v > o { - return 1 - } - - return 0 -} - -func comparePrerelease(v, o string) int { - - // split the prelease versions by their part. The separator, per the spec, - // is a . - sparts := strings.Split(v, ".") - oparts := strings.Split(o, ".") - - // Find the longer length of the parts to know how many loop iterations to - // go through. - slen := len(sparts) - olen := len(oparts) - - l := slen - if olen > slen { - l = olen - } - - // Iterate over each part of the prereleases to compare the differences. - for i := 0; i < l; i++ { - // Since the lentgh of the parts can be different we need to create - // a placeholder. This is to avoid out of bounds issues. - stemp := "" - if i < slen { - stemp = sparts[i] - } - - otemp := "" - if i < olen { - otemp = oparts[i] - } - - d := comparePrePart(stemp, otemp) - if d != 0 { - return d - } - } - - // Reaching here means two versions are of equal value but have different - // metadata (the part following a +). They are not identical in string form - // but the version comparison finds them to be equal. - return 0 -} - -func comparePrePart(s, o string) int { - // Fastpath if they are equal - if s == o { - return 0 - } - - // When s or o are empty we can use the other in an attempt to determine - // the response. - if o == "" { - _, n := strconv.ParseInt(s, 10, 64) - if n != nil { - return -1 - } - return 1 - } - if s == "" { - _, n := strconv.ParseInt(o, 10, 64) - if n != nil { - return 1 - } - return -1 - } - - if s > o { - return 1 - } - return -1 -} diff --git a/vendor/github.com/Masterminds/sprig/.gitignore b/vendor/github.com/Masterminds/sprig/.gitignore deleted file mode 100644 index 5e3002f..0000000 --- a/vendor/github.com/Masterminds/sprig/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -vendor/ -/.glide diff --git a/vendor/github.com/Masterminds/sprig/CHANGELOG.md b/vendor/github.com/Masterminds/sprig/CHANGELOG.md deleted file mode 100644 index 6154db8..0000000 --- a/vendor/github.com/Masterminds/sprig/CHANGELOG.md +++ /dev/null @@ -1,131 +0,0 @@ -# Changelog - -## Release 2.14.1 (2017-12-01) - -### Fixed - -- #60: Fix typo in function name documentation (thanks @neil-ca-moore) -- #61: Removing line with {{ due to blocking github pages genertion -- #64: Update the list functions to handle int, string, and other slices for compatibility - -## Release 2.14.0 (2017-10-06) - -This new version of Sprig adds a set of functions for generating and working with SSL certificates. 
- -- `genCA` generates an SSL Certificate Authority -- `genSelfSignedCert` generates an SSL self-signed certificate -- `genSignedCert` generates an SSL certificate and key based on a given CA - -## Release 2.13.0 (2017-09-18) - -This release adds new functions, including: - -- `regexMatch`, `regexFindAll`, `regexFind`, `regexReplaceAll`, `regexReplaceAllLiteral`, and `regexSplit` to work with regular expressions -- `floor`, `ceil`, and `round` math functions -- `toDate` converts a string to a date -- `nindent` is just like `indent` but also prepends a new line -- `ago` returns the time from `time.Now` - -### Added - -- #40: Added basic regex functionality (thanks @alanquillin) -- #41: Added ceil floor and round functions (thanks @alanquillin) -- #48: Added toDate function (thanks @andreynering) -- #50: Added nindent function (thanks @binoculars) -- #46: Added ago function (thanks @slayer) - -### Changed - -- #51: Updated godocs to include new string functions (thanks @curtisallen) -- #49: Added ability to merge multiple dicts (thanks @binoculars) - -## Release 2.12.0 (2017-05-17) - -- `snakecase`, `camelcase`, and `shuffle` are three new string functions -- `fail` allows you to bail out of a template render when conditions are not met - -## Release 2.11.0 (2017-05-02) - -- Added `toJson` and `toPrettyJson` -- Added `merge` -- Refactored documentation - -## Release 2.10.0 (2017-03-15) - -- Added `semver` and `semverCompare` for Semantic Versions -- `list` replaces `tuple` -- Fixed issue with `join` -- Added `first`, `last`, `intial`, `rest`, `prepend`, `append`, `toString`, `toStrings`, `sortAlpha`, `reverse`, `coalesce`, `pluck`, `pick`, `compact`, `keys`, `omit`, `uniq`, `has`, `without` - -## Release 2.9.0 (2017-02-23) - -- Added `splitList` to split a list -- Added crypto functions of `genPrivateKey` and `derivePassword` - -## Release 2.8.0 (2016-12-21) - -- Added access to several path functions (`base`, `dir`, `clean`, `ext`, and `abs`) -- Added functions for _mutating_ dictionaries (`set`, `unset`, `hasKey`) - -## Release 2.7.0 (2016-12-01) - -- Added `sha256sum` to generate a hash of an input -- Added functions to convert a numeric or string to `int`, `int64`, `float64` - -## Release 2.6.0 (2016-10-03) - -- Added a `uuidv4` template function for generating UUIDs inside of a template. - -## Release 2.5.0 (2016-08-19) - -- New `trimSuffix`, `trimPrefix`, `hasSuffix`, and `hasPrefix` functions -- New aliases have been added for a few functions that didn't follow the naming conventions (`trimAll` and `abbrevBoth`) -- `trimall` and `abbrevboth` (notice the case) are deprecated and will be removed in 3.0.0 - -## Release 2.4.0 (2016-08-16) - -- Adds two functions: `until` and `untilStep` - -## Release 2.3.0 (2016-06-21) - -- cat: Concatenate strings with whitespace separators. -- replace: Replace parts of a string: `replace " " "-" "Me First"` renders "Me-First" -- plural: Format plurals: `len "foo" | plural "one foo" "many foos"` renders "many foos" -- indent: Indent blocks of text in a way that is sensitive to "\n" characters. - -## Release 2.2.0 (2016-04-21) - -- Added a `genPrivateKey` function (Thanks @bacongobbler) - -## Release 2.1.0 (2016-03-30) - -- `default` now prints the default value when it does not receive a value down the pipeline. It is much safer now to do `{{.Foo | default "bar"}}`. -- Added accessors for "hermetic" functions. These return only functions that, when given the same input, produce the same output. 
- -## Release 2.0.0 (2016-03-29) - -Because we switched from `int` to `int64` as the return value for all integer math functions, the library's major version number has been incremented. - -- `min` complements `max` (formerly `biggest`) -- `empty` indicates that a value is the empty value for its type -- `tuple` creates a tuple inside of a template: `{{$t := tuple "a", "b" "c"}}` -- `dict` creates a dictionary inside of a template `{{$d := dict "key1" "val1" "key2" "val2"}}` -- Date formatters have been added for HTML dates (as used in `date` input fields) -- Integer math functions can convert from a number of types, including `string` (via `strconv.ParseInt`). - -## Release 1.2.0 (2016-02-01) - -- Added quote and squote -- Added b32enc and b32dec -- add now takes varargs -- biggest now takes varargs - -## Release 1.1.0 (2015-12-29) - -- Added #4: Added contains function. strings.Contains, but with the arguments - switched to simplify common pipelines. (thanks krancour) -- Added Travis-CI testing support - -## Release 1.0.0 (2015-12-23) - -- Initial release diff --git a/vendor/github.com/Masterminds/sprig/LICENSE.txt b/vendor/github.com/Masterminds/sprig/LICENSE.txt deleted file mode 100644 index 5c95acc..0000000 --- a/vendor/github.com/Masterminds/sprig/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -Sprig -Copyright (C) 2013 Masterminds - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/Masterminds/sprig/Makefile b/vendor/github.com/Masterminds/sprig/Makefile deleted file mode 100644 index 63a93fd..0000000 --- a/vendor/github.com/Masterminds/sprig/Makefile +++ /dev/null @@ -1,13 +0,0 @@ - -HAS_GLIDE := $(shell command -v glide;) - -.PHONY: test -test: - go test -v . - -.PHONY: setup -setup: -ifndef HAS_GLIDE - go get -u github.com/Masterminds/glide -endif - glide install diff --git a/vendor/github.com/Masterminds/sprig/README.md b/vendor/github.com/Masterminds/sprig/README.md deleted file mode 100644 index 25bf3d4..0000000 --- a/vendor/github.com/Masterminds/sprig/README.md +++ /dev/null @@ -1,81 +0,0 @@ -# Sprig: Template functions for Go templates -[![Stability: Sustained](https://masterminds.github.io/stability/sustained.svg)](https://masterminds.github.io/stability/sustained.html) -[![Build Status](https://travis-ci.org/Masterminds/sprig.svg?branch=master)](https://travis-ci.org/Masterminds/sprig) - -The Go language comes with a [built-in template -language](http://golang.org/pkg/text/template/), but not -very many template functions. 
This library provides a group of commonly -used template functions. - -It is inspired by the template functions found in -[Twig](http://twig.sensiolabs.org/documentation) and also in various -JavaScript libraries, such as [underscore.js](http://underscorejs.org/). - -## Usage - -Template developers can read the [Sprig function documentation](http://masterminds.github.io/sprig/) to -learn about the >100 template functions available. - -For Go developers wishing to include Sprig as a library in their programs, -API documentation is available [at GoDoc.org](http://godoc.org/github.com/Masterminds/sprig), but -read on for standard usage. - -### Load the Sprig library - -To load the Sprig `FuncMap`: - -```go - -import ( - "github.com/Masterminds/sprig" - "html/template" -) - -// This example illustrates that the FuncMap *must* be set before the -// templates themselves are loaded. -tpl := template.Must( - template.New("base").Funcs(sprig.FuncMap()).ParseGlob("*.html") -) - - -``` - -### Call the functions inside of templates - -By convention, all functions are lowercase. This seems to follow the Go -idiom for template functions (as opposed to template methods, which are -TitleCase). - - -Example: - -``` -{{ "hello!" | upper | repeat 5 }} -``` - -Produces: - -``` -HELLO!HELLO!HELLO!HELLO!HELLO! -``` - -## Principles: - -The following principles were used in deciding on which functions to add, and -determining how to implement them. - -- Template functions should be used to build layout. Therefore, the following - types of operations are within the domain of template functions: - - Formatting - - Layout - - Simple type conversions - - Utilities that assist in handling common formatting and layout needs (e.g. arithmetic) -- Template functions should not return errors unless there is no way to print - a sensible value. For example, converting a string to an integer should not - produce an error if conversion fails. Instead, it should display a default - value that can be displayed. -- Simple math is necessary for grid layouts, pagers, and so on. Complex math - (anything other than arithmetic) should be done outside of templates. -- Template functions only deal with the data passed into them. They never retrieve - data from a source. -- Finally, do not override core Go template functions. 
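A minimal end-to-end sketch of the usage described in the removed Sprig README above, assuming the `github.com/Masterminds/sprig` import path of the vendored copy deleted here:

```go
package main

import (
	"html/template"
	"os"

	"github.com/Masterminds/sprig"
)

func main() {
	// Register the Sprig FuncMap before parsing, as the README requires;
	// otherwise `upper` and `repeat` are unknown at parse time.
	tpl := template.Must(
		template.New("demo").
			Funcs(sprig.FuncMap()).
			Parse(`{{ "hello!" | upper | repeat 5 }}`),
	)

	// Prints: HELLO!HELLO!HELLO!HELLO!HELLO!
	if err := tpl.Execute(os.Stdout, nil); err != nil {
		panic(err)
	}
}
```

The FuncMap has to be attached before `Parse`, since the template parser rejects calls to functions it does not yet know about.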
diff --git a/vendor/github.com/Masterminds/sprig/crypto.go b/vendor/github.com/Masterminds/sprig/crypto.go deleted file mode 100644 index d1b4650..0000000 --- a/vendor/github.com/Masterminds/sprig/crypto.go +++ /dev/null @@ -1,380 +0,0 @@ -package sprig - -import ( - "bytes" - "crypto/dsa" - "crypto/ecdsa" - "crypto/elliptic" - "crypto/hmac" - "crypto/rand" - "crypto/rsa" - "crypto/sha256" - "crypto/x509" - "crypto/x509/pkix" - "encoding/asn1" - "encoding/binary" - "encoding/hex" - "encoding/pem" - "errors" - "fmt" - "math/big" - "net" - "time" - - uuid "github.com/satori/go.uuid" - "golang.org/x/crypto/scrypt" -) - -func sha256sum(input string) string { - hash := sha256.Sum256([]byte(input)) - return hex.EncodeToString(hash[:]) -} - -// uuidv4 provides a safe and secure UUID v4 implementation -func uuidv4() string { - return fmt.Sprintf("%s", uuid.NewV4()) -} - -var master_password_seed = "com.lyndir.masterpassword" - -var password_type_templates = map[string][][]byte{ - "maximum": {[]byte("anoxxxxxxxxxxxxxxxxx"), []byte("axxxxxxxxxxxxxxxxxno")}, - "long": {[]byte("CvcvnoCvcvCvcv"), []byte("CvcvCvcvnoCvcv"), []byte("CvcvCvcvCvcvno"), []byte("CvccnoCvcvCvcv"), []byte("CvccCvcvnoCvcv"), - []byte("CvccCvcvCvcvno"), []byte("CvcvnoCvccCvcv"), []byte("CvcvCvccnoCvcv"), []byte("CvcvCvccCvcvno"), []byte("CvcvnoCvcvCvcc"), - []byte("CvcvCvcvnoCvcc"), []byte("CvcvCvcvCvccno"), []byte("CvccnoCvccCvcv"), []byte("CvccCvccnoCvcv"), []byte("CvccCvccCvcvno"), - []byte("CvcvnoCvccCvcc"), []byte("CvcvCvccnoCvcc"), []byte("CvcvCvccCvccno"), []byte("CvccnoCvcvCvcc"), []byte("CvccCvcvnoCvcc"), - []byte("CvccCvcvCvccno")}, - "medium": {[]byte("CvcnoCvc"), []byte("CvcCvcno")}, - "short": {[]byte("Cvcn")}, - "basic": {[]byte("aaanaaan"), []byte("aannaaan"), []byte("aaannaaa")}, - "pin": {[]byte("nnnn")}, -} - -var template_characters = map[byte]string{ - 'V': "AEIOU", - 'C': "BCDFGHJKLMNPQRSTVWXYZ", - 'v': "aeiou", - 'c': "bcdfghjklmnpqrstvwxyz", - 'A': "AEIOUBCDFGHJKLMNPQRSTVWXYZ", - 'a': "AEIOUaeiouBCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz", - 'n': "0123456789", - 'o': "@&%?,=[]_:-+*$#!'^~;()/.", - 'x': "AEIOUaeiouBCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz0123456789!@#$%^&*()", -} - -func derivePassword(counter uint32, password_type, password, user, site string) string { - var templates = password_type_templates[password_type] - if templates == nil { - return fmt.Sprintf("cannot find password template %s", password_type) - } - - var buffer bytes.Buffer - buffer.WriteString(master_password_seed) - binary.Write(&buffer, binary.BigEndian, uint32(len(user))) - buffer.WriteString(user) - - salt := buffer.Bytes() - key, err := scrypt.Key([]byte(password), salt, 32768, 8, 2, 64) - if err != nil { - return fmt.Sprintf("failed to derive password: %s", err) - } - - buffer.Truncate(len(master_password_seed)) - binary.Write(&buffer, binary.BigEndian, uint32(len(site))) - buffer.WriteString(site) - binary.Write(&buffer, binary.BigEndian, counter) - - var hmacv = hmac.New(sha256.New, key) - hmacv.Write(buffer.Bytes()) - var seed = hmacv.Sum(nil) - var temp = templates[int(seed[0])%len(templates)] - - buffer.Truncate(0) - for i, element := range temp { - pass_chars := template_characters[element] - pass_char := pass_chars[int(seed[i+1])%len(pass_chars)] - buffer.WriteByte(pass_char) - } - - return buffer.String() -} - -func generatePrivateKey(typ string) string { - var priv interface{} - var err error - switch typ { - case "", "rsa": - // good enough for government work - priv, err = rsa.GenerateKey(rand.Reader, 4096) - 
case "dsa": - key := new(dsa.PrivateKey) - // again, good enough for government work - if err = dsa.GenerateParameters(&key.Parameters, rand.Reader, dsa.L2048N256); err != nil { - return fmt.Sprintf("failed to generate dsa params: %s", err) - } - err = dsa.GenerateKey(key, rand.Reader) - priv = key - case "ecdsa": - // again, good enough for government work - priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader) - default: - return "Unknown type " + typ - } - if err != nil { - return fmt.Sprintf("failed to generate private key: %s", err) - } - - return string(pem.EncodeToMemory(pemBlockForKey(priv))) -} - -type DSAKeyFormat struct { - Version int - P, Q, G, Y, X *big.Int -} - -func pemBlockForKey(priv interface{}) *pem.Block { - switch k := priv.(type) { - case *rsa.PrivateKey: - return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)} - case *dsa.PrivateKey: - val := DSAKeyFormat{ - P: k.P, Q: k.Q, G: k.G, - Y: k.Y, X: k.X, - } - bytes, _ := asn1.Marshal(val) - return &pem.Block{Type: "DSA PRIVATE KEY", Bytes: bytes} - case *ecdsa.PrivateKey: - b, _ := x509.MarshalECPrivateKey(k) - return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b} - default: - return nil - } -} - -type certificate struct { - Cert string - Key string -} - -func generateCertificateAuthority( - cn string, - daysValid int, -) (certificate, error) { - ca := certificate{} - - template, err := getBaseCertTemplate(cn, nil, nil, daysValid) - if err != nil { - return ca, err - } - // Override KeyUsage and IsCA - template.KeyUsage = x509.KeyUsageKeyEncipherment | - x509.KeyUsageDigitalSignature | - x509.KeyUsageCertSign - template.IsCA = true - - priv, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - return ca, fmt.Errorf("error generating rsa key: %s", err) - } - - ca.Cert, ca.Key, err = getCertAndKey(template, priv, template, priv) - if err != nil { - return ca, err - } - - return ca, nil -} - -func generateSelfSignedCertificate( - cn string, - ips []interface{}, - alternateDNS []interface{}, - daysValid int, -) (certificate, error) { - cert := certificate{} - - template, err := getBaseCertTemplate(cn, ips, alternateDNS, daysValid) - if err != nil { - return cert, err - } - - priv, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - return cert, fmt.Errorf("error generating rsa key: %s", err) - } - - cert.Cert, cert.Key, err = getCertAndKey(template, priv, template, priv) - if err != nil { - return cert, err - } - - return cert, nil -} - -func generateSignedCertificate( - cn string, - ips []interface{}, - alternateDNS []interface{}, - daysValid int, - ca certificate, -) (certificate, error) { - cert := certificate{} - - decodedSignerCert, _ := pem.Decode([]byte(ca.Cert)) - if decodedSignerCert == nil { - return cert, errors.New("unable to decode certificate") - } - signerCert, err := x509.ParseCertificate(decodedSignerCert.Bytes) - if err != nil { - return cert, fmt.Errorf( - "error parsing certificate: decodedSignerCert.Bytes: %s", - err, - ) - } - decodedSignerKey, _ := pem.Decode([]byte(ca.Key)) - if decodedSignerKey == nil { - return cert, errors.New("unable to decode key") - } - signerKey, err := x509.ParsePKCS1PrivateKey(decodedSignerKey.Bytes) - if err != nil { - return cert, fmt.Errorf( - "error parsing prive key: decodedSignerKey.Bytes: %s", - err, - ) - } - - template, err := getBaseCertTemplate(cn, ips, alternateDNS, daysValid) - if err != nil { - return cert, err - } - - priv, err := rsa.GenerateKey(rand.Reader, 2048) - if err != nil { - return cert, 
fmt.Errorf("error generating rsa key: %s", err) - } - - cert.Cert, cert.Key, err = getCertAndKey( - template, - priv, - signerCert, - signerKey, - ) - if err != nil { - return cert, err - } - - return cert, nil -} - -func getCertAndKey( - template *x509.Certificate, - signeeKey *rsa.PrivateKey, - parent *x509.Certificate, - signingKey *rsa.PrivateKey, -) (string, string, error) { - derBytes, err := x509.CreateCertificate( - rand.Reader, - template, - parent, - &signeeKey.PublicKey, - signingKey, - ) - if err != nil { - return "", "", fmt.Errorf("error creating certificate: %s", err) - } - - certBuffer := bytes.Buffer{} - if err := pem.Encode( - &certBuffer, - &pem.Block{Type: "CERTIFICATE", Bytes: derBytes}, - ); err != nil { - return "", "", fmt.Errorf("error pem-encoding certificate: %s", err) - } - - keyBuffer := bytes.Buffer{} - if err := pem.Encode( - &keyBuffer, - &pem.Block{ - Type: "RSA PRIVATE KEY", - Bytes: x509.MarshalPKCS1PrivateKey(signeeKey), - }, - ); err != nil { - return "", "", fmt.Errorf("error pem-encoding key: %s", err) - } - - return string(certBuffer.Bytes()), string(keyBuffer.Bytes()), nil -} - -func getBaseCertTemplate( - cn string, - ips []interface{}, - alternateDNS []interface{}, - daysValid int, -) (*x509.Certificate, error) { - ipAddresses, err := getNetIPs(ips) - if err != nil { - return nil, err - } - dnsNames, err := getAlternateDNSStrs(alternateDNS) - if err != nil { - return nil, err - } - return &x509.Certificate{ - SerialNumber: big.NewInt(1), - Subject: pkix.Name{ - CommonName: cn, - }, - IPAddresses: ipAddresses, - DNSNames: dnsNames, - NotBefore: time.Now(), - NotAfter: time.Now().Add(time.Hour * 24 * time.Duration(daysValid)), - KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature, - ExtKeyUsage: []x509.ExtKeyUsage{ - x509.ExtKeyUsageServerAuth, - x509.ExtKeyUsageClientAuth, - }, - BasicConstraintsValid: true, - }, nil -} - -func getNetIPs(ips []interface{}) ([]net.IP, error) { - if ips == nil { - return []net.IP{}, nil - } - var ipStr string - var ok bool - var netIP net.IP - netIPs := make([]net.IP, len(ips)) - for i, ip := range ips { - ipStr, ok = ip.(string) - if !ok { - return nil, fmt.Errorf("error parsing ip: %v is not a string", ip) - } - netIP = net.ParseIP(ipStr) - if netIP == nil { - return nil, fmt.Errorf("error parsing ip: %s", ipStr) - } - netIPs[i] = netIP - } - return netIPs, nil -} - -func getAlternateDNSStrs(alternateDNS []interface{}) ([]string, error) { - if alternateDNS == nil { - return []string{}, nil - } - var dnsStr string - var ok bool - alternateDNSStrs := make([]string, len(alternateDNS)) - for i, dns := range alternateDNS { - dnsStr, ok = dns.(string) - if !ok { - return nil, fmt.Errorf( - "error processing alternate dns name: %v is not a string", - dns, - ) - } - alternateDNSStrs[i] = dnsStr - } - return alternateDNSStrs, nil -} diff --git a/vendor/github.com/Masterminds/sprig/date.go b/vendor/github.com/Masterminds/sprig/date.go deleted file mode 100644 index 90bde03..0000000 --- a/vendor/github.com/Masterminds/sprig/date.go +++ /dev/null @@ -1,76 +0,0 @@ -package sprig - -import ( - "time" -) - -// Given a format and a date, format the date string. -// -// Date can be a `time.Time` or an `int, int32, int64`. -// In the later case, it is treated as seconds since UNIX -// epoch. 
-func date(fmt string, date interface{}) string { - return dateInZone(fmt, date, "Local") -} - -func htmlDate(date interface{}) string { - return dateInZone("2006-01-02", date, "Local") -} - -func htmlDateInZone(date interface{}, zone string) string { - return dateInZone("2006-01-02", date, zone) -} - -func dateInZone(fmt string, date interface{}, zone string) string { - var t time.Time - switch date := date.(type) { - default: - t = time.Now() - case time.Time: - t = date - case int64: - t = time.Unix(date, 0) - case int: - t = time.Unix(int64(date), 0) - case int32: - t = time.Unix(int64(date), 0) - } - - loc, err := time.LoadLocation(zone) - if err != nil { - loc, _ = time.LoadLocation("UTC") - } - - return t.In(loc).Format(fmt) -} - -func dateModify(fmt string, date time.Time) time.Time { - d, err := time.ParseDuration(fmt) - if err != nil { - return date - } - return date.Add(d) -} - -func dateAgo(date interface{}) string { - var t time.Time - - switch date := date.(type) { - default: - t = time.Now() - case time.Time: - t = date - case int64: - t = time.Unix(date, 0) - case int: - t = time.Unix(int64(date), 0) - } - // Drop resolution to seconds - duration := time.Since(t) / time.Second * time.Second - return duration.String() -} - -func toDate(fmt, str string) time.Time { - t, _ := time.ParseInLocation(fmt, str, time.Local) - return t -} diff --git a/vendor/github.com/Masterminds/sprig/defaults.go b/vendor/github.com/Masterminds/sprig/defaults.go deleted file mode 100644 index a2381d6..0000000 --- a/vendor/github.com/Masterminds/sprig/defaults.go +++ /dev/null @@ -1,75 +0,0 @@ -package sprig - -import ( - "encoding/json" - "reflect" -) - -// dfault checks whether `given` is set, and returns default if not set. -// -// This returns `d` if `given` appears not to be set, and `given` otherwise. -// -// For numeric types 0 is unset. -// For strings, maps, arrays, and slices, len() = 0 is considered unset. -// For bool, false is unset. -// Structs are never considered unset. -// -// For everything else, including pointers, a nil value is unset. -func dfault(d interface{}, given ...interface{}) interface{} { - - if empty(given) || empty(given[0]) { - return d - } - return given[0] -} - -// empty returns true if the given value has the zero value for its type. -func empty(given interface{}) bool { - g := reflect.ValueOf(given) - if !g.IsValid() { - return true - } - - // Basically adapted from text/template.isTrue - switch g.Kind() { - default: - return g.IsNil() - case reflect.Array, reflect.Slice, reflect.Map, reflect.String: - return g.Len() == 0 - case reflect.Bool: - return g.Bool() == false - case reflect.Complex64, reflect.Complex128: - return g.Complex() == 0 - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return g.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return g.Uint() == 0 - case reflect.Float32, reflect.Float64: - return g.Float() == 0 - case reflect.Struct: - return false - } - return true -} - -// coalesce returns the first non-empty value. 
-func coalesce(v ...interface{}) interface{} { - for _, val := range v { - if !empty(val) { - return val - } - } - return nil -} - -// toJson encodes an item into a JSON string -func toJson(v interface{}) string { - output, _ := json.Marshal(v) - return string(output) -} - -// toPrettyJson encodes an item into a pretty (indented) JSON string -func toPrettyJson(v interface{}) string { - output, _ := json.MarshalIndent(v, "", " ") - return string(output) -} diff --git a/vendor/github.com/Masterminds/sprig/dict.go b/vendor/github.com/Masterminds/sprig/dict.go deleted file mode 100644 index fabc736..0000000 --- a/vendor/github.com/Masterminds/sprig/dict.go +++ /dev/null @@ -1,86 +0,0 @@ -package sprig - -import "github.com/imdario/mergo" - -func set(d map[string]interface{}, key string, value interface{}) map[string]interface{} { - d[key] = value - return d -} - -func unset(d map[string]interface{}, key string) map[string]interface{} { - delete(d, key) - return d -} - -func hasKey(d map[string]interface{}, key string) bool { - _, ok := d[key] - return ok -} - -func pluck(key string, d ...map[string]interface{}) []interface{} { - res := []interface{}{} - for _, dict := range d { - if val, ok := dict[key]; ok { - res = append(res, val) - } - } - return res -} - -func keys(dict map[string]interface{}) []string { - k := []string{} - for key := range dict { - k = append(k, key) - } - return k -} - -func pick(dict map[string]interface{}, keys ...string) map[string]interface{} { - res := map[string]interface{}{} - for _, k := range keys { - if v, ok := dict[k]; ok { - res[k] = v - } - } - return res -} - -func omit(dict map[string]interface{}, keys ...string) map[string]interface{} { - res := map[string]interface{}{} - - omit := make(map[string]bool, len(keys)) - for _, k := range keys { - omit[k] = true - } - - for k, v := range dict { - if _, ok := omit[k]; !ok { - res[k] = v - } - } - return res -} - -func dict(v ...interface{}) map[string]interface{} { - dict := map[string]interface{}{} - lenv := len(v) - for i := 0; i < lenv; i += 2 { - key := strval(v[i]) - if i+1 >= lenv { - dict[key] = "" - continue - } - dict[key] = v[i+1] - } - return dict -} - -func merge(dst map[string]interface{}, srcs ...map[string]interface{}) interface{} { - for _, src := range srcs { - if err := mergo.Merge(&dst, src); err != nil { - // Swallow errors inside of a template. - return "" - } - } - return dst -} diff --git a/vendor/github.com/Masterminds/sprig/doc.go b/vendor/github.com/Masterminds/sprig/doc.go deleted file mode 100644 index 91ba4dc..0000000 --- a/vendor/github.com/Masterminds/sprig/doc.go +++ /dev/null @@ -1,230 +0,0 @@ -/* -Sprig: Template functions for Go. - -This package contains a number of utility functions for working with data -inside of Go `html/template` and `text/template` files. - -To add these functions, use the `template.Funcs()` method: - - t := templates.New("foo").Funcs(sprig.FuncMap()) - -Note that you should add the function map before you parse any template files. - - In several cases, Sprig reverses the order of arguments from the way they - appear in the standard library. This is to make it easier to pipe - arguments into functions. - -Date Functions - - - date FORMAT TIME: Format a date, where a date is an integer type or a time.Time type, and - format is a time.Format formatting string. - - dateModify: Given a date, modify it with a duration: `date_modify "-1.5h" now`. If the duration doesn't - parse, it returns the time unaltered. 
See `time.ParseDuration` for info on duration strings. - - now: Current time.Time, for feeding into date-related functions. - - htmlDate TIME: Format a date for use in the value field of an HTML "date" form element. - - dateInZone FORMAT TIME TZ: Like date, but takes three arguments: format, timestamp, - timezone. - - htmlDateInZone TIME TZ: Like htmlDate, but takes two arguments: timestamp, - timezone. - -String Functions - - - abbrev: Truncate a string with ellipses. `abbrev 5 "hello world"` yields "he..." - - abbrevboth: Abbreviate from both sides, yielding "...lo wo..." - - trunc: Truncate a string (no suffix). `trunc 5 "Hello World"` yields "hello". - - trim: strings.TrimSpace - - trimAll: strings.Trim, but with the argument order reversed `trimAll "$" "$5.00"` or `"$5.00 | trimAll "$"` - - trimSuffix: strings.TrimSuffix, but with the argument order reversed: `trimSuffix "-" "ends-with-"` - - trimPrefix: strings.TrimPrefix, but with the argument order reversed `trimPrefix "$" "$5"` - - upper: strings.ToUpper - - lower: strings.ToLower - - nospace: Remove all space characters from a string. `nospace "h e l l o"` becomes "hello" - - title: strings.Title - - untitle: Remove title casing - - repeat: strings.Repeat, but with the arguments switched: `repeat count str`. (This simplifies common pipelines) - - substr: Given string, start, and length, return a substr. - - initials: Given a multi-word string, return the initials. `initials "Matt Butcher"` returns "MB" - - randAlphaNum: Given a length, generate a random alphanumeric sequence - - randAlpha: Given a length, generate an alphabetic string - - randAscii: Given a length, generate a random ASCII string (symbols included) - - randNumeric: Given a length, generate a string of digits. - - swapcase: SwapCase swaps the case of a string using a word based algorithm. see https://godoc.org/github.com/Masterminds/goutils#SwapCase - - shuffle: Shuffle randomizes runes in a string and returns the result. It uses default random source in `math/rand` - - snakecase: convert all upper case characters in a string to underscore format. - - camelcase: convert all lower case characters behind underscores to upper case character - - wrap: Force a line wrap at the given width. `wrap 80 "imagine a longer string"` - - wrapWith: Wrap a line at the given length, but using 'sep' instead of a newline. `wrapWith 50, "
", $html` - - contains: strings.Contains, but with the arguments switched: `contains substr str`. (This simplifies common pipelines) - - hasPrefix: strings.hasPrefix, but with the arguments switched - - hasSuffix: strings.hasSuffix, but with the arguments switched - - quote: Wrap string(s) in double quotation marks, escape the contents by adding '\' before '"'. - - squote: Wrap string(s) in double quotation marks, does not escape content. - - cat: Concatenate strings, separating them by spaces. `cat $a $b $c`. - - indent: Indent a string using space characters. `indent 4 "foo\nbar"` produces " foo\n bar" - - nindent: Indent a string using space characters and prepend a new line. `indent 4 "foo\nbar"` produces "\n foo\n bar" - - replace: Replace an old with a new in a string: `$name | replace " " "-"` - - plural: Choose singular or plural based on length: `len $fish | plural "one anchovy" "many anchovies"` - - sha256sum: Generate a hex encoded sha256 hash of the input - - toString: Convert something to a string - -String Slice Functions: - - - join: strings.Join, but as `join SEP SLICE` - - split: strings.Split, but as `split SEP STRING`. The results are returned - as a map with the indexes set to _N, where N is an integer starting from 0. - Use it like this: `{{$v := "foo/bar/baz" | split "/"}}{{$v._0}}` (Prints `foo`) - - splitList: strings.Split, but as `split SEP STRING`. The results are returned - as an array. - - toStrings: convert a list to a list of strings. 'list 1 2 3 | toStrings' produces '["1" "2" "3"]' - - sortAlpha: sort a list lexicographically. - -Integer Slice Functions: - - - until: Given an integer, returns a slice of counting integers from 0 to one - less than the given integer: `range $i, $e := until 5` - - untilStep: Given start, stop, and step, return an integer slice starting at - 'start', stopping at `stop`, and incrementing by 'step. This is the same - as Python's long-form of 'range'. - -Conversions: - - - atoi: Convert a string to an integer. 0 if the integer could not be parsed. - - int64: Convert a string or another numeric type to an int64. - - int: Convert a string or another numeric type to an int. - - float64: Convert a string or another numeric type to a float64. - -Defaults: - - - default: Give a default value. Used like this: trim " "| default "empty". - Since trim produces an empty string, the default value is returned. For - things with a length (strings, slices, maps), len(0) will trigger the default. - For numbers, the value 0 will trigger the default. For booleans, false will - trigger the default. For structs, the default is never returned (there is - no clear empty condition). For everything else, nil value triggers a default. - - empty: Return true if the given value is the zero value for its type. - Caveats: structs are always non-empty. This should match the behavior of - {{if pipeline}}, but can be used inside of a pipeline. - - coalesce: Given a list of items, return the first non-empty one. - This follows the same rules as 'empty'. '{{ coalesce .someVal 0 "hello" }}` - will return `.someVal` if set, or else return "hello". The 0 is skipped - because it is an empty value. - - compact: Return a copy of a list with all of the empty values removed. - 'list 0 1 2 "" | compact' will return '[1 2]' - -OS: - - env: Resolve an environment variable - - expandenv: Expand a string through the environment - -File Paths: - - base: Return the last element of a path. https://golang.org/pkg/path#Base - - dir: Remove the last element of a path. 
https://golang.org/pkg/path#Dir - - clean: Clean a path to the shortest equivalent name. (e.g. remove "foo/.." - from "foo/../bar.html") https://golang.org/pkg/path#Clean - - ext: https://golang.org/pkg/path#Ext - - isAbs: https://golang.org/pkg/path#IsAbs - -Encoding: - - b64enc: Base 64 encode a string. - - b64dec: Base 64 decode a string. - -Reflection: - - - typeOf: Takes an interface and returns a string representation of the type. - For pointers, this will return a type prefixed with an asterisk(`*`). So - a pointer to type `Foo` will be `*Foo`. - - typeIs: Compares an interface with a string name, and returns true if they match. - Note that a pointer will not match a reference. For example `*Foo` will not - match `Foo`. - - typeIsLike: Compares an interface with a string name and returns true if - the interface is that `name` or that `*name`. In other words, if the given - value matches the given type or is a pointer to the given type, this returns - true. - - kindOf: Takes an interface and returns a string representation of its kind. - - kindIs: Returns true if the given string matches the kind of the given interface. - - Note: None of these can test whether or not something implements a given - interface, since doing so would require compiling the interface in ahead of - time. - -Data Structures: - - - tuple: Takes an arbitrary list of items and returns a slice of items. Its - tuple-ish properties are mainly gained through the template idiom, and not - through an API provided here. WARNING: The implementation of tuple will - change in the future. - - list: An arbitrary ordered list of items. (This is prefered over tuple.) - - dict: Takes a list of name/values and returns a map[string]interface{}. - The first parameter is converted to a string and stored as a key, the - second parameter is treated as the value. And so on, with odds as keys and - evens as values. If the function call ends with an odd, the last key will - be assigned the empty string. Non-string keys are converted to strings as - follows: []byte are converted, fmt.Stringers will have String() called. - errors will have Error() called. All others will be passed through - fmt.Sprtinf("%v"). - -Lists Functions: - -These are used to manipulate lists: '{{ list 1 2 3 | reverse | first }}' - - - first: Get the first item in a 'list'. 'list 1 2 3 | first' prints '1' - - last: Get the last item in a 'list': 'list 1 2 3 | last ' prints '3' - - rest: Get all but the first item in a list: 'list 1 2 3 | rest' returns '[2 3]' - - initial: Get all but the last item in a list: 'list 1 2 3 | initial' returns '[1 2]' - - append: Add an item to the end of a list: 'append $list 4' adds '4' to the end of '$list' - - prepend: Add an item to the beginning of a list: 'prepend $list 4' puts 4 at the beginning of the list. - - reverse: Reverse the items in a list. - - uniq: Remove duplicates from a list. - - without: Return a list with the given values removed: 'without (list 1 2 3) 1' would return '[2 3]' - - has: Return 'true' if the item is found in the list: 'has "foo" $list' will return 'true' if the list contains "foo" - -Dict Functions: - -These are used to manipulate dicts. - - - set: Takes a dict, a key, and a value, and sets that key/value pair in - the dict. `set $dict $key $value`. For convenience, it returns the dict, - even though the dict was modified in place. - - unset: Takes a dict and a key, and deletes that key/value pair from the - dict. `unset $dict $key`. This returns the dict for convenience. 
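Reviewer note: the defaults (default, coalesce, empty) and dict helpers documented in this section are only reachable through sprig's function map. Below is a minimal, illustrative sketch of that wiring, assuming the upstream github.com/Masterminds/sprig module is imported directly rather than from this vendor tree; the remaining dict helpers (hasKey, pluck, keys, pick, omit) continue right after it.

    // Illustrative only: consuming the sprig helpers documented above
    // (default, coalesce, dict, hasKey) from text/template. Assumes the
    // upstream github.com/Masterminds/sprig module, not this vendor copy.
    package main

    import (
        "os"
        "text/template"

        "github.com/Masterminds/sprig"
    )

    func main() {
        const src = `{{ $cfg := dict "name" .Name }}` +
            `name={{ .Name | default "anonymous" }} ` +
            `tz={{ coalesce .TZ "UTC" }} ` +
            `hasName={{ hasKey $cfg "name" }}` + "\n"

        tpl := template.Must(template.New("demo").Funcs(sprig.TxtFuncMap()).Parse(src))

        // With Name and TZ empty, "default" falls back to "anonymous",
        // "coalesce" returns its first non-empty argument ("UTC"), and
        // "hasKey" reports that the dict built above contains "name".
        data := map[string]string{"Name": "", "TZ": ""}
        if err := tpl.Execute(os.Stdout, data); err != nil {
            panic(err)
        }
    }

Run with empty Name and TZ, this prints name=anonymous tz=UTC hasName=true.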
- - hasKey: Takes a dict and a key, and returns boolean true if the key is in - the dict. - - pluck: Given a key and one or more maps, get all of the values for that key. - - keys: Get an array of all of the keys in a dict. - - pick: Select just the given keys out of the dict, and return a new dict. - - omit: Return a dict without the given keys. - -Math Functions: - -Integer functions will convert integers of any width to `int64`. If a -string is passed in, functions will attempt to convert with -`strconv.ParseInt(s, 1064)`. If this fails, the value will be treated as 0. - - - add1: Increment an integer by 1 - - add: Sum an arbitrary number of integers - - sub: Subtract the second integer from the first - - div: Divide the first integer by the second - - mod: Module of first integer divided by second - - mul: Multiply integers - - max: Return the biggest of a series of one or more integers - - min: Return the smallest of a series of one or more integers - - biggest: DEPRECATED. Return the biggest of a series of one or more integers - -Crypto Functions: - - - genPrivateKey: Generate a private key for the given cryptosystem. If no - argument is supplied, by default it will generate a private key using - the RSA algorithm. Accepted values are `rsa`, `dsa`, and `ecdsa`. - - derivePassword: Derive a password from the given parameters according to the ["Master Password" algorithm](http://masterpasswordapp.com/algorithm.html) - Given parameters (in order) are: - `counter` (starting with 1), `password_type` (maximum, long, medium, short, basic, or pin), `password`, - `user`, and `site` - -SemVer Functions: - -These functions provide version parsing and comparisons for SemVer 2 version -strings. - - - semver: Parse a semantic version and return a Version object. - - semverCompare: Compare a SemVer range to a particular version. -*/ -package sprig diff --git a/vendor/github.com/Masterminds/sprig/functions.go b/vendor/github.com/Masterminds/sprig/functions.go deleted file mode 100644 index c36dc55..0000000 --- a/vendor/github.com/Masterminds/sprig/functions.go +++ /dev/null @@ -1,278 +0,0 @@ -package sprig - -import ( - "errors" - "html/template" - "os" - "path" - "strconv" - "strings" - ttemplate "text/template" - "time" - - util "github.com/aokoli/goutils" - "github.com/huandu/xstrings" -) - -// Produce the function map. -// -// Use this to pass the functions into the template engine: -// -// tpl := template.New("foo").Funcs(sprig.FuncMap())) -// -func FuncMap() template.FuncMap { - return HtmlFuncMap() -} - -// HermeticTextFuncMap returns a 'text/template'.FuncMap with only repeatable functions. -func HermeticTxtFuncMap() ttemplate.FuncMap { - r := TxtFuncMap() - for _, name := range nonhermeticFunctions { - delete(r, name) - } - return r -} - -// HermeticHtmlFuncMap returns an 'html/template'.Funcmap with only repeatable functions. -func HermeticHtmlFuncMap() template.FuncMap { - r := HtmlFuncMap() - for _, name := range nonhermeticFunctions { - delete(r, name) - } - return r -} - -// TextFuncMap returns a 'text/template'.FuncMap -func TxtFuncMap() ttemplate.FuncMap { - return ttemplate.FuncMap(GenericFuncMap()) -} - -// HtmlFuncMap returns an 'html/template'.Funcmap -func HtmlFuncMap() template.FuncMap { - return template.FuncMap(GenericFuncMap()) -} - -// GenericFuncMap returns a copy of the basic function map as a map[string]interface{}. 
-func GenericFuncMap() map[string]interface{} { - gfm := make(map[string]interface{}, len(genericMap)) - for k, v := range genericMap { - gfm[k] = v - } - return gfm -} - -// These functions are not guaranteed to evaluate to the same result for given input, because they -// refer to the environemnt or global state. -var nonhermeticFunctions = []string{ - // Date functions - "date", - "date_in_zone", - "date_modify", - "now", - "htmlDate", - "htmlDateInZone", - "dateInZone", - "dateModify", - - // Strings - "randAlphaNum", - "randAlpha", - "randAscii", - "randNumeric", - "uuidv4", - - // OS - "env", - "expandenv", -} - -var genericMap = map[string]interface{}{ - "hello": func() string { return "Hello!" }, - - // Date functions - "date": date, - "date_in_zone": dateInZone, - "date_modify": dateModify, - "now": func() time.Time { return time.Now() }, - "htmlDate": htmlDate, - "htmlDateInZone": htmlDateInZone, - "dateInZone": dateInZone, - "dateModify": dateModify, - "ago": dateAgo, - "toDate": toDate, - - // Strings - "abbrev": abbrev, - "abbrevboth": abbrevboth, - "trunc": trunc, - "trim": strings.TrimSpace, - "upper": strings.ToUpper, - "lower": strings.ToLower, - "title": strings.Title, - "untitle": untitle, - "substr": substring, - // Switch order so that "foo" | repeat 5 - "repeat": func(count int, str string) string { return strings.Repeat(str, count) }, - // Deprecated: Use trimAll. - "trimall": func(a, b string) string { return strings.Trim(b, a) }, - // Switch order so that "$foo" | trimall "$" - "trimAll": func(a, b string) string { return strings.Trim(b, a) }, - "trimSuffix": func(a, b string) string { return strings.TrimSuffix(b, a) }, - "trimPrefix": func(a, b string) string { return strings.TrimPrefix(b, a) }, - "nospace": util.DeleteWhiteSpace, - "initials": initials, - "randAlphaNum": randAlphaNumeric, - "randAlpha": randAlpha, - "randAscii": randAscii, - "randNumeric": randNumeric, - "swapcase": util.SwapCase, - "shuffle": xstrings.Shuffle, - "snakecase": xstrings.ToSnakeCase, - "camelcase": xstrings.ToCamelCase, - "wrap": func(l int, s string) string { return util.Wrap(s, l) }, - "wrapWith": func(l int, sep, str string) string { return util.WrapCustom(str, l, sep, true) }, - // Switch order so that "foobar" | contains "foo" - "contains": func(substr string, str string) bool { return strings.Contains(str, substr) }, - "hasPrefix": func(substr string, str string) bool { return strings.HasPrefix(str, substr) }, - "hasSuffix": func(substr string, str string) bool { return strings.HasSuffix(str, substr) }, - "quote": quote, - "squote": squote, - "cat": cat, - "indent": indent, - "nindent": nindent, - "replace": replace, - "plural": plural, - "sha256sum": sha256sum, - "toString": strval, - - // Wrap Atoi to stop errors. - "atoi": func(a string) int { i, _ := strconv.Atoi(a); return i }, - "int64": toInt64, - "int": toInt, - "float64": toFloat64, - - //"gt": func(a, b int) bool {return a > b}, - //"gte": func(a, b int) bool {return a >= b}, - //"lt": func(a, b int) bool {return a < b}, - //"lte": func(a, b int) bool {return a <= b}, - - // split "/" foo/bar returns map[int]string{0: foo, 1: bar} - "split": split, - "splitList": func(sep, orig string) []string { return strings.Split(orig, sep) }, - "toStrings": strslice, - - "until": until, - "untilStep": untilStep, - - // VERY basic arithmetic. 
- "add1": func(i interface{}) int64 { return toInt64(i) + 1 }, - "add": func(i ...interface{}) int64 { - var a int64 = 0 - for _, b := range i { - a += toInt64(b) - } - return a - }, - "sub": func(a, b interface{}) int64 { return toInt64(a) - toInt64(b) }, - "div": func(a, b interface{}) int64 { return toInt64(a) / toInt64(b) }, - "mod": func(a, b interface{}) int64 { return toInt64(a) % toInt64(b) }, - "mul": func(a interface{}, v ...interface{}) int64 { - val := toInt64(a) - for _, b := range v { - val = val * toInt64(b) - } - return val - }, - "biggest": max, - "max": max, - "min": min, - "ceil": ceil, - "floor": floor, - "round": round, - - // string slices. Note that we reverse the order b/c that's better - // for template processing. - "join": join, - "sortAlpha": sortAlpha, - - // Defaults - "default": dfault, - "empty": empty, - "coalesce": coalesce, - "compact": compact, - "toJson": toJson, - "toPrettyJson": toPrettyJson, - - // Reflection - "typeOf": typeOf, - "typeIs": typeIs, - "typeIsLike": typeIsLike, - "kindOf": kindOf, - "kindIs": kindIs, - - // OS: - "env": func(s string) string { return os.Getenv(s) }, - "expandenv": func(s string) string { return os.ExpandEnv(s) }, - - // File Paths: - "base": path.Base, - "dir": path.Dir, - "clean": path.Clean, - "ext": path.Ext, - "isAbs": path.IsAbs, - - // Encoding: - "b64enc": base64encode, - "b64dec": base64decode, - "b32enc": base32encode, - "b32dec": base32decode, - - // Data Structures: - "tuple": list, // FIXME: with the addition of append/prepend these are no longer immutable. - "list": list, - "dict": dict, - "set": set, - "unset": unset, - "hasKey": hasKey, - "pluck": pluck, - "keys": keys, - "pick": pick, - "omit": omit, - "merge": merge, - - "append": push, "push": push, - "prepend": prepend, - "first": first, - "rest": rest, - "last": last, - "initial": initial, - "reverse": reverse, - "uniq": uniq, - "without": without, - "has": has, - - // Crypto: - "genPrivateKey": generatePrivateKey, - "derivePassword": derivePassword, - "genCA": generateCertificateAuthority, - "genSelfSignedCert": generateSelfSignedCertificate, - "genSignedCert": generateSignedCertificate, - - // UUIDs: - "uuidv4": uuidv4, - - // SemVer: - "semver": semver, - "semverCompare": semverCompare, - - // Flow Control: - "fail": func(msg string) (string, error) { return "", errors.New(msg) }, - - // Regex - "regexMatch": regexMatch, - "regexFindAll": regexFindAll, - "regexFind": regexFind, - "regexReplaceAll": regexReplaceAll, - "regexReplaceAllLiteral": regexReplaceAllLiteral, - "regexSplit": regexSplit, -} diff --git a/vendor/github.com/Masterminds/sprig/glide.lock b/vendor/github.com/Masterminds/sprig/glide.lock deleted file mode 100644 index faf04f9..0000000 --- a/vendor/github.com/Masterminds/sprig/glide.lock +++ /dev/null @@ -1,33 +0,0 @@ -hash: b9cc40bfd6dde74a94103b96700df1a9ab29a7fff5650216cf5a05f4fe72fb73 -updated: 2017-05-02T16:01:04.617727646-06:00 -imports: -- name: github.com/aokoli/goutils - version: 9c37978a95bd5c709a15883b6242714ea6709e64 -- name: github.com/huandu/xstrings - version: 3959339b333561bf62a38b424fd41517c2c90f40 -- name: github.com/imdario/mergo - version: 3e95a51e0639b4cf372f2ccf74c86749d747fbdc -- name: github.com/Masterminds/goutils - version: 45307ec16e3cd47cd841506c081f7afd8237d210 -- name: github.com/Masterminds/semver - version: 59c29afe1a994eacb71c833025ca7acf874bb1da -- name: github.com/satori/go.uuid - version: 879c5887cd475cd7864858769793b2ceb0d44feb -- name: github.com/stretchr/testify - version: 
e3a8ff8ce36581f87a15341206f205b1da467059 - subpackages: - - assert -- name: golang.org/x/crypto - version: d172538b2cfce0c13cee31e647d0367aa8cd2486 - subpackages: - - pbkdf2 - - scrypt -testImports: -- name: github.com/davecgh/go-spew - version: 5215b55f46b2b919f50a1df0eaa5886afe4e3b3d - subpackages: - - spew -- name: github.com/pmezard/go-difflib - version: d8ed2627bdf02c080bf22230dbb337003b7aba2d - subpackages: - - difflib diff --git a/vendor/github.com/Masterminds/sprig/glide.yaml b/vendor/github.com/Masterminds/sprig/glide.yaml deleted file mode 100644 index 5e82dcf..0000000 --- a/vendor/github.com/Masterminds/sprig/glide.yaml +++ /dev/null @@ -1,15 +0,0 @@ -package: github.com/Masterminds/sprig -import: -- package: github.com/Masterminds/goutils - version: ^1.0.0 -- package: github.com/satori/go.uuid - version: ^1.1.0 -- package: golang.org/x/crypto - subpackages: - - scrypt -- package: github.com/Masterminds/semver - version: v1.2.2 -- package: github.com/stretchr/testify -- package: github.com/imdario/mergo - version: ~0.2.2 -- package: github.com/huandu/xstrings diff --git a/vendor/github.com/Masterminds/sprig/list.go b/vendor/github.com/Masterminds/sprig/list.go deleted file mode 100644 index 1860549..0000000 --- a/vendor/github.com/Masterminds/sprig/list.go +++ /dev/null @@ -1,259 +0,0 @@ -package sprig - -import ( - "fmt" - "reflect" - "sort" -) - -// Reflection is used in these functions so that slices and arrays of strings, -// ints, and other types not implementing []interface{} can be worked with. -// For example, this is useful if you need to work on the output of regexs. - -func list(v ...interface{}) []interface{} { - return v -} - -func push(list interface{}, v interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - nl := make([]interface{}, l) - for i := 0; i < l; i++ { - nl[i] = l2.Index(i).Interface() - } - - return append(nl, v) - - default: - panic(fmt.Sprintf("Cannot push on type %s", tp)) - } -} - -func prepend(list interface{}, v interface{}) []interface{} { - //return append([]interface{}{v}, list...) - - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - nl := make([]interface{}, l) - for i := 0; i < l; i++ { - nl[i] = l2.Index(i).Interface() - } - - return append([]interface{}{v}, nl...) 
- - default: - panic(fmt.Sprintf("Cannot prepend on type %s", tp)) - } -} - -func last(list interface{}) interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - if l == 0 { - return nil - } - - return l2.Index(l - 1).Interface() - default: - panic(fmt.Sprintf("Cannot find last on type %s", tp)) - } -} - -func first(list interface{}) interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - if l == 0 { - return nil - } - - return l2.Index(0).Interface() - default: - panic(fmt.Sprintf("Cannot find first on type %s", tp)) - } -} - -func rest(list interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - if l == 0 { - return nil - } - - nl := make([]interface{}, l-1) - for i := 1; i < l; i++ { - nl[i-1] = l2.Index(i).Interface() - } - - return nl - default: - panic(fmt.Sprintf("Cannot find rest on type %s", tp)) - } -} - -func initial(list interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - if l == 0 { - return nil - } - - nl := make([]interface{}, l-1) - for i := 0; i < l-1; i++ { - nl[i] = l2.Index(i).Interface() - } - - return nl - default: - panic(fmt.Sprintf("Cannot find initial on type %s", tp)) - } -} - -func sortAlpha(list interface{}) []string { - k := reflect.Indirect(reflect.ValueOf(list)).Kind() - switch k { - case reflect.Slice, reflect.Array: - a := strslice(list) - s := sort.StringSlice(a) - s.Sort() - return s - } - return []string{strval(list)} -} - -func reverse(v interface{}) []interface{} { - tp := reflect.TypeOf(v).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(v) - - l := l2.Len() - // We do not sort in place because the incoming array should not be altered. 
- nl := make([]interface{}, l) - for i := 0; i < l; i++ { - nl[l-i-1] = l2.Index(i).Interface() - } - - return nl - default: - panic(fmt.Sprintf("Cannot find reverse on type %s", tp)) - } -} - -func compact(list interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - nl := []interface{}{} - var item interface{} - for i := 0; i < l; i++ { - item = l2.Index(i).Interface() - if !empty(item) { - nl = append(nl, item) - } - } - - return nl - default: - panic(fmt.Sprintf("Cannot compact on type %s", tp)) - } -} - -func uniq(list interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - dest := []interface{}{} - var item interface{} - for i := 0; i < l; i++ { - item = l2.Index(i).Interface() - if !inList(dest, item) { - dest = append(dest, item) - } - } - - return dest - default: - panic(fmt.Sprintf("Cannot find uniq on type %s", tp)) - } -} - -func inList(haystack []interface{}, needle interface{}) bool { - for _, h := range haystack { - if reflect.DeepEqual(needle, h) { - return true - } - } - return false -} - -func without(list interface{}, omit ...interface{}) []interface{} { - tp := reflect.TypeOf(list).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(list) - - l := l2.Len() - res := []interface{}{} - var item interface{} - for i := 0; i < l; i++ { - item = l2.Index(i).Interface() - if !inList(omit, item) { - res = append(res, item) - } - } - - return res - default: - panic(fmt.Sprintf("Cannot find without on type %s", tp)) - } -} - -func has(needle interface{}, haystack interface{}) bool { - tp := reflect.TypeOf(haystack).Kind() - switch tp { - case reflect.Slice, reflect.Array: - l2 := reflect.ValueOf(haystack) - var item interface{} - l := l2.Len() - for i := 0; i < l; i++ { - item = l2.Index(i).Interface() - if reflect.DeepEqual(needle, item) { - return true - } - } - - return false - default: - panic(fmt.Sprintf("Cannot find has on type %s", tp)) - } -} diff --git a/vendor/github.com/Masterminds/sprig/numeric.go b/vendor/github.com/Masterminds/sprig/numeric.go deleted file mode 100644 index 209c62e..0000000 --- a/vendor/github.com/Masterminds/sprig/numeric.go +++ /dev/null @@ -1,159 +0,0 @@ -package sprig - -import ( - "math" - "reflect" - "strconv" -) - -// toFloat64 converts 64-bit floats -func toFloat64(v interface{}) float64 { - if str, ok := v.(string); ok { - iv, err := strconv.ParseFloat(str, 64) - if err != nil { - return 0 - } - return iv - } - - val := reflect.Indirect(reflect.ValueOf(v)) - switch val.Kind() { - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return float64(val.Int()) - case reflect.Uint8, reflect.Uint16, reflect.Uint32: - return float64(val.Uint()) - case reflect.Uint, reflect.Uint64: - return float64(val.Uint()) - case reflect.Float32, reflect.Float64: - return val.Float() - case reflect.Bool: - if val.Bool() == true { - return 1 - } - return 0 - default: - return 0 - } -} - -func toInt(v interface{}) int { - //It's not optimal. Bud I don't want duplicate toInt64 code. 
- return int(toInt64(v)) -} - -// toInt64 converts integer types to 64-bit integers -func toInt64(v interface{}) int64 { - if str, ok := v.(string); ok { - iv, err := strconv.ParseInt(str, 10, 64) - if err != nil { - return 0 - } - return iv - } - - val := reflect.Indirect(reflect.ValueOf(v)) - switch val.Kind() { - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return val.Int() - case reflect.Uint8, reflect.Uint16, reflect.Uint32: - return int64(val.Uint()) - case reflect.Uint, reflect.Uint64: - tv := val.Uint() - if tv <= math.MaxInt64 { - return int64(tv) - } - // TODO: What is the sensible thing to do here? - return math.MaxInt64 - case reflect.Float32, reflect.Float64: - return int64(val.Float()) - case reflect.Bool: - if val.Bool() == true { - return 1 - } - return 0 - default: - return 0 - } -} - -func max(a interface{}, i ...interface{}) int64 { - aa := toInt64(a) - for _, b := range i { - bb := toInt64(b) - if bb > aa { - aa = bb - } - } - return aa -} - -func min(a interface{}, i ...interface{}) int64 { - aa := toInt64(a) - for _, b := range i { - bb := toInt64(b) - if bb < aa { - aa = bb - } - } - return aa -} - -func until(count int) []int { - step := 1 - if count < 0 { - step = -1 - } - return untilStep(0, count, step) -} - -func untilStep(start, stop, step int) []int { - v := []int{} - - if stop < start { - if step >= 0 { - return v - } - for i := start; i > stop; i += step { - v = append(v, i) - } - return v - } - - if step <= 0 { - return v - } - for i := start; i < stop; i += step { - v = append(v, i) - } - return v -} - -func floor(a interface{}) float64 { - aa := toFloat64(a) - return math.Floor(aa) -} - -func ceil(a interface{}) float64 { - aa := toFloat64(a) - return math.Ceil(aa) -} - -func round(a interface{}, p int, r_opt ...float64) float64 { - roundOn := .5 - if len(r_opt) > 0 { - roundOn = r_opt[0] - } - val := toFloat64(a) - places := toFloat64(p) - - var round float64 - pow := math.Pow(10, places) - digit := pow * val - _, div := math.Modf(digit) - if div >= roundOn { - round = math.Ceil(digit) - } else { - round = math.Floor(digit) - } - return round / pow -} \ No newline at end of file diff --git a/vendor/github.com/Masterminds/sprig/reflect.go b/vendor/github.com/Masterminds/sprig/reflect.go deleted file mode 100644 index 8a65c13..0000000 --- a/vendor/github.com/Masterminds/sprig/reflect.go +++ /dev/null @@ -1,28 +0,0 @@ -package sprig - -import ( - "fmt" - "reflect" -) - -// typeIs returns true if the src is the type named in target. 
-func typeIs(target string, src interface{}) bool { - return target == typeOf(src) -} - -func typeIsLike(target string, src interface{}) bool { - t := typeOf(src) - return target == t || "*"+target == t -} - -func typeOf(src interface{}) string { - return fmt.Sprintf("%T", src) -} - -func kindIs(target string, src interface{}) bool { - return target == kindOf(src) -} - -func kindOf(src interface{}) string { - return reflect.ValueOf(src).Kind().String() -} diff --git a/vendor/github.com/Masterminds/sprig/regex.go b/vendor/github.com/Masterminds/sprig/regex.go deleted file mode 100644 index 9fe033a..0000000 --- a/vendor/github.com/Masterminds/sprig/regex.go +++ /dev/null @@ -1,35 +0,0 @@ -package sprig - -import ( - "regexp" -) - -func regexMatch(regex string, s string) bool { - match, _ := regexp.MatchString(regex, s) - return match -} - -func regexFindAll(regex string, s string, n int) []string { - r := regexp.MustCompile(regex) - return r.FindAllString(s, n) -} - -func regexFind(regex string, s string) string { - r := regexp.MustCompile(regex) - return r.FindString(s) -} - -func regexReplaceAll(regex string, s string, repl string) string { - r := regexp.MustCompile(regex) - return r.ReplaceAllString(s, repl) -} - -func regexReplaceAllLiteral(regex string, s string, repl string) string { - r := regexp.MustCompile(regex) - return r.ReplaceAllLiteralString(s, repl) -} - -func regexSplit(regex string, s string, n int) []string { - r := regexp.MustCompile(regex) - return r.Split(s, n) -} \ No newline at end of file diff --git a/vendor/github.com/Masterminds/sprig/semver.go b/vendor/github.com/Masterminds/sprig/semver.go deleted file mode 100644 index c2bf8a1..0000000 --- a/vendor/github.com/Masterminds/sprig/semver.go +++ /dev/null @@ -1,23 +0,0 @@ -package sprig - -import ( - sv2 "github.com/Masterminds/semver" -) - -func semverCompare(constraint, version string) (bool, error) { - c, err := sv2.NewConstraint(constraint) - if err != nil { - return false, err - } - - v, err := sv2.NewVersion(version) - if err != nil { - return false, err - } - - return c.Check(v), nil -} - -func semver(version string) (*sv2.Version, error) { - return sv2.NewVersion(version) -} diff --git a/vendor/github.com/Masterminds/sprig/strings.go b/vendor/github.com/Masterminds/sprig/strings.go deleted file mode 100644 index f6afa2f..0000000 --- a/vendor/github.com/Masterminds/sprig/strings.go +++ /dev/null @@ -1,201 +0,0 @@ -package sprig - -import ( - "encoding/base32" - "encoding/base64" - "fmt" - "reflect" - "strconv" - "strings" - - util "github.com/aokoli/goutils" -) - -func base64encode(v string) string { - return base64.StdEncoding.EncodeToString([]byte(v)) -} - -func base64decode(v string) string { - data, err := base64.StdEncoding.DecodeString(v) - if err != nil { - return err.Error() - } - return string(data) -} - -func base32encode(v string) string { - return base32.StdEncoding.EncodeToString([]byte(v)) -} - -func base32decode(v string) string { - data, err := base32.StdEncoding.DecodeString(v) - if err != nil { - return err.Error() - } - return string(data) -} - -func abbrev(width int, s string) string { - if width < 4 { - return s - } - r, _ := util.Abbreviate(s, width) - return r -} - -func abbrevboth(left, right int, s string) string { - if right < 4 || left > 0 && right < 7 { - return s - } - r, _ := util.AbbreviateFull(s, left, right) - return r -} -func initials(s string) string { - // Wrap this just to eliminate the var args, which templates don't do well. 
- return util.Initials(s) -} - -func randAlphaNumeric(count int) string { - // It is not possible, it appears, to actually generate an error here. - r, _ := util.RandomAlphaNumeric(count) - return r -} - -func randAlpha(count int) string { - r, _ := util.RandomAlphabetic(count) - return r -} - -func randAscii(count int) string { - r, _ := util.RandomAscii(count) - return r -} - -func randNumeric(count int) string { - r, _ := util.RandomNumeric(count) - return r -} - -func untitle(str string) string { - return util.Uncapitalize(str) -} - -func quote(str ...interface{}) string { - out := make([]string, len(str)) - for i, s := range str { - out[i] = fmt.Sprintf("%q", strval(s)) - } - return strings.Join(out, " ") -} - -func squote(str ...interface{}) string { - out := make([]string, len(str)) - for i, s := range str { - out[i] = fmt.Sprintf("'%v'", s) - } - return strings.Join(out, " ") -} - -func cat(v ...interface{}) string { - r := strings.TrimSpace(strings.Repeat("%v ", len(v))) - return fmt.Sprintf(r, v...) -} - -func indent(spaces int, v string) string { - pad := strings.Repeat(" ", spaces) - return pad + strings.Replace(v, "\n", "\n"+pad, -1) -} - -func nindent(spaces int, v string) string { - return "\n" + indent(spaces, v) -} - -func replace(old, new, src string) string { - return strings.Replace(src, old, new, -1) -} - -func plural(one, many string, count int) string { - if count == 1 { - return one - } - return many -} - -func strslice(v interface{}) []string { - switch v := v.(type) { - case []string: - return v - case []interface{}: - l := len(v) - b := make([]string, l) - for i := 0; i < l; i++ { - b[i] = strval(v[i]) - } - return b - default: - val := reflect.ValueOf(v) - switch val.Kind() { - case reflect.Array, reflect.Slice: - l := val.Len() - b := make([]string, l) - for i := 0; i < l; i++ { - b[i] = strval(val.Index(i).Interface()) - } - return b - default: - return []string{strval(v)} - } - } -} - -func strval(v interface{}) string { - switch v := v.(type) { - case string: - return v - case []byte: - return string(v) - case error: - return v.Error() - case fmt.Stringer: - return v.String() - default: - return fmt.Sprintf("%v", v) - } -} - -func trunc(c int, s string) string { - if len(s) <= c { - return s - } - return s[0:c] -} - -func join(sep string, v interface{}) string { - return strings.Join(strslice(v), sep) -} - -func split(sep, orig string) map[string]string { - parts := strings.Split(orig, sep) - res := make(map[string]string, len(parts)) - for i, v := range parts { - res["_"+strconv.Itoa(i)] = v - } - return res -} - -// substring creates a substring of the given string. -// -// If start is < 0, this calls string[:length]. -// -// If start is >= 0 and length < 0, this calls string[start:] -// -// Otherwise, this calls string[start, length]. -func substring(start, length int, s string) string { - if start < 0 { - return s[:length] - } - if length < 0 { - return s[start:] - } - return s[start:length] -} diff --git a/vendor/github.com/aokoli/goutils/LICENSE.txt b/vendor/github.com/aokoli/goutils/LICENSE.txt deleted file mode 100644 index d645695..0000000 --- a/vendor/github.com/aokoli/goutils/LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. 
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/vendor/github.com/aokoli/goutils/README.md b/vendor/github.com/aokoli/goutils/README.md deleted file mode 100644 index 163ffe7..0000000 --- a/vendor/github.com/aokoli/goutils/README.md +++ /dev/null @@ -1,70 +0,0 @@ -GoUtils -=========== -[![Stability: Maintenance](https://masterminds.github.io/stability/maintenance.svg)](https://masterminds.github.io/stability/maintenance.html) -[![GoDoc](https://godoc.org/github.com/Masterminds/goutils?status.png)](https://godoc.org/github.com/Masterminds/goutils) [![Build Status](https://travis-ci.org/Masterminds/goutils.svg?branch=master)](https://travis-ci.org/Masterminds/goutils) [![Build status](https://ci.appveyor.com/api/projects/status/sc2b1ew0m7f0aiju?svg=true)](https://ci.appveyor.com/project/mattfarina/goutils) - - -GoUtils provides users with utility functions to manipulate strings in various ways. It is a Go implementation of some -string manipulation libraries of Java Apache Commons. GoUtils includes the following Java Apache Commons classes: -* WordUtils -* RandomStringUtils -* StringUtils (partial implementation) - -## Installation -If you have Go set up on your system, from the GOPATH directory within the command line/terminal, enter this: - - go get github.com/Masterminds/goutils - -If you do not have Go set up on your system, please follow the [Go installation directions from the documenation](http://golang.org/doc/install), and then follow the instructions above to install GoUtils. - - -## Documentation -GoUtils doc is available here: [![GoDoc](https://godoc.org/github.com/Masterminds/goutils?status.png)](https://godoc.org/github.com/Masterminds/goutils) - - -## Usage -The code snippets below show examples of how to use GoUtils. Some functions return errors while others do not. The first instance below, which does not return an error, is the `Initials` function (located within the `wordutils.go` file). - - package main - - import ( - "fmt" - "github.com/Masterminds/goutils" - ) - - func main() { - - // EXAMPLE 1: A goutils function which returns no errors - fmt.Println (goutils.Initials("John Doe Foo")) // Prints out "JDF" - - } -Some functions return errors mainly due to illegal arguements used as parameters. The code example below illustrates how to deal with function that returns an error. In this instance, the function is the `Random` function (located within the `randomstringutils.go` file). - - package main - - import ( - "fmt" - "github.com/Masterminds/goutils" - ) - - func main() { - - // EXAMPLE 2: A goutils function which returns an error - rand1, err1 := goutils.Random (-1, 0, 0, true, true) - - if err1 != nil { - fmt.Println(err1) // Prints out error message because -1 was entered as the first parameter in goutils.Random(...) - } else { - fmt.Println(rand1) - } - - } - -## License -GoUtils is licensed under the Apache License, Version 2.0. Please check the LICENSE.txt file or visit http://www.apache.org/licenses/LICENSE-2.0 for a copy of the license. 
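The README snippets above already cover Initials and Random; as a hedged complement, this sketch exercises the error-returning Abbreviate helper defined in stringutils.go further down this diff, again assuming the upstream github.com/Masterminds/goutils module rather than this vendor copy.

    // Illustrative only: error handling with goutils.Abbreviate, which
    // returns an error when maxWidth is below the minimum of 4.
    package main

    import (
        "fmt"

        "github.com/Masterminds/goutils"
    )

    func main() {
        // A valid call: the result is at most 12 characters, ellipsis included.
        if s, err := goutils.Abbreviate("Now is the time for all good men", 12); err == nil {
            fmt.Println(s) // "Now is th..."
        }

        // An invalid width (< 4) surfaces as an error instead of a panic.
        if _, err := goutils.Abbreviate("hello", 3); err != nil {
            fmt.Println("abbreviate:", err)
        }
    }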
- -## Issue Reporting -Make suggestions or report issues using the Git issue tracker: https://github.com/Masterminds/goutils/issues - -## Website -* [GoUtils webpage](http://Masterminds.github.io/goutils/) diff --git a/vendor/github.com/aokoli/goutils/appveyor.yml b/vendor/github.com/aokoli/goutils/appveyor.yml deleted file mode 100644 index 657564a..0000000 --- a/vendor/github.com/aokoli/goutils/appveyor.yml +++ /dev/null @@ -1,21 +0,0 @@ -version: build-{build}.{branch} - -clone_folder: C:\gopath\src\github.com\Masterminds\goutils -shallow_clone: true - -environment: - GOPATH: C:\gopath - -platform: - - x64 - -build: off - -install: - - go version - - go env - -test_script: - - go test -v - -deploy: off diff --git a/vendor/github.com/aokoli/goutils/randomstringutils.go b/vendor/github.com/aokoli/goutils/randomstringutils.go deleted file mode 100644 index eff2e7d..0000000 --- a/vendor/github.com/aokoli/goutils/randomstringutils.go +++ /dev/null @@ -1,250 +0,0 @@ -/* -Copyright 2014 Alexander Okoli - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package goutils - -import ( - "fmt" - "math" - "math/rand" - "time" - "unicode" -) - -// RANDOM provides the time-based seed used to generate random numbers -var RANDOM = rand.New(rand.NewSource(time.Now().UnixNano())) - -/* -RandomNonAlphaNumeric creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of all characters (ASCII/Unicode values between 0 to 2,147,483,647 (math.MaxInt32)). - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomNonAlphaNumeric(count int) (string, error) { - return RandomAlphaNumericCustom(count, false, false) -} - -/* -RandomAscii creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of characters whose ASCII value is between 32 and 126 (inclusive). - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomAscii(count int) (string, error) { - return Random(count, 32, 127, false, false) -} - -/* -RandomNumeric creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of numeric characters. - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomNumeric(count int) (string, error) { - return Random(count, 0, 0, false, true) -} - -/* -RandomAlphabetic creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments. 
- -Parameters: - count - the length of random string to create - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomAlphabetic(count int) (string, error) { - return Random(count, 0, 0, true, false) -} - -/* -RandomAlphaNumeric creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of alpha-numeric characters. - -Parameter: - count - the length of random string to create - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomAlphaNumeric(count int) (string, error) { - return Random(count, 0, 0, true, true) -} - -/* -RandomAlphaNumericCustom creates a random string whose length is the number of characters specified. -Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments. - -Parameters: - count - the length of random string to create - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func RandomAlphaNumericCustom(count int, letters bool, numbers bool) (string, error) { - return Random(count, 0, 0, letters, numbers) -} - -/* -Random creates a random string based on a variety of options, using default source of randomness. -This method has exactly the same semantics as RandomSeed(int, int, int, bool, bool, []char, *rand.Rand), but -instead of using an externally supplied source of randomness, it uses the internal *rand.Rand instance. - -Parameters: - count - the length of random string to create - start - the position in set of chars (ASCII/Unicode int) to start at - end - the position in set of chars (ASCII/Unicode int) to end before - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - chars - the set of chars to choose randoms from. If nil, then it will use the set of all chars. - -Returns: - string - the random string - error - an error stemming from an invalid parameter within underlying function, RandomSeed(...) -*/ -func Random(count int, start int, end int, letters bool, numbers bool, chars ...rune) (string, error) { - return RandomSeed(count, start, end, letters, numbers, chars, RANDOM) -} - -/* -RandomSeed creates a random string based on a variety of options, using supplied source of randomness. -If the parameters start and end are both 0, start and end are set to ' ' and 'z', the ASCII printable characters, will be used, -unless letters and numbers are both false, in which case, start and end are set to 0 and math.MaxInt32, respectively. -If chars is not nil, characters stored in chars that are between start and end are chosen. -This method accepts a user-supplied *rand.Rand instance to use as a source of randomness. By seeding a single *rand.Rand instance -with a fixed seed and using it for each call, the same random sequence of strings can be generated repeatedly and predictably. 
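A short illustrative sketch of the reproducibility described above: one fixed-seed *rand.Rand reused across RandomSeed calls yields the same strings on every run (assuming the upstream goutils module; the exact output depends on Go's math/rand sequence).

    // Illustrative only: deterministic random strings via RandomSeed and a
    // seeded *rand.Rand, matching the doc comment above.
    package main

    import (
        "fmt"
        "math/rand"

        "github.com/Masterminds/goutils"
    )

    func main() {
        src := rand.New(rand.NewSource(42)) // fixed seed => repeatable output

        for i := 0; i < 3; i++ {
            // 8 alphanumeric characters from the default character set
            // (start=0, end=0, letters=true, numbers=true, chars=nil).
            s, err := goutils.RandomSeed(8, 0, 0, true, true, nil, src)
            if err != nil {
                panic(err)
            }
            fmt.Println(s)
        }
    }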
- -Parameters: - count - the length of random string to create - start - the position in set of chars (ASCII/Unicode decimals) to start at - end - the position in set of chars (ASCII/Unicode decimals) to end before - letters - if true, generated string may include alphabetic characters - numbers - if true, generated string may include numeric characters - chars - the set of chars to choose randoms from. If nil, then it will use the set of all chars. - random - a source of randomness. - -Returns: - string - the random string - error - an error stemming from invalid parameters: if count < 0; or the provided chars array is empty; or end <= start; or end > len(chars) -*/ -func RandomSeed(count int, start int, end int, letters bool, numbers bool, chars []rune, random *rand.Rand) (string, error) { - - if count == 0 { - return "", nil - } else if count < 0 { - err := fmt.Errorf("randomstringutils illegal argument: Requested random string length %v is less than 0.", count) // equiv to err := errors.New("...") - return "", err - } - if chars != nil && len(chars) == 0 { - err := fmt.Errorf("randomstringutils illegal argument: The chars array must not be empty") - return "", err - } - - if start == 0 && end == 0 { - if chars != nil { - end = len(chars) - } else { - if !letters && !numbers { - end = math.MaxInt32 - } else { - end = 'z' + 1 - start = ' ' - } - } - } else { - if end <= start { - err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) must be greater than start (%v)", end, start) - return "", err - } - - if chars != nil && end > len(chars) { - err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) cannot be greater than len(chars) (%v)", end, len(chars)) - return "", err - } - } - - buffer := make([]rune, count) - gap := end - start - - // high-surrogates range, (\uD800-\uDBFF) = 55296 - 56319 - // low-surrogates range, (\uDC00-\uDFFF) = 56320 - 57343 - - for count != 0 { - count-- - var ch rune - if chars == nil { - ch = rune(random.Intn(gap) + start) - } else { - ch = chars[random.Intn(gap)+start] - } - - if letters && unicode.IsLetter(ch) || numbers && unicode.IsDigit(ch) || !letters && !numbers { - if ch >= 56320 && ch <= 57343 { // low surrogate range - if count == 0 { - count++ - } else { - // Insert low surrogate - buffer[count] = ch - count-- - // Insert high surrogate - buffer[count] = rune(55296 + random.Intn(128)) - } - } else if ch >= 55296 && ch <= 56191 { // High surrogates range (Partial) - if count == 0 { - count++ - } else { - // Insert low surrogate - buffer[count] = rune(56320 + random.Intn(128)) - count-- - // Insert high surrogate - buffer[count] = ch - } - } else if ch >= 56192 && ch <= 56319 { - // private high surrogate, skip it - count++ - } else { - // not one of the surrogates* - buffer[count] = ch - } - } else { - count++ - } - } - return string(buffer), nil -} diff --git a/vendor/github.com/aokoli/goutils/stringutils.go b/vendor/github.com/aokoli/goutils/stringutils.go deleted file mode 100644 index 5037c45..0000000 --- a/vendor/github.com/aokoli/goutils/stringutils.go +++ /dev/null @@ -1,224 +0,0 @@ -/* -Copyright 2014 Alexander Okoli - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package goutils - -import ( - "bytes" - "fmt" - "strings" - "unicode" -) - -// Typically returned by functions where a searched item cannot be found -const INDEX_NOT_FOUND = -1 - -/* -Abbreviate abbreviates a string using ellipses. This will turn the string "Now is the time for all good men" into "Now is the time for..." - -Specifically, the algorithm is as follows: - - - If str is less than maxWidth characters long, return it. - - Else abbreviate it to (str[0:maxWidth - 3] + "..."). - - If maxWidth is less than 4, return an illegal argument error. - - In no case will it return a string of length greater than maxWidth. - -Parameters: - str - the string to check - maxWidth - maximum length of result string, must be at least 4 - -Returns: - string - abbreviated string - error - if the width is too small -*/ -func Abbreviate(str string, maxWidth int) (string, error) { - return AbbreviateFull(str, 0, maxWidth) -} - -/* -AbbreviateFull abbreviates a string using ellipses. This will turn the string "Now is the time for all good men" into "...is the time for..." -This function works like Abbreviate(string, int), but allows you to specify a "left edge" offset. Note that this left edge is not -necessarily going to be the leftmost character in the result, or the first character following the ellipses, but it will appear -somewhere in the result. -In no case will it return a string of length greater than maxWidth. - -Parameters: - str - the string to check - offset - left edge of source string - maxWidth - maximum length of result string, must be at least 4 - -Returns: - string - abbreviated string - error - if the width is too small -*/ -func AbbreviateFull(str string, offset int, maxWidth int) (string, error) { - if str == "" { - return "", nil - } - if maxWidth < 4 { - err := fmt.Errorf("stringutils illegal argument: Minimum abbreviation width is 4") - return "", err - } - if len(str) <= maxWidth { - return str, nil - } - if offset > len(str) { - offset = len(str) - } - if len(str)-offset < (maxWidth - 3) { // 15 - 5 < 10 - 3 = 10 < 7 - offset = len(str) - (maxWidth - 3) - } - abrevMarker := "..." - if offset <= 4 { - return str[0:maxWidth-3] + abrevMarker, nil // str.substring(0, maxWidth - 3) + abrevMarker; - } - if maxWidth < 7 { - err := fmt.Errorf("stringutils illegal argument: Minimum abbreviation width with offset is 7") - return "", err - } - if (offset + maxWidth - 3) < len(str) { // 5 + (10-3) < 15 = 12 < 15 - abrevStr, _ := Abbreviate(str[offset:len(str)], (maxWidth - 3)) - return abrevMarker + abrevStr, nil // abrevMarker + abbreviate(str.substring(offset), maxWidth - 3); - } - return abrevMarker + str[(len(str)-(maxWidth-3)):len(str)], nil // abrevMarker + str.substring(str.length() - (maxWidth - 3)); -} - -/* -DeleteWhiteSpace deletes all whitespaces from a string as defined by unicode.IsSpace(rune). -It returns the string without whitespaces. 
- -Parameter: - str - the string to delete whitespace from, may be nil - -Returns: - the string without whitespaces -*/ -func DeleteWhiteSpace(str string) string { - if str == "" { - return str - } - sz := len(str) - var chs bytes.Buffer - count := 0 - for i := 0; i < sz; i++ { - ch := rune(str[i]) - if !unicode.IsSpace(ch) { - chs.WriteRune(ch) - count++ - } - } - if count == sz { - return str - } - return chs.String() -} - -/* -IndexOfDifference compares two strings, and returns the index at which the strings begin to differ. - -Parameters: - str1 - the first string - str2 - the second string - -Returns: - the index where str1 and str2 begin to differ; -1 if they are equal -*/ -func IndexOfDifference(str1 string, str2 string) int { - if str1 == str2 { - return INDEX_NOT_FOUND - } - if IsEmpty(str1) || IsEmpty(str2) { - return 0 - } - var i int - for i = 0; i < len(str1) && i < len(str2); i++ { - if rune(str1[i]) != rune(str2[i]) { - break - } - } - if i < len(str2) || i < len(str1) { - return i - } - return INDEX_NOT_FOUND -} - -/* -IsBlank checks if a string is whitespace or empty (""). Observe the following behavior: - - goutils.IsBlank("") = true - goutils.IsBlank(" ") = true - goutils.IsBlank("bob") = false - goutils.IsBlank(" bob ") = false - -Parameter: - str - the string to check - -Returns: - true - if the string is whitespace or empty ("") -*/ -func IsBlank(str string) bool { - strLen := len(str) - if str == "" || strLen == 0 { - return true - } - for i := 0; i < strLen; i++ { - if unicode.IsSpace(rune(str[i])) == false { - return false - } - } - return true -} - -/* -IndexOf returns the index of the first instance of sub in str, with the search beginning from the -index start point specified. -1 is returned if sub is not present in str. - -An empty string ("") will return -1 (INDEX_NOT_FOUND). A negative start position is treated as zero. -A start position greater than the string length returns -1. - -Parameters: - str - the string to check - sub - the substring to find - start - the start position; negative treated as zero - -Returns: - the first index where the sub string was found (always >= start) -*/ -func IndexOf(str string, sub string, start int) int { - - if start < 0 { - start = 0 - } - - if len(str) < start { - return INDEX_NOT_FOUND - } - - if IsEmpty(str) || IsEmpty(sub) { - return INDEX_NOT_FOUND - } - - partialIndex := strings.Index(str[start:len(str)], sub) - if partialIndex == -1 { - return INDEX_NOT_FOUND - } - return partialIndex + start -} - -// IsEmpty checks if a string is empty (""). Returns true if empty, and false otherwise. -func IsEmpty(str string) bool { - return len(str) == 0 -} diff --git a/vendor/github.com/aokoli/goutils/wordutils.go b/vendor/github.com/aokoli/goutils/wordutils.go deleted file mode 100644 index e92dd39..0000000 --- a/vendor/github.com/aokoli/goutils/wordutils.go +++ /dev/null @@ -1,356 +0,0 @@ -/* -Copyright 2014 Alexander Okoli - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
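
Likewise, a short sketch of the stringutils helpers documented above (Abbreviate, IsBlank, IndexOf, DeleteWhiteSpace), under the same assumed import path:

    package main

    import (
        "fmt"
        "log"

        "github.com/aokoli/goutils"
    )

    func main() {
        // Abbreviate truncates to at most maxWidth characters, ending in "...".
        short, err := goutils.Abbreviate("Now is the time for all good men", 22)
        if err != nil {
            log.Fatal(err) // returned when maxWidth < 4
        }
        fmt.Println(short) // "Now is the time for..."

        // IsBlank treats empty and whitespace-only strings the same way.
        fmt.Println(goutils.IsBlank("   "), goutils.IsBlank(" bob ")) // true false

        // IndexOf searches from a starting offset; -1 (INDEX_NOT_FOUND) means no match.
        fmt.Println(goutils.IndexOf("abcabc", "b", 2)) // 4

        // DeleteWhiteSpace removes every rune for which unicode.IsSpace reports true.
        fmt.Println(goutils.DeleteWhiteSpace("a b\tc")) // "abc"
    }
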
-*/ - -/* -Package goutils provides utility functions to manipulate strings in various ways. -The code snippets below show examples of how to use goutils. Some functions return -errors while others do not, so usage would vary as a result. - -Example: - - package main - - import ( - "fmt" - "github.com/aokoli/goutils" - ) - - func main() { - - // EXAMPLE 1: A goutils function which returns no errors - fmt.Println (goutils.Initials("John Doe Foo")) // Prints out "JDF" - - - - // EXAMPLE 2: A goutils function which returns an error - rand1, err1 := goutils.Random (-1, 0, 0, true, true) - - if err1 != nil { - fmt.Println(err1) // Prints out error message because -1 was entered as the first parameter in goutils.Random(...) - } else { - fmt.Println(rand1) - } - } -*/ -package goutils - -import ( - "bytes" - "strings" - "unicode" -) - -// VERSION indicates the current version of goutils -const VERSION = "1.0.0" - -/* -Wrap wraps a single line of text, identifying words by ' '. -New lines will be separated by '\n'. Very long words, such as URLs will not be wrapped. -Leading spaces on a new line are stripped. Trailing spaces are not stripped. - -Parameters: - str - the string to be word wrapped - wrapLength - the column (a column can fit only one character) to wrap the words at, less than 1 is treated as 1 - -Returns: - a line with newlines inserted -*/ -func Wrap(str string, wrapLength int) string { - return WrapCustom(str, wrapLength, "", false) -} - -/* -WrapCustom wraps a single line of text, identifying words by ' '. -Leading spaces on a new line are stripped. Trailing spaces are not stripped. - -Parameters: - str - the string to be word wrapped - wrapLength - the column number (a column can fit only one character) to wrap the words at, less than 1 is treated as 1 - newLineStr - the string to insert for a new line, "" uses '\n' - wrapLongWords - true if long words (such as URLs) should be wrapped - -Returns: - a line with newlines inserted -*/ -func WrapCustom(str string, wrapLength int, newLineStr string, wrapLongWords bool) string { - - if str == "" { - return "" - } - if newLineStr == "" { - newLineStr = "\n" // TODO Assumes "\n" is seperator. 
Explore SystemUtils.LINE_SEPARATOR from Apache Commons - } - if wrapLength < 1 { - wrapLength = 1 - } - - inputLineLength := len(str) - offset := 0 - - var wrappedLine bytes.Buffer - - for inputLineLength-offset > wrapLength { - - if rune(str[offset]) == ' ' { - offset++ - continue - } - - end := wrapLength + offset + 1 - spaceToWrapAt := strings.LastIndex(str[offset:end], " ") + offset - - if spaceToWrapAt >= offset { - // normal word (not longer than wrapLength) - wrappedLine.WriteString(str[offset:spaceToWrapAt]) - wrappedLine.WriteString(newLineStr) - offset = spaceToWrapAt + 1 - - } else { - // long word or URL - if wrapLongWords { - end := wrapLength + offset - // long words are wrapped one line at a time - wrappedLine.WriteString(str[offset:end]) - wrappedLine.WriteString(newLineStr) - offset += wrapLength - } else { - // long words aren't wrapped, just extended beyond limit - end := wrapLength + offset - spaceToWrapAt = strings.IndexRune(str[end:len(str)], ' ') + end - if spaceToWrapAt >= 0 { - wrappedLine.WriteString(str[offset:spaceToWrapAt]) - wrappedLine.WriteString(newLineStr) - offset = spaceToWrapAt + 1 - } else { - wrappedLine.WriteString(str[offset:len(str)]) - offset = inputLineLength - } - } - } - } - - wrappedLine.WriteString(str[offset:len(str)]) - - return wrappedLine.String() - -} - -/* -Capitalize capitalizes all the delimiter separated words in a string. Only the first letter of each word is changed. -To convert the rest of each word to lowercase at the same time, use CapitalizeFully(str string, delimiters ...rune). -The delimiters represent a set of characters understood to separate words. The first string character -and the first non-delimiter character after a delimiter will be capitalized. A "" input string returns "". -Capitalization uses the Unicode title case, normally equivalent to upper case. - -Parameters: - str - the string to capitalize - delimiters - set of characters to determine capitalization, exclusion of this parameter means whitespace would be delimeter - -Returns: - capitalized string -*/ -func Capitalize(str string, delimiters ...rune) string { - - var delimLen int - - if delimiters == nil { - delimLen = -1 - } else { - delimLen = len(delimiters) - } - - if str == "" || delimLen == 0 { - return str - } - - buffer := []rune(str) - capitalizeNext := true - for i := 0; i < len(buffer); i++ { - ch := buffer[i] - if isDelimiter(ch, delimiters...) { - capitalizeNext = true - } else if capitalizeNext { - buffer[i] = unicode.ToTitle(ch) - capitalizeNext = false - } - } - return string(buffer) - -} - -/* -CapitalizeFully converts all the delimiter separated words in a string into capitalized words, that is each word is made up of a -titlecase character and then a series of lowercase characters. The delimiters represent a set of characters understood -to separate words. The first string character and the first non-delimiter character after a delimiter will be capitalized. -Capitalization uses the Unicode title case, normally equivalent to upper case. - -Parameters: - str - the string to capitalize fully - delimiters - set of characters to determine capitalization, exclusion of this parameter means whitespace would be delimeter - -Returns: - capitalized string -*/ -func CapitalizeFully(str string, delimiters ...rune) string { - - var delimLen int - - if delimiters == nil { - delimLen = -1 - } else { - delimLen = len(delimiters) - } - - if str == "" || delimLen == 0 { - return str - } - str = strings.ToLower(str) - return Capitalize(str, delimiters...) 
-} - -/* -Uncapitalize uncapitalizes all the whitespace separated words in a string. Only the first letter of each word is changed. -The delimiters represent a set of characters understood to separate words. The first string character and the first non-delimiter -character after a delimiter will be uncapitalized. Whitespace is defined by unicode.IsSpace(char). - -Parameters: - str - the string to uncapitalize fully - delimiters - set of characters to determine capitalization, exclusion of this parameter means whitespace would be delimeter - -Returns: - uncapitalized string -*/ -func Uncapitalize(str string, delimiters ...rune) string { - - var delimLen int - - if delimiters == nil { - delimLen = -1 - } else { - delimLen = len(delimiters) - } - - if str == "" || delimLen == 0 { - return str - } - - buffer := []rune(str) - uncapitalizeNext := true // TODO Always makes capitalize/un apply to first char. - for i := 0; i < len(buffer); i++ { - ch := buffer[i] - if isDelimiter(ch, delimiters...) { - uncapitalizeNext = true - } else if uncapitalizeNext { - buffer[i] = unicode.ToLower(ch) - uncapitalizeNext = false - } - } - return string(buffer) -} - -/* -SwapCase swaps the case of a string using a word based algorithm. - -Conversion algorithm: - - Upper case character converts to Lower case - Title case character converts to Lower case - Lower case character after Whitespace or at start converts to Title case - Other Lower case character converts to Upper case - Whitespace is defined by unicode.IsSpace(char). - -Parameters: - str - the string to swap case - -Returns: - the changed string -*/ -func SwapCase(str string) string { - if str == "" { - return str - } - buffer := []rune(str) - - whitespace := true - - for i := 0; i < len(buffer); i++ { - ch := buffer[i] - if unicode.IsUpper(ch) { - buffer[i] = unicode.ToLower(ch) - whitespace = false - } else if unicode.IsTitle(ch) { - buffer[i] = unicode.ToLower(ch) - whitespace = false - } else if unicode.IsLower(ch) { - if whitespace { - buffer[i] = unicode.ToTitle(ch) - whitespace = false - } else { - buffer[i] = unicode.ToUpper(ch) - } - } else { - whitespace = unicode.IsSpace(ch) - } - } - return string(buffer) -} - -/* -Initials extracts the initial letters from each word in the string. The first letter of the string and all first -letters after the defined delimiters are returned as a new string. Their case is not changed. If the delimiters -parameter is excluded, then Whitespace is used. Whitespace is defined by unicode.IsSpacea(char). An empty delimiter array returns an empty string. - -Parameters: - str - the string to get initials from - delimiters - set of characters to determine words, exclusion of this parameter means whitespace would be delimeter -Returns: - string of initial letters -*/ -func Initials(str string, delimiters ...rune) string { - if str == "" { - return str - } - if delimiters != nil && len(delimiters) == 0 { - return "" - } - strLen := len(str) - var buf bytes.Buffer - lastWasGap := true - for i := 0; i < strLen; i++ { - ch := rune(str[i]) - - if isDelimiter(ch, delimiters...) 
{ - lastWasGap = true - } else if lastWasGap { - buf.WriteRune(ch) - lastWasGap = false - } - } - return buf.String() -} - -// private function (lower case func name) -func isDelimiter(ch rune, delimiters ...rune) bool { - if delimiters == nil { - return unicode.IsSpace(ch) - } - for _, delimiter := range delimiters { - if ch == delimiter { - return true - } - } - return false -} diff --git a/vendor/github.com/golang/glog/LICENSE b/vendor/github.com/golang/glog/LICENSE deleted file mode 100644 index 37ec93a..0000000 --- a/vendor/github.com/golang/glog/LICENSE +++ /dev/null @@ -1,191 +0,0 @@ -Apache License -Version 2.0, January 2004 -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, and -distribution as defined by Sections 1 through 9 of this document. - -"Licensor" shall mean the copyright owner or entity authorized by the copyright -owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all other entities -that control, are controlled by, or are under common control with that entity. -For the purposes of this definition, "control" means (i) the power, direct or -indirect, to cause the direction or management of such entity, whether by -contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the -outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity exercising -permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, including -but not limited to software source code, documentation source, and configuration -files. - -"Object" form shall mean any form resulting from mechanical transformation or -translation of a Source form, including but not limited to compiled object code, -generated documentation, and conversions to other media types. - -"Work" shall mean the work of authorship, whether in Source or Object form, made -available under the License, as indicated by a copyright notice that is included -in or attached to the work (an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object form, that -is based on (or derived from) the Work and for which the editorial revisions, -annotations, elaborations, or other modifications represent, as a whole, an -original work of authorship. For the purposes of this License, Derivative Works -shall not include works that remain separable from, or merely link (or bind by -name) to the interfaces of, the Work and Derivative Works thereof. - -"Contribution" shall mean any work of authorship, including the original version -of the Work and any modifications or additions to that Work or Derivative Works -thereof, that is intentionally submitted to Licensor for inclusion in the Work -by the copyright owner or by an individual or Legal Entity authorized to submit -on behalf of the copyright owner. 
For the purposes of this definition, -"submitted" means any form of electronic, verbal, or written communication sent -to the Licensor or its representatives, including but not limited to -communication on electronic mailing lists, source code control systems, and -issue tracking systems that are managed by, or on behalf of, the Licensor for -the purpose of discussing and improving the Work, but excluding communication -that is conspicuously marked or otherwise designated in writing by the copyright -owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity on behalf -of whom a Contribution has been received by Licensor and subsequently -incorporated within the Work. - -2. Grant of Copyright License. - -Subject to the terms and conditions of this License, each Contributor hereby -grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, -irrevocable copyright license to reproduce, prepare Derivative Works of, -publicly display, publicly perform, sublicense, and distribute the Work and such -Derivative Works in Source or Object form. - -3. Grant of Patent License. - -Subject to the terms and conditions of this License, each Contributor hereby -grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, -irrevocable (except as stated in this section) patent license to make, have -made, use, offer to sell, sell, import, and otherwise transfer the Work, where -such license applies only to those patent claims licensable by such Contributor -that are necessarily infringed by their Contribution(s) alone or by combination -of their Contribution(s) with the Work to which such Contribution(s) was -submitted. If You institute patent litigation against any entity (including a -cross-claim or counterclaim in a lawsuit) alleging that the Work or a -Contribution incorporated within the Work constitutes direct or contributory -patent infringement, then any patent licenses granted to You under this License -for that Work shall terminate as of the date such litigation is filed. - -4. Redistribution. - -You may reproduce and distribute copies of the Work or Derivative Works thereof -in any medium, with or without modifications, and in Source or Object form, -provided that You meet the following conditions: - -You must give any other recipients of the Work or Derivative Works a copy of -this License; and -You must cause any modified files to carry prominent notices stating that You -changed the files; and -You must retain, in the Source form of any Derivative Works that You distribute, -all copyright, patent, trademark, and attribution notices from the Source form -of the Work, excluding those notices that do not pertain to any part of the -Derivative Works; and -If the Work includes a "NOTICE" text file as part of its distribution, then any -Derivative Works that You distribute must include a readable copy of the -attribution notices contained within such NOTICE file, excluding those notices -that do not pertain to any part of the Derivative Works, in at least one of the -following places: within a NOTICE text file distributed as part of the -Derivative Works; within the Source form or documentation, if provided along -with the Derivative Works; or, within a display generated by the Derivative -Works, if and wherever such third-party notices normally appear. The contents of -the NOTICE file are for informational purposes only and do not modify the -License. 
You may add Your own attribution notices within Derivative Works that -You distribute, alongside or as an addendum to the NOTICE text from the Work, -provided that such additional attribution notices cannot be construed as -modifying the License. -You may add Your own copyright statement to Your modifications and may provide -additional or different license terms and conditions for use, reproduction, or -distribution of Your modifications, or for any such Derivative Works as a whole, -provided Your use, reproduction, and distribution of the Work otherwise complies -with the conditions stated in this License. - -5. Submission of Contributions. - -Unless You explicitly state otherwise, any Contribution intentionally submitted -for inclusion in the Work by You to the Licensor shall be under the terms and -conditions of this License, without any additional terms or conditions. -Notwithstanding the above, nothing herein shall supersede or modify the terms of -any separate license agreement you may have executed with Licensor regarding -such Contributions. - -6. Trademarks. - -This License does not grant permission to use the trade names, trademarks, -service marks, or product names of the Licensor, except as required for -reasonable and customary use in describing the origin of the Work and -reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. - -Unless required by applicable law or agreed to in writing, Licensor provides the -Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, -including, without limitation, any warranties or conditions of TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are -solely responsible for determining the appropriateness of using or -redistributing the Work and assume any risks associated with Your exercise of -permissions under this License. - -8. Limitation of Liability. - -In no event and under no legal theory, whether in tort (including negligence), -contract, or otherwise, unless required by applicable law (such as deliberate -and grossly negligent acts) or agreed to in writing, shall any Contributor be -liable to You for damages, including any direct, indirect, special, incidental, -or consequential damages of any character arising as a result of this License or -out of the use or inability to use the Work (including but not limited to -damages for loss of goodwill, work stoppage, computer failure or malfunction, or -any and all other commercial damages or losses), even if such Contributor has -been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. - -While redistributing the Work or Derivative Works thereof, You may choose to -offer, and charge a fee for, acceptance of support, warranty, indemnity, or -other liability obligations and/or rights consistent with this License. However, -in accepting such obligations, You may act only on Your own behalf and on Your -sole responsibility, not on behalf of any other Contributor, and only if You -agree to indemnify, defend, and hold each Contributor harmless for any liability -incurred by, or claims asserted against, such Contributor by reason of your -accepting any such warranty or additional liability. 
- -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work - -To apply the Apache License to your work, attach the following boilerplate -notice, with the fields enclosed by brackets "[]" replaced with your own -identifying information. (Don't include the brackets!) The text should be -enclosed in the appropriate comment syntax for the file format. We also -recommend that a file or class name and description of purpose be included on -the same "printed page" as the copyright notice for easier identification within -third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/golang/glog/README b/vendor/github.com/golang/glog/README deleted file mode 100644 index 387b4eb..0000000 --- a/vendor/github.com/golang/glog/README +++ /dev/null @@ -1,44 +0,0 @@ -glog -==== - -Leveled execution logs for Go. - -This is an efficient pure Go implementation of leveled logs in the -manner of the open source C++ package - https://github.com/google/glog - -By binding methods to booleans it is possible to use the log package -without paying the expense of evaluating the arguments to the log. -Through the -vmodule flag, the package also provides fine-grained -control over logging at the file level. - -The comment from glog.go introduces the ideas: - - Package glog implements logging analogous to the Google-internal - C++ INFO/ERROR/V setup. It provides functions Info, Warning, - Error, Fatal, plus formatting variants such as Infof. It - also provides V-style logging controlled by the -v and - -vmodule=file=2 flags. - - Basic examples: - - glog.Info("Prepare to repel boarders") - - glog.Fatalf("Initialization failed: %s", err) - - See the documentation for the V function for an explanation - of these examples: - - if glog.V(2) { - glog.Info("Starting transaction...") - } - - glog.V(2).Infoln("Processed", nItems, "elements") - - -The repository contains an open source version of the log package -used inside Google. The master copy of the source lives inside -Google, not here. The code in this repo is for export only and is not itself -under development. Feature requests will be ignored. - -Send bug reports to golang-nuts@googlegroups.com. diff --git a/vendor/github.com/golang/glog/glog.go b/vendor/github.com/golang/glog/glog.go deleted file mode 100644 index 54bd7af..0000000 --- a/vendor/github.com/golang/glog/glog.go +++ /dev/null @@ -1,1180 +0,0 @@ -// Go support for leveled logs, analogous to https://code.google.com/p/google-glog/ -// -// Copyright 2013 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package glog implements logging analogous to the Google-internal C++ INFO/ERROR/V setup. -// It provides functions Info, Warning, Error, Fatal, plus formatting variants such as -// Infof. It also provides V-style logging controlled by the -v and -vmodule=file=2 flags. -// -// Basic examples: -// -// glog.Info("Prepare to repel boarders") -// -// glog.Fatalf("Initialization failed: %s", err) -// -// See the documentation for the V function for an explanation of these examples: -// -// if glog.V(2) { -// glog.Info("Starting transaction...") -// } -// -// glog.V(2).Infoln("Processed", nItems, "elements") -// -// Log output is buffered and written periodically using Flush. Programs -// should call Flush before exiting to guarantee all log output is written. -// -// By default, all log statements write to files in a temporary directory. -// This package provides several flags that modify this behavior. -// As a result, flag.Parse must be called before any logging is done. -// -// -logtostderr=false -// Logs are written to standard error instead of to files. -// -alsologtostderr=false -// Logs are written to standard error as well as to files. -// -stderrthreshold=ERROR -// Log events at or above this severity are logged to standard -// error as well as to files. -// -log_dir="" -// Log files will be written to this directory instead of the -// default temporary directory. -// -// Other flags provide aids to debugging. -// -// -log_backtrace_at="" -// When set to a file and line number holding a logging statement, -// such as -// -log_backtrace_at=gopherflakes.go:234 -// a stack trace will be written to the Info log whenever execution -// hits that statement. (Unlike with -vmodule, the ".go" must be -// present.) -// -v=0 -// Enable V-leveled logging at the specified level. -// -vmodule="" -// The syntax of the argument is a comma-separated list of pattern=N, -// where pattern is a literal file name (minus the ".go" suffix) or -// "glob" pattern and N is a V level. For instance, -// -vmodule=gopher*=3 -// sets the V level to 3 in all Go files whose names begin "gopher". -// -package glog - -import ( - "bufio" - "bytes" - "errors" - "flag" - "fmt" - "io" - stdLog "log" - "os" - "path/filepath" - "runtime" - "strconv" - "strings" - "sync" - "sync/atomic" - "time" -) - -// severity identifies the sort of log: info, warning etc. It also implements -// the flag.Value interface. The -stderrthreshold flag is of type severity and -// should be modified only through the flag.Value interface. The values match -// the corresponding constants in C++. -type severity int32 // sync/atomic int32 - -// These constants identify the log levels in order of increasing severity. -// A message written to a high-severity log file is also written to each -// lower-severity log file. -const ( - infoLog severity = iota - warningLog - errorLog - fatalLog - numSeverity = 4 -) - -const severityChar = "IWEF" - -var severityName = []string{ - infoLog: "INFO", - warningLog: "WARNING", - errorLog: "ERROR", - fatalLog: "FATAL", -} - -// get returns the value of the severity. 
-func (s *severity) get() severity { - return severity(atomic.LoadInt32((*int32)(s))) -} - -// set sets the value of the severity. -func (s *severity) set(val severity) { - atomic.StoreInt32((*int32)(s), int32(val)) -} - -// String is part of the flag.Value interface. -func (s *severity) String() string { - return strconv.FormatInt(int64(*s), 10) -} - -// Get is part of the flag.Value interface. -func (s *severity) Get() interface{} { - return *s -} - -// Set is part of the flag.Value interface. -func (s *severity) Set(value string) error { - var threshold severity - // Is it a known name? - if v, ok := severityByName(value); ok { - threshold = v - } else { - v, err := strconv.Atoi(value) - if err != nil { - return err - } - threshold = severity(v) - } - logging.stderrThreshold.set(threshold) - return nil -} - -func severityByName(s string) (severity, bool) { - s = strings.ToUpper(s) - for i, name := range severityName { - if name == s { - return severity(i), true - } - } - return 0, false -} - -// OutputStats tracks the number of output lines and bytes written. -type OutputStats struct { - lines int64 - bytes int64 -} - -// Lines returns the number of lines written. -func (s *OutputStats) Lines() int64 { - return atomic.LoadInt64(&s.lines) -} - -// Bytes returns the number of bytes written. -func (s *OutputStats) Bytes() int64 { - return atomic.LoadInt64(&s.bytes) -} - -// Stats tracks the number of lines of output and number of bytes -// per severity level. Values must be read with atomic.LoadInt64. -var Stats struct { - Info, Warning, Error OutputStats -} - -var severityStats = [numSeverity]*OutputStats{ - infoLog: &Stats.Info, - warningLog: &Stats.Warning, - errorLog: &Stats.Error, -} - -// Level is exported because it appears in the arguments to V and is -// the type of the v flag, which can be set programmatically. -// It's a distinct type because we want to discriminate it from logType. -// Variables of type level are only changed under logging.mu. -// The -v flag is read only with atomic ops, so the state of the logging -// module is consistent. - -// Level is treated as a sync/atomic int32. - -// Level specifies a level of verbosity for V logs. *Level implements -// flag.Value; the -v flag is of type Level and should be modified -// only through the flag.Value interface. -type Level int32 - -// get returns the value of the Level. -func (l *Level) get() Level { - return Level(atomic.LoadInt32((*int32)(l))) -} - -// set sets the value of the Level. -func (l *Level) set(val Level) { - atomic.StoreInt32((*int32)(l), int32(val)) -} - -// String is part of the flag.Value interface. -func (l *Level) String() string { - return strconv.FormatInt(int64(*l), 10) -} - -// Get is part of the flag.Value interface. -func (l *Level) Get() interface{} { - return *l -} - -// Set is part of the flag.Value interface. -func (l *Level) Set(value string) error { - v, err := strconv.Atoi(value) - if err != nil { - return err - } - logging.mu.Lock() - defer logging.mu.Unlock() - logging.setVState(Level(v), logging.vmodule.filter, false) - return nil -} - -// moduleSpec represents the setting of the -vmodule flag. -type moduleSpec struct { - filter []modulePat -} - -// modulePat contains a filter for the -vmodule flag. -// It holds a verbosity level and a file pattern to match. -type modulePat struct { - pattern string - literal bool // The pattern is a literal string - level Level -} - -// match reports whether the file matches the pattern. 
It uses a string -// comparison if the pattern contains no metacharacters. -func (m *modulePat) match(file string) bool { - if m.literal { - return file == m.pattern - } - match, _ := filepath.Match(m.pattern, file) - return match -} - -func (m *moduleSpec) String() string { - // Lock because the type is not atomic. TODO: clean this up. - logging.mu.Lock() - defer logging.mu.Unlock() - var b bytes.Buffer - for i, f := range m.filter { - if i > 0 { - b.WriteRune(',') - } - fmt.Fprintf(&b, "%s=%d", f.pattern, f.level) - } - return b.String() -} - -// Get is part of the (Go 1.2) flag.Getter interface. It always returns nil for this flag type since the -// struct is not exported. -func (m *moduleSpec) Get() interface{} { - return nil -} - -var errVmoduleSyntax = errors.New("syntax error: expect comma-separated list of filename=N") - -// Syntax: -vmodule=recordio=2,file=1,gfs*=3 -func (m *moduleSpec) Set(value string) error { - var filter []modulePat - for _, pat := range strings.Split(value, ",") { - if len(pat) == 0 { - // Empty strings such as from a trailing comma can be ignored. - continue - } - patLev := strings.Split(pat, "=") - if len(patLev) != 2 || len(patLev[0]) == 0 || len(patLev[1]) == 0 { - return errVmoduleSyntax - } - pattern := patLev[0] - v, err := strconv.Atoi(patLev[1]) - if err != nil { - return errors.New("syntax error: expect comma-separated list of filename=N") - } - if v < 0 { - return errors.New("negative value for vmodule level") - } - if v == 0 { - continue // Ignore. It's harmless but no point in paying the overhead. - } - // TODO: check syntax of filter? - filter = append(filter, modulePat{pattern, isLiteral(pattern), Level(v)}) - } - logging.mu.Lock() - defer logging.mu.Unlock() - logging.setVState(logging.verbosity, filter, true) - return nil -} - -// isLiteral reports whether the pattern is a literal string, that is, has no metacharacters -// that require filepath.Match to be called to match the pattern. -func isLiteral(pattern string) bool { - return !strings.ContainsAny(pattern, `\*?[]`) -} - -// traceLocation represents the setting of the -log_backtrace_at flag. -type traceLocation struct { - file string - line int -} - -// isSet reports whether the trace location has been specified. -// logging.mu is held. -func (t *traceLocation) isSet() bool { - return t.line > 0 -} - -// match reports whether the specified file and line matches the trace location. -// The argument file name is the full path, not the basename specified in the flag. -// logging.mu is held. -func (t *traceLocation) match(file string, line int) bool { - if t.line != line { - return false - } - if i := strings.LastIndex(file, "/"); i >= 0 { - file = file[i+1:] - } - return t.file == file -} - -func (t *traceLocation) String() string { - // Lock because the type is not atomic. TODO: clean this up. - logging.mu.Lock() - defer logging.mu.Unlock() - return fmt.Sprintf("%s:%d", t.file, t.line) -} - -// Get is part of the (Go 1.2) flag.Getter interface. It always returns nil for this flag type since the -// struct is not exported -func (t *traceLocation) Get() interface{} { - return nil -} - -var errTraceSyntax = errors.New("syntax error: expect file.go:234") - -// Syntax: -log_backtrace_at=gopherflakes.go:234 -// Note that unlike vmodule the file extension is included here. -func (t *traceLocation) Set(value string) error { - if value == "" { - // Unset. 
- t.line = 0 - t.file = "" - } - fields := strings.Split(value, ":") - if len(fields) != 2 { - return errTraceSyntax - } - file, line := fields[0], fields[1] - if !strings.Contains(file, ".") { - return errTraceSyntax - } - v, err := strconv.Atoi(line) - if err != nil { - return errTraceSyntax - } - if v <= 0 { - return errors.New("negative or zero value for level") - } - logging.mu.Lock() - defer logging.mu.Unlock() - t.line = v - t.file = file - return nil -} - -// flushSyncWriter is the interface satisfied by logging destinations. -type flushSyncWriter interface { - Flush() error - Sync() error - io.Writer -} - -func init() { - flag.BoolVar(&logging.toStderr, "logtostderr", false, "log to standard error instead of files") - flag.BoolVar(&logging.alsoToStderr, "alsologtostderr", false, "log to standard error as well as files") - flag.Var(&logging.verbosity, "v", "log level for V logs") - flag.Var(&logging.stderrThreshold, "stderrthreshold", "logs at or above this threshold go to stderr") - flag.Var(&logging.vmodule, "vmodule", "comma-separated list of pattern=N settings for file-filtered logging") - flag.Var(&logging.traceLocation, "log_backtrace_at", "when logging hits line file:N, emit a stack trace") - - // Default stderrThreshold is ERROR. - logging.stderrThreshold = errorLog - - logging.setVState(0, nil, false) - go logging.flushDaemon() -} - -// Flush flushes all pending log I/O. -func Flush() { - logging.lockAndFlushAll() -} - -// loggingT collects all the global state of the logging setup. -type loggingT struct { - // Boolean flags. Not handled atomically because the flag.Value interface - // does not let us avoid the =true, and that shorthand is necessary for - // compatibility. TODO: does this matter enough to fix? Seems unlikely. - toStderr bool // The -logtostderr flag. - alsoToStderr bool // The -alsologtostderr flag. - - // Level flag. Handled atomically. - stderrThreshold severity // The -stderrthreshold flag. - - // freeList is a list of byte buffers, maintained under freeListMu. - freeList *buffer - // freeListMu maintains the free list. It is separate from the main mutex - // so buffers can be grabbed and printed to without holding the main lock, - // for better parallelization. - freeListMu sync.Mutex - - // mu protects the remaining elements of this structure and is - // used to synchronize logging. - mu sync.Mutex - // file holds writer for each of the log types. - file [numSeverity]flushSyncWriter - // pcs is used in V to avoid an allocation when computing the caller's PC. - pcs [1]uintptr - // vmap is a cache of the V Level for each V() call site, identified by PC. - // It is wiped whenever the vmodule flag changes state. - vmap map[uintptr]Level - // filterLength stores the length of the vmodule filter chain. If greater - // than zero, it means vmodule is enabled. It may be read safely - // using sync.LoadInt32, but is only modified under mu. - filterLength int32 - // traceLocation is the state of the -log_backtrace_at flag. - traceLocation traceLocation - // These flags are modified only under lock, although verbosity may be fetched - // safely using atomic.LoadInt32. - vmodule moduleSpec // The state of the -vmodule flag. - verbosity Level // V logging level, the value of the -v flag/ -} - -// buffer holds a byte Buffer for reuse. The zero value is ready for use. -type buffer struct { - bytes.Buffer - tmp [64]byte // temporary byte array for creating headers. - next *buffer -} - -var logging loggingT - -// setVState sets a consistent state for V logging. 
-// l.mu is held. -func (l *loggingT) setVState(verbosity Level, filter []modulePat, setFilter bool) { - // Turn verbosity off so V will not fire while we are in transition. - logging.verbosity.set(0) - // Ditto for filter length. - atomic.StoreInt32(&logging.filterLength, 0) - - // Set the new filters and wipe the pc->Level map if the filter has changed. - if setFilter { - logging.vmodule.filter = filter - logging.vmap = make(map[uintptr]Level) - } - - // Things are consistent now, so enable filtering and verbosity. - // They are enabled in order opposite to that in V. - atomic.StoreInt32(&logging.filterLength, int32(len(filter))) - logging.verbosity.set(verbosity) -} - -// getBuffer returns a new, ready-to-use buffer. -func (l *loggingT) getBuffer() *buffer { - l.freeListMu.Lock() - b := l.freeList - if b != nil { - l.freeList = b.next - } - l.freeListMu.Unlock() - if b == nil { - b = new(buffer) - } else { - b.next = nil - b.Reset() - } - return b -} - -// putBuffer returns a buffer to the free list. -func (l *loggingT) putBuffer(b *buffer) { - if b.Len() >= 256 { - // Let big buffers die a natural death. - return - } - l.freeListMu.Lock() - b.next = l.freeList - l.freeList = b - l.freeListMu.Unlock() -} - -var timeNow = time.Now // Stubbed out for testing. - -/* -header formats a log header as defined by the C++ implementation. -It returns a buffer containing the formatted header and the user's file and line number. -The depth specifies how many stack frames above lives the source line to be identified in the log message. - -Log lines have this form: - Lmmdd hh:mm:ss.uuuuuu threadid file:line] msg... -where the fields are defined as follows: - L A single character, representing the log level (eg 'I' for INFO) - mm The month (zero padded; ie May is '05') - dd The day (zero padded) - hh:mm:ss.uuuuuu Time in hours, minutes and fractional seconds - threadid The space-padded thread ID as returned by GetTID() - file The file name - line The line number - msg The user-supplied message -*/ -func (l *loggingT) header(s severity, depth int) (*buffer, string, int) { - _, file, line, ok := runtime.Caller(3 + depth) - if !ok { - file = "???" - line = 1 - } else { - slash := strings.LastIndex(file, "/") - if slash >= 0 { - file = file[slash+1:] - } - } - return l.formatHeader(s, file, line), file, line -} - -// formatHeader formats a log header using the provided file name and line number. -func (l *loggingT) formatHeader(s severity, file string, line int) *buffer { - now := timeNow() - if line < 0 { - line = 0 // not a real line number, but acceptable to someDigits - } - if s > fatalLog { - s = infoLog // for safety. - } - buf := l.getBuffer() - - // Avoid Fprintf, for speed. The format is so simple that we can do it quickly by hand. - // It's worth about 3X. Fprintf is hard. - _, month, day := now.Date() - hour, minute, second := now.Clock() - // Lmmdd hh:mm:ss.uuuuuu threadid file:line] - buf.tmp[0] = severityChar[s] - buf.twoDigits(1, int(month)) - buf.twoDigits(3, day) - buf.tmp[5] = ' ' - buf.twoDigits(6, hour) - buf.tmp[8] = ':' - buf.twoDigits(9, minute) - buf.tmp[11] = ':' - buf.twoDigits(12, second) - buf.tmp[14] = '.' 
- buf.nDigits(6, 15, now.Nanosecond()/1000, '0') - buf.tmp[21] = ' ' - buf.nDigits(7, 22, pid, ' ') // TODO: should be TID - buf.tmp[29] = ' ' - buf.Write(buf.tmp[:30]) - buf.WriteString(file) - buf.tmp[0] = ':' - n := buf.someDigits(1, line) - buf.tmp[n+1] = ']' - buf.tmp[n+2] = ' ' - buf.Write(buf.tmp[:n+3]) - return buf -} - -// Some custom tiny helper functions to print the log header efficiently. - -const digits = "0123456789" - -// twoDigits formats a zero-prefixed two-digit integer at buf.tmp[i]. -func (buf *buffer) twoDigits(i, d int) { - buf.tmp[i+1] = digits[d%10] - d /= 10 - buf.tmp[i] = digits[d%10] -} - -// nDigits formats an n-digit integer at buf.tmp[i], -// padding with pad on the left. -// It assumes d >= 0. -func (buf *buffer) nDigits(n, i, d int, pad byte) { - j := n - 1 - for ; j >= 0 && d > 0; j-- { - buf.tmp[i+j] = digits[d%10] - d /= 10 - } - for ; j >= 0; j-- { - buf.tmp[i+j] = pad - } -} - -// someDigits formats a zero-prefixed variable-width integer at buf.tmp[i]. -func (buf *buffer) someDigits(i, d int) int { - // Print into the top, then copy down. We know there's space for at least - // a 10-digit number. - j := len(buf.tmp) - for { - j-- - buf.tmp[j] = digits[d%10] - d /= 10 - if d == 0 { - break - } - } - return copy(buf.tmp[i:], buf.tmp[j:]) -} - -func (l *loggingT) println(s severity, args ...interface{}) { - buf, file, line := l.header(s, 0) - fmt.Fprintln(buf, args...) - l.output(s, buf, file, line, false) -} - -func (l *loggingT) print(s severity, args ...interface{}) { - l.printDepth(s, 1, args...) -} - -func (l *loggingT) printDepth(s severity, depth int, args ...interface{}) { - buf, file, line := l.header(s, depth) - fmt.Fprint(buf, args...) - if buf.Bytes()[buf.Len()-1] != '\n' { - buf.WriteByte('\n') - } - l.output(s, buf, file, line, false) -} - -func (l *loggingT) printf(s severity, format string, args ...interface{}) { - buf, file, line := l.header(s, 0) - fmt.Fprintf(buf, format, args...) - if buf.Bytes()[buf.Len()-1] != '\n' { - buf.WriteByte('\n') - } - l.output(s, buf, file, line, false) -} - -// printWithFileLine behaves like print but uses the provided file and line number. If -// alsoLogToStderr is true, the log message always appears on standard error; it -// will also appear in the log file unless --logtostderr is set. -func (l *loggingT) printWithFileLine(s severity, file string, line int, alsoToStderr bool, args ...interface{}) { - buf := l.formatHeader(s, file, line) - fmt.Fprint(buf, args...) - if buf.Bytes()[buf.Len()-1] != '\n' { - buf.WriteByte('\n') - } - l.output(s, buf, file, line, alsoToStderr) -} - -// output writes the data to the log files and releases the buffer. -func (l *loggingT) output(s severity, buf *buffer, file string, line int, alsoToStderr bool) { - l.mu.Lock() - if l.traceLocation.isSet() { - if l.traceLocation.match(file, line) { - buf.Write(stacks(false)) - } - } - data := buf.Bytes() - if !flag.Parsed() { - os.Stderr.Write([]byte("ERROR: logging before flag.Parse: ")) - os.Stderr.Write(data) - } else if l.toStderr { - os.Stderr.Write(data) - } else { - if alsoToStderr || l.alsoToStderr || s >= l.stderrThreshold.get() { - os.Stderr.Write(data) - } - if l.file[s] == nil { - if err := l.createFiles(s); err != nil { - os.Stderr.Write(data) // Make sure the message appears somewhere. 
- l.exit(err) - } - } - switch s { - case fatalLog: - l.file[fatalLog].Write(data) - fallthrough - case errorLog: - l.file[errorLog].Write(data) - fallthrough - case warningLog: - l.file[warningLog].Write(data) - fallthrough - case infoLog: - l.file[infoLog].Write(data) - } - } - if s == fatalLog { - // If we got here via Exit rather than Fatal, print no stacks. - if atomic.LoadUint32(&fatalNoStacks) > 0 { - l.mu.Unlock() - timeoutFlush(10 * time.Second) - os.Exit(1) - } - // Dump all goroutine stacks before exiting. - // First, make sure we see the trace for the current goroutine on standard error. - // If -logtostderr has been specified, the loop below will do that anyway - // as the first stack in the full dump. - if !l.toStderr { - os.Stderr.Write(stacks(false)) - } - // Write the stack trace for all goroutines to the files. - trace := stacks(true) - logExitFunc = func(error) {} // If we get a write error, we'll still exit below. - for log := fatalLog; log >= infoLog; log-- { - if f := l.file[log]; f != nil { // Can be nil if -logtostderr is set. - f.Write(trace) - } - } - l.mu.Unlock() - timeoutFlush(10 * time.Second) - os.Exit(255) // C++ uses -1, which is silly because it's anded with 255 anyway. - } - l.putBuffer(buf) - l.mu.Unlock() - if stats := severityStats[s]; stats != nil { - atomic.AddInt64(&stats.lines, 1) - atomic.AddInt64(&stats.bytes, int64(len(data))) - } -} - -// timeoutFlush calls Flush and returns when it completes or after timeout -// elapses, whichever happens first. This is needed because the hooks invoked -// by Flush may deadlock when glog.Fatal is called from a hook that holds -// a lock. -func timeoutFlush(timeout time.Duration) { - done := make(chan bool, 1) - go func() { - Flush() // calls logging.lockAndFlushAll() - done <- true - }() - select { - case <-done: - case <-time.After(timeout): - fmt.Fprintln(os.Stderr, "glog: Flush took longer than", timeout) - } -} - -// stacks is a wrapper for runtime.Stack that attempts to recover the data for all goroutines. -func stacks(all bool) []byte { - // We don't know how big the traces are, so grow a few times if they don't fit. Start large, though. - n := 10000 - if all { - n = 100000 - } - var trace []byte - for i := 0; i < 5; i++ { - trace = make([]byte, n) - nbytes := runtime.Stack(trace, all) - if nbytes < len(trace) { - return trace[:nbytes] - } - n *= 2 - } - return trace -} - -// logExitFunc provides a simple mechanism to override the default behavior -// of exiting on error. Used in testing and to guarantee we reach a required exit -// for fatal logs. Instead, exit could be a function rather than a method but that -// would make its use clumsier. -var logExitFunc func(error) - -// exit is called if there is trouble creating or writing log files. -// It flushes the logs and exits the program; there's no point in hanging around. -// l.mu is held. -func (l *loggingT) exit(err error) { - fmt.Fprintf(os.Stderr, "log: exiting because of error: %s\n", err) - // If logExitFunc is set, we do that instead of exiting. - if logExitFunc != nil { - logExitFunc(err) - return - } - l.flushAll() - os.Exit(2) -} - -// syncBuffer joins a bufio.Writer to its underlying file, providing access to the -// file's Sync method and providing a wrapper for the Write method that provides log -// file rotation. There are conflicting methods, so the file cannot be embedded. -// l.mu is held for all its methods. 
-type syncBuffer struct { - logger *loggingT - *bufio.Writer - file *os.File - sev severity - nbytes uint64 // The number of bytes written to this file -} - -func (sb *syncBuffer) Sync() error { - return sb.file.Sync() -} - -func (sb *syncBuffer) Write(p []byte) (n int, err error) { - if sb.nbytes+uint64(len(p)) >= MaxSize { - if err := sb.rotateFile(time.Now()); err != nil { - sb.logger.exit(err) - } - } - n, err = sb.Writer.Write(p) - sb.nbytes += uint64(n) - if err != nil { - sb.logger.exit(err) - } - return -} - -// rotateFile closes the syncBuffer's file and starts a new one. -func (sb *syncBuffer) rotateFile(now time.Time) error { - if sb.file != nil { - sb.Flush() - sb.file.Close() - } - var err error - sb.file, _, err = create(severityName[sb.sev], now) - sb.nbytes = 0 - if err != nil { - return err - } - - sb.Writer = bufio.NewWriterSize(sb.file, bufferSize) - - // Write header. - var buf bytes.Buffer - fmt.Fprintf(&buf, "Log file created at: %s\n", now.Format("2006/01/02 15:04:05")) - fmt.Fprintf(&buf, "Running on machine: %s\n", host) - fmt.Fprintf(&buf, "Binary: Built with %s %s for %s/%s\n", runtime.Compiler, runtime.Version(), runtime.GOOS, runtime.GOARCH) - fmt.Fprintf(&buf, "Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg\n") - n, err := sb.file.Write(buf.Bytes()) - sb.nbytes += uint64(n) - return err -} - -// bufferSize sizes the buffer associated with each log file. It's large -// so that log records can accumulate without the logging thread blocking -// on disk I/O. The flushDaemon will block instead. -const bufferSize = 256 * 1024 - -// createFiles creates all the log files for severity from sev down to infoLog. -// l.mu is held. -func (l *loggingT) createFiles(sev severity) error { - now := time.Now() - // Files are created in decreasing severity order, so as soon as we find one - // has already been created, we can stop. - for s := sev; s >= infoLog && l.file[s] == nil; s-- { - sb := &syncBuffer{ - logger: l, - sev: s, - } - if err := sb.rotateFile(now); err != nil { - return err - } - l.file[s] = sb - } - return nil -} - -const flushInterval = 30 * time.Second - -// flushDaemon periodically flushes the log file buffers. -func (l *loggingT) flushDaemon() { - for _ = range time.NewTicker(flushInterval).C { - l.lockAndFlushAll() - } -} - -// lockAndFlushAll is like flushAll but locks l.mu first. -func (l *loggingT) lockAndFlushAll() { - l.mu.Lock() - l.flushAll() - l.mu.Unlock() -} - -// flushAll flushes all the logs and attempts to "sync" their data to disk. -// l.mu is held. -func (l *loggingT) flushAll() { - // Flush from fatal down, in case there's trouble flushing. - for s := fatalLog; s >= infoLog; s-- { - file := l.file[s] - if file != nil { - file.Flush() // ignore error - file.Sync() // ignore error - } - } -} - -// CopyStandardLogTo arranges for messages written to the Go "log" package's -// default logs to also appear in the Google logs for the named and lower -// severities. Subsequent changes to the standard log's default output location -// or format may break this behavior. -// -// Valid names are "INFO", "WARNING", "ERROR", and "FATAL". If the name is not -// recognized, CopyStandardLogTo panics. 
-func CopyStandardLogTo(name string) { - sev, ok := severityByName(name) - if !ok { - panic(fmt.Sprintf("log.CopyStandardLogTo(%q): unrecognized severity name", name)) - } - // Set a log format that captures the user's file and line: - // d.go:23: message - stdLog.SetFlags(stdLog.Lshortfile) - stdLog.SetOutput(logBridge(sev)) -} - -// logBridge provides the Write method that enables CopyStandardLogTo to connect -// Go's standard logs to the logs provided by this package. -type logBridge severity - -// Write parses the standard logging line and passes its components to the -// logger for severity(lb). -func (lb logBridge) Write(b []byte) (n int, err error) { - var ( - file = "???" - line = 1 - text string - ) - // Split "d.go:23: message" into "d.go", "23", and "message". - if parts := bytes.SplitN(b, []byte{':'}, 3); len(parts) != 3 || len(parts[0]) < 1 || len(parts[2]) < 1 { - text = fmt.Sprintf("bad log format: %s", b) - } else { - file = string(parts[0]) - text = string(parts[2][1:]) // skip leading space - line, err = strconv.Atoi(string(parts[1])) - if err != nil { - text = fmt.Sprintf("bad line number: %s", b) - line = 1 - } - } - // printWithFileLine with alsoToStderr=true, so standard log messages - // always appear on standard error. - logging.printWithFileLine(severity(lb), file, line, true, text) - return len(b), nil -} - -// setV computes and remembers the V level for a given PC -// when vmodule is enabled. -// File pattern matching takes the basename of the file, stripped -// of its .go suffix, and uses filepath.Match, which is a little more -// general than the *? matching used in C++. -// l.mu is held. -func (l *loggingT) setV(pc uintptr) Level { - fn := runtime.FuncForPC(pc) - file, _ := fn.FileLine(pc) - // The file is something like /a/b/c/d.go. We want just the d. - if strings.HasSuffix(file, ".go") { - file = file[:len(file)-3] - } - if slash := strings.LastIndex(file, "/"); slash >= 0 { - file = file[slash+1:] - } - for _, filter := range l.vmodule.filter { - if filter.match(file) { - l.vmap[pc] = filter.level - return filter.level - } - } - l.vmap[pc] = 0 - return 0 -} - -// Verbose is a boolean type that implements Infof (like Printf) etc. -// See the documentation of V for more information. -type Verbose bool - -// V reports whether verbosity at the call site is at least the requested level. -// The returned value is a boolean of type Verbose, which implements Info, Infoln -// and Infof. These methods will write to the Info log if called. -// Thus, one may write either -// if glog.V(2) { glog.Info("log this") } -// or -// glog.V(2).Info("log this") -// The second form is shorter but the first is cheaper if logging is off because it does -// not evaluate its arguments. -// -// Whether an individual call to V generates a log record depends on the setting of -// the -v and --vmodule flags; both are off by default. If the level in the call to -// V is at least the value of -v, or of -vmodule for the source file containing the -// call, the V call will log. -func V(level Level) Verbose { - // This function tries hard to be cheap unless there's work to do. - // The fast path is two atomic loads and compares. - - // Here is a cheap but safe test to see if V logging is enabled globally. - if logging.verbosity.get() >= level { - return Verbose(true) - } - - // It's off globally but it vmodule may still be set. - // Here is another cheap but safe test to see if vmodule is enabled. 
- if atomic.LoadInt32(&logging.filterLength) > 0 { - // Now we need a proper lock to use the logging structure. The pcs field - // is shared so we must lock before accessing it. This is fairly expensive, - // but if V logging is enabled we're slow anyway. - logging.mu.Lock() - defer logging.mu.Unlock() - if runtime.Callers(2, logging.pcs[:]) == 0 { - return Verbose(false) - } - v, ok := logging.vmap[logging.pcs[0]] - if !ok { - v = logging.setV(logging.pcs[0]) - } - return Verbose(v >= level) - } - return Verbose(false) -} - -// Info is equivalent to the global Info function, guarded by the value of v. -// See the documentation of V for usage. -func (v Verbose) Info(args ...interface{}) { - if v { - logging.print(infoLog, args...) - } -} - -// Infoln is equivalent to the global Infoln function, guarded by the value of v. -// See the documentation of V for usage. -func (v Verbose) Infoln(args ...interface{}) { - if v { - logging.println(infoLog, args...) - } -} - -// Infof is equivalent to the global Infof function, guarded by the value of v. -// See the documentation of V for usage. -func (v Verbose) Infof(format string, args ...interface{}) { - if v { - logging.printf(infoLog, format, args...) - } -} - -// Info logs to the INFO log. -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Info(args ...interface{}) { - logging.print(infoLog, args...) -} - -// InfoDepth acts as Info but uses depth to determine which call frame to log. -// InfoDepth(0, "msg") is the same as Info("msg"). -func InfoDepth(depth int, args ...interface{}) { - logging.printDepth(infoLog, depth, args...) -} - -// Infoln logs to the INFO log. -// Arguments are handled in the manner of fmt.Println; a newline is appended if missing. -func Infoln(args ...interface{}) { - logging.println(infoLog, args...) -} - -// Infof logs to the INFO log. -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Infof(format string, args ...interface{}) { - logging.printf(infoLog, format, args...) -} - -// Warning logs to the WARNING and INFO logs. -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Warning(args ...interface{}) { - logging.print(warningLog, args...) -} - -// WarningDepth acts as Warning but uses depth to determine which call frame to log. -// WarningDepth(0, "msg") is the same as Warning("msg"). -func WarningDepth(depth int, args ...interface{}) { - logging.printDepth(warningLog, depth, args...) -} - -// Warningln logs to the WARNING and INFO logs. -// Arguments are handled in the manner of fmt.Println; a newline is appended if missing. -func Warningln(args ...interface{}) { - logging.println(warningLog, args...) -} - -// Warningf logs to the WARNING and INFO logs. -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Warningf(format string, args ...interface{}) { - logging.printf(warningLog, format, args...) -} - -// Error logs to the ERROR, WARNING, and INFO logs. -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Error(args ...interface{}) { - logging.print(errorLog, args...) -} - -// ErrorDepth acts as Error but uses depth to determine which call frame to log. -// ErrorDepth(0, "msg") is the same as Error("msg"). -func ErrorDepth(depth int, args ...interface{}) { - logging.printDepth(errorLog, depth, args...) -} - -// Errorln logs to the ERROR, WARNING, and INFO logs. 
-// Arguments are handled in the manner of fmt.Println; a newline is appended if missing. -func Errorln(args ...interface{}) { - logging.println(errorLog, args...) -} - -// Errorf logs to the ERROR, WARNING, and INFO logs. -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Errorf(format string, args ...interface{}) { - logging.printf(errorLog, format, args...) -} - -// Fatal logs to the FATAL, ERROR, WARNING, and INFO logs, -// including a stack trace of all running goroutines, then calls os.Exit(255). -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Fatal(args ...interface{}) { - logging.print(fatalLog, args...) -} - -// FatalDepth acts as Fatal but uses depth to determine which call frame to log. -// FatalDepth(0, "msg") is the same as Fatal("msg"). -func FatalDepth(depth int, args ...interface{}) { - logging.printDepth(fatalLog, depth, args...) -} - -// Fatalln logs to the FATAL, ERROR, WARNING, and INFO logs, -// including a stack trace of all running goroutines, then calls os.Exit(255). -// Arguments are handled in the manner of fmt.Println; a newline is appended if missing. -func Fatalln(args ...interface{}) { - logging.println(fatalLog, args...) -} - -// Fatalf logs to the FATAL, ERROR, WARNING, and INFO logs, -// including a stack trace of all running goroutines, then calls os.Exit(255). -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Fatalf(format string, args ...interface{}) { - logging.printf(fatalLog, format, args...) -} - -// fatalNoStacks is non-zero if we are to exit without dumping goroutine stacks. -// It allows Exit and relatives to use the Fatal logs. -var fatalNoStacks uint32 - -// Exit logs to the FATAL, ERROR, WARNING, and INFO logs, then calls os.Exit(1). -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Exit(args ...interface{}) { - atomic.StoreUint32(&fatalNoStacks, 1) - logging.print(fatalLog, args...) -} - -// ExitDepth acts as Exit but uses depth to determine which call frame to log. -// ExitDepth(0, "msg") is the same as Exit("msg"). -func ExitDepth(depth int, args ...interface{}) { - atomic.StoreUint32(&fatalNoStacks, 1) - logging.printDepth(fatalLog, depth, args...) -} - -// Exitln logs to the FATAL, ERROR, WARNING, and INFO logs, then calls os.Exit(1). -func Exitln(args ...interface{}) { - atomic.StoreUint32(&fatalNoStacks, 1) - logging.println(fatalLog, args...) -} - -// Exitf logs to the FATAL, ERROR, WARNING, and INFO logs, then calls os.Exit(1). -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Exitf(format string, args ...interface{}) { - atomic.StoreUint32(&fatalNoStacks, 1) - logging.printf(fatalLog, format, args...) -} diff --git a/vendor/github.com/golang/glog/glog_file.go b/vendor/github.com/golang/glog/glog_file.go deleted file mode 100644 index 65075d2..0000000 --- a/vendor/github.com/golang/glog/glog_file.go +++ /dev/null @@ -1,124 +0,0 @@ -// Go support for leveled logs, analogous to https://code.google.com/p/google-glog/ -// -// Copyright 2013 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// File I/O for logs. - -package glog - -import ( - "errors" - "flag" - "fmt" - "os" - "os/user" - "path/filepath" - "strings" - "sync" - "time" -) - -// MaxSize is the maximum size of a log file in bytes. -var MaxSize uint64 = 1024 * 1024 * 1800 - -// logDirs lists the candidate directories for new log files. -var logDirs []string - -// If non-empty, overrides the choice of directory in which to write logs. -// See createLogDirs for the full list of possible destinations. -var logDir = flag.String("log_dir", "", "If non-empty, write log files in this directory") - -func createLogDirs() { - if *logDir != "" { - logDirs = append(logDirs, *logDir) - } - logDirs = append(logDirs, os.TempDir()) -} - -var ( - pid = os.Getpid() - program = filepath.Base(os.Args[0]) - host = "unknownhost" - userName = "unknownuser" -) - -func init() { - h, err := os.Hostname() - if err == nil { - host = shortHostname(h) - } - - current, err := user.Current() - if err == nil { - userName = current.Username - } - - // Sanitize userName since it may contain filepath separators on Windows. - userName = strings.Replace(userName, `\`, "_", -1) -} - -// shortHostname returns its argument, truncating at the first period. -// For instance, given "www.google.com" it returns "www". -func shortHostname(hostname string) string { - if i := strings.Index(hostname, "."); i >= 0 { - return hostname[:i] - } - return hostname -} - -// logName returns a new log file name containing tag, with start time t, and -// the name for the symlink for tag. -func logName(tag string, t time.Time) (name, link string) { - name = fmt.Sprintf("%s.%s.%s.log.%s.%04d%02d%02d-%02d%02d%02d.%d", - program, - host, - userName, - tag, - t.Year(), - t.Month(), - t.Day(), - t.Hour(), - t.Minute(), - t.Second(), - pid) - return name, program + "." + tag -} - -var onceLogDirs sync.Once - -// create creates a new log file and returns the file and its filename, which -// contains tag ("INFO", "FATAL", etc.) and t. If the file is created -// successfully, create also attempts to update the symlink for that tag, ignoring -// errors. -func create(tag string, t time.Time) (f *os.File, filename string, err error) { - onceLogDirs.Do(createLogDirs) - if len(logDirs) == 0 { - return nil, "", errors.New("log: no log dirs") - } - name, link := logName(tag, t) - var lastErr error - for _, dir := range logDirs { - fname := filepath.Join(dir, name) - f, err := os.Create(fname) - if err == nil { - symlink := filepath.Join(dir, link) - os.Remove(symlink) // ignore err - os.Symlink(name, symlink) // ignore err - return f, fname, nil - } - lastErr = err - } - return nil, "", fmt.Errorf("log: cannot create log: %v", lastErr) -} diff --git a/vendor/github.com/golang/protobuf/AUTHORS b/vendor/github.com/golang/protobuf/AUTHORS deleted file mode 100644 index 15167cd..0000000 --- a/vendor/github.com/golang/protobuf/AUTHORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code refers to The Go Authors for copyright purposes. -# The master list of authors is in the main Go distribution, -# visible at http://tip.golang.org/AUTHORS. 
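The glog code removed above gates verbose logging behind V(); its doc comment shows the two supported call forms. Below is a minimal, hypothetical usage sketch assuming the upstream github.com/golang/glog module in place of this deleted vendor copy (the flag names and call forms come from the removed doc comments; everything else is illustrative).

package main

import (
	"flag"

	"github.com/golang/glog"
)

func main() {
	// glog registers its flags (-v, -vmodule, -log_dir, -logtostderr, ...) in init,
	// so they must be parsed before logging starts.
	flag.Parse()
	defer glog.Flush() // flush buffered log files on exit (see flushDaemon above)

	// Guarded form: the argument list is only evaluated when -v >= 2,
	// which is the cheap fast path (two atomic loads) in the removed V().
	if glog.V(2) {
		glog.Info("verbose detail, guarded form")
	}

	// Shorter form: V(2) returns a Verbose value that implements Info/Infoln/Infof.
	glog.V(2).Infof("verbose detail for item %d", 42)

	glog.Warning("written to the WARNING and INFO logs")
}

When verbose logging is disabled the guarded form is the cheaper of the two, since its arguments are never evaluated; that is exactly the fast path the removed V() implementation optimizes.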
diff --git a/vendor/github.com/golang/protobuf/CONTRIBUTORS b/vendor/github.com/golang/protobuf/CONTRIBUTORS deleted file mode 100644 index 1c4577e..0000000 --- a/vendor/github.com/golang/protobuf/CONTRIBUTORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code was written by the Go contributors. -# The master list of contributors is in the main Go distribution, -# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/github.com/golang/protobuf/LICENSE b/vendor/github.com/golang/protobuf/LICENSE deleted file mode 100644 index 1b1b192..0000000 --- a/vendor/github.com/golang/protobuf/LICENSE +++ /dev/null @@ -1,31 +0,0 @@ -Go support for Protocol Buffers - Google's data interchange format - -Copyright 2010 The Go Authors. All rights reserved. -https://github.com/golang/protobuf - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/vendor/github.com/golang/protobuf/proto/Makefile b/vendor/github.com/golang/protobuf/proto/Makefile deleted file mode 100644 index e2e0651..0000000 --- a/vendor/github.com/golang/protobuf/proto/Makefile +++ /dev/null @@ -1,43 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -install: - go install - -test: install generate-test-pbs - go test - - -generate-test-pbs: - make install - make -C testdata - protoc --go_out=Mtestdata/test.proto=github.com/golang/protobuf/proto/testdata,Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. proto3_proto/proto3.proto - make diff --git a/vendor/github.com/golang/protobuf/proto/clone.go b/vendor/github.com/golang/protobuf/proto/clone.go deleted file mode 100644 index e392575..0000000 --- a/vendor/github.com/golang/protobuf/proto/clone.go +++ /dev/null @@ -1,229 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2011 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Protocol buffer deep copy and merge. -// TODO: RawMessage. - -package proto - -import ( - "log" - "reflect" - "strings" -) - -// Clone returns a deep copy of a protocol buffer. -func Clone(pb Message) Message { - in := reflect.ValueOf(pb) - if in.IsNil() { - return pb - } - - out := reflect.New(in.Type().Elem()) - // out is empty so a merge is a deep copy. - mergeStruct(out.Elem(), in.Elem()) - return out.Interface().(Message) -} - -// Merge merges src into dst. 
-// Required and optional fields that are set in src will be set to that value in dst. -// Elements of repeated fields will be appended. -// Merge panics if src and dst are not the same type, or if dst is nil. -func Merge(dst, src Message) { - in := reflect.ValueOf(src) - out := reflect.ValueOf(dst) - if out.IsNil() { - panic("proto: nil destination") - } - if in.Type() != out.Type() { - // Explicit test prior to mergeStruct so that mistyped nils will fail - panic("proto: type mismatch") - } - if in.IsNil() { - // Merging nil into non-nil is a quiet no-op - return - } - mergeStruct(out.Elem(), in.Elem()) -} - -func mergeStruct(out, in reflect.Value) { - sprop := GetProperties(in.Type()) - for i := 0; i < in.NumField(); i++ { - f := in.Type().Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - mergeAny(out.Field(i), in.Field(i), false, sprop.Prop[i]) - } - - if emIn, ok := extendable(in.Addr().Interface()); ok { - emOut, _ := extendable(out.Addr().Interface()) - mIn, muIn := emIn.extensionsRead() - if mIn != nil { - mOut := emOut.extensionsWrite() - muIn.Lock() - mergeExtension(mOut, mIn) - muIn.Unlock() - } - } - - uf := in.FieldByName("XXX_unrecognized") - if !uf.IsValid() { - return - } - uin := uf.Bytes() - if len(uin) > 0 { - out.FieldByName("XXX_unrecognized").SetBytes(append([]byte(nil), uin...)) - } -} - -// mergeAny performs a merge between two values of the same type. -// viaPtr indicates whether the values were indirected through a pointer (implying proto2). -// prop is set if this is a struct field (it may be nil). -func mergeAny(out, in reflect.Value, viaPtr bool, prop *Properties) { - if in.Type() == protoMessageType { - if !in.IsNil() { - if out.IsNil() { - out.Set(reflect.ValueOf(Clone(in.Interface().(Message)))) - } else { - Merge(out.Interface().(Message), in.Interface().(Message)) - } - } - return - } - switch in.Kind() { - case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64, - reflect.String, reflect.Uint32, reflect.Uint64: - if !viaPtr && isProto3Zero(in) { - return - } - out.Set(in) - case reflect.Interface: - // Probably a oneof field; copy non-nil values. - if in.IsNil() { - return - } - // Allocate destination if it is not set, or set to a different type. - // Otherwise we will merge as normal. - if out.IsNil() || out.Elem().Type() != in.Elem().Type() { - out.Set(reflect.New(in.Elem().Elem().Type())) // interface -> *T -> T -> new(T) - } - mergeAny(out.Elem(), in.Elem(), false, nil) - case reflect.Map: - if in.Len() == 0 { - return - } - if out.IsNil() { - out.Set(reflect.MakeMap(in.Type())) - } - // For maps with value types of *T or []byte we need to deep copy each value. - elemKind := in.Type().Elem().Kind() - for _, key := range in.MapKeys() { - var val reflect.Value - switch elemKind { - case reflect.Ptr: - val = reflect.New(in.Type().Elem().Elem()) - mergeAny(val, in.MapIndex(key), false, nil) - case reflect.Slice: - val = in.MapIndex(key) - val = reflect.ValueOf(append([]byte{}, val.Bytes()...)) - default: - val = in.MapIndex(key) - } - out.SetMapIndex(key, val) - } - case reflect.Ptr: - if in.IsNil() { - return - } - if out.IsNil() { - out.Set(reflect.New(in.Elem().Type())) - } - mergeAny(out.Elem(), in.Elem(), true, nil) - case reflect.Slice: - if in.IsNil() { - return - } - if in.Type().Elem().Kind() == reflect.Uint8 { - // []byte is a scalar bytes field, not a repeated field. 
- - // Edge case: if this is in a proto3 message, a zero length - // bytes field is considered the zero value, and should not - // be merged. - if prop != nil && prop.proto3 && in.Len() == 0 { - return - } - - // Make a deep copy. - // Append to []byte{} instead of []byte(nil) so that we never end up - // with a nil result. - out.SetBytes(append([]byte{}, in.Bytes()...)) - return - } - n := in.Len() - if out.IsNil() { - out.Set(reflect.MakeSlice(in.Type(), 0, n)) - } - switch in.Type().Elem().Kind() { - case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64, - reflect.String, reflect.Uint32, reflect.Uint64: - out.Set(reflect.AppendSlice(out, in)) - default: - for i := 0; i < n; i++ { - x := reflect.Indirect(reflect.New(in.Type().Elem())) - mergeAny(x, in.Index(i), false, nil) - out.Set(reflect.Append(out, x)) - } - } - case reflect.Struct: - mergeStruct(out, in) - default: - // unknown type, so not a protocol buffer - log.Printf("proto: don't know how to copy %v", in) - } -} - -func mergeExtension(out, in map[int32]Extension) { - for extNum, eIn := range in { - eOut := Extension{desc: eIn.desc} - if eIn.value != nil { - v := reflect.New(reflect.TypeOf(eIn.value)).Elem() - mergeAny(v, reflect.ValueOf(eIn.value), false, nil) - eOut.value = v.Interface() - } - if eIn.enc != nil { - eOut.enc = make([]byte, len(eIn.enc)) - copy(eOut.enc, eIn.enc) - } - - out[extNum] = eOut - } -} diff --git a/vendor/github.com/golang/protobuf/proto/decode.go b/vendor/github.com/golang/protobuf/proto/decode.go deleted file mode 100644 index aa20729..0000000 --- a/vendor/github.com/golang/protobuf/proto/decode.go +++ /dev/null @@ -1,970 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Routines for decoding protocol buffer data to construct in-memory representations. 
- */ - -import ( - "errors" - "fmt" - "io" - "os" - "reflect" -) - -// errOverflow is returned when an integer is too large to be represented. -var errOverflow = errors.New("proto: integer overflow") - -// ErrInternalBadWireType is returned by generated code when an incorrect -// wire type is encountered. It does not get returned to user code. -var ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof") - -// The fundamental decoders that interpret bytes on the wire. -// Those that take integer types all return uint64 and are -// therefore of type valueDecoder. - -// DecodeVarint reads a varint-encoded integer from the slice. -// It returns the integer and the number of bytes consumed, or -// zero if there is not enough. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -func DecodeVarint(buf []byte) (x uint64, n int) { - for shift := uint(0); shift < 64; shift += 7 { - if n >= len(buf) { - return 0, 0 - } - b := uint64(buf[n]) - n++ - x |= (b & 0x7F) << shift - if (b & 0x80) == 0 { - return x, n - } - } - - // The number is too large to represent in a 64-bit value. - return 0, 0 -} - -func (p *Buffer) decodeVarintSlow() (x uint64, err error) { - i := p.index - l := len(p.buf) - - for shift := uint(0); shift < 64; shift += 7 { - if i >= l { - err = io.ErrUnexpectedEOF - return - } - b := p.buf[i] - i++ - x |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - p.index = i - return - } - } - - // The number is too large to represent in a 64-bit value. - err = errOverflow - return -} - -// DecodeVarint reads a varint-encoded integer from the Buffer. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -func (p *Buffer) DecodeVarint() (x uint64, err error) { - i := p.index - buf := p.buf - - if i >= len(buf) { - return 0, io.ErrUnexpectedEOF - } else if buf[i] < 0x80 { - p.index++ - return uint64(buf[i]), nil - } else if len(buf)-i < 10 { - return p.decodeVarintSlow() - } - - var b uint64 - // we already checked the first byte - x = uint64(buf[i]) - 0x80 - i++ - - b = uint64(buf[i]) - i++ - x += b << 7 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 7 - - b = uint64(buf[i]) - i++ - x += b << 14 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 14 - - b = uint64(buf[i]) - i++ - x += b << 21 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 21 - - b = uint64(buf[i]) - i++ - x += b << 28 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 28 - - b = uint64(buf[i]) - i++ - x += b << 35 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 35 - - b = uint64(buf[i]) - i++ - x += b << 42 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 42 - - b = uint64(buf[i]) - i++ - x += b << 49 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 49 - - b = uint64(buf[i]) - i++ - x += b << 56 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 56 - - b = uint64(buf[i]) - i++ - x += b << 63 - if b&0x80 == 0 { - goto done - } - // x -= 0x80 << 63 // Always zero. - - return 0, errOverflow - -done: - p.index = i - return x, nil -} - -// DecodeFixed64 reads a 64-bit integer from the Buffer. -// This is the format for the -// fixed64, sfixed64, and double protocol buffer types. 
-func (p *Buffer) DecodeFixed64() (x uint64, err error) { - // x, err already 0 - i := p.index + 8 - if i < 0 || i > len(p.buf) { - err = io.ErrUnexpectedEOF - return - } - p.index = i - - x = uint64(p.buf[i-8]) - x |= uint64(p.buf[i-7]) << 8 - x |= uint64(p.buf[i-6]) << 16 - x |= uint64(p.buf[i-5]) << 24 - x |= uint64(p.buf[i-4]) << 32 - x |= uint64(p.buf[i-3]) << 40 - x |= uint64(p.buf[i-2]) << 48 - x |= uint64(p.buf[i-1]) << 56 - return -} - -// DecodeFixed32 reads a 32-bit integer from the Buffer. -// This is the format for the -// fixed32, sfixed32, and float protocol buffer types. -func (p *Buffer) DecodeFixed32() (x uint64, err error) { - // x, err already 0 - i := p.index + 4 - if i < 0 || i > len(p.buf) { - err = io.ErrUnexpectedEOF - return - } - p.index = i - - x = uint64(p.buf[i-4]) - x |= uint64(p.buf[i-3]) << 8 - x |= uint64(p.buf[i-2]) << 16 - x |= uint64(p.buf[i-1]) << 24 - return -} - -// DecodeZigzag64 reads a zigzag-encoded 64-bit integer -// from the Buffer. -// This is the format used for the sint64 protocol buffer type. -func (p *Buffer) DecodeZigzag64() (x uint64, err error) { - x, err = p.DecodeVarint() - if err != nil { - return - } - x = (x >> 1) ^ uint64((int64(x&1)<<63)>>63) - return -} - -// DecodeZigzag32 reads a zigzag-encoded 32-bit integer -// from the Buffer. -// This is the format used for the sint32 protocol buffer type. -func (p *Buffer) DecodeZigzag32() (x uint64, err error) { - x, err = p.DecodeVarint() - if err != nil { - return - } - x = uint64((uint32(x) >> 1) ^ uint32((int32(x&1)<<31)>>31)) - return -} - -// These are not ValueDecoders: they produce an array of bytes or a string. -// bytes, embedded messages - -// DecodeRawBytes reads a count-delimited byte buffer from the Buffer. -// This is the format used for the bytes protocol buffer -// type and for embedded messages. -func (p *Buffer) DecodeRawBytes(alloc bool) (buf []byte, err error) { - n, err := p.DecodeVarint() - if err != nil { - return nil, err - } - - nb := int(n) - if nb < 0 { - return nil, fmt.Errorf("proto: bad byte length %d", nb) - } - end := p.index + nb - if end < p.index || end > len(p.buf) { - return nil, io.ErrUnexpectedEOF - } - - if !alloc { - // todo: check if can get more uses of alloc=false - buf = p.buf[p.index:end] - p.index += nb - return - } - - buf = make([]byte, nb) - copy(buf, p.buf[p.index:]) - p.index += nb - return -} - -// DecodeStringBytes reads an encoded string from the Buffer. -// This is the format used for the proto2 string type. -func (p *Buffer) DecodeStringBytes() (s string, err error) { - buf, err := p.DecodeRawBytes(false) - if err != nil { - return - } - return string(buf), nil -} - -// Skip the next item in the buffer. Its wire type is decoded and presented as an argument. -// If the protocol buffer has extensions, and the field matches, add it as an extension. -// Otherwise, if the XXX_unrecognized field exists, append the skipped data there. -func (o *Buffer) skipAndSave(t reflect.Type, tag, wire int, base structPointer, unrecField field) error { - oi := o.index - - err := o.skip(t, tag, wire) - if err != nil { - return err - } - - if !unrecField.IsValid() { - return nil - } - - ptr := structPointer_Bytes(base, unrecField) - - // Add the skipped field to struct field - obuf := o.buf - - o.buf = *ptr - o.EncodeVarint(uint64(tag<<3 | wire)) - *ptr = append(o.buf, obuf[oi:o.index]...) - - o.buf = obuf - - return nil -} - -// Skip the next item in the buffer. Its wire type is decoded and presented as an argument. 
-func (o *Buffer) skip(t reflect.Type, tag, wire int) error { - - var u uint64 - var err error - - switch wire { - case WireVarint: - _, err = o.DecodeVarint() - case WireFixed64: - _, err = o.DecodeFixed64() - case WireBytes: - _, err = o.DecodeRawBytes(false) - case WireFixed32: - _, err = o.DecodeFixed32() - case WireStartGroup: - for { - u, err = o.DecodeVarint() - if err != nil { - break - } - fwire := int(u & 0x7) - if fwire == WireEndGroup { - break - } - ftag := int(u >> 3) - err = o.skip(t, ftag, fwire) - if err != nil { - break - } - } - default: - err = fmt.Errorf("proto: can't skip unknown wire type %d for %s", wire, t) - } - return err -} - -// Unmarshaler is the interface representing objects that can -// unmarshal themselves. The method should reset the receiver before -// decoding starts. The argument points to data that may be -// overwritten, so implementations should not keep references to the -// buffer. -type Unmarshaler interface { - Unmarshal([]byte) error -} - -// Unmarshal parses the protocol buffer representation in buf and places the -// decoded result in pb. If the struct underlying pb does not match -// the data in buf, the results can be unpredictable. -// -// Unmarshal resets pb before starting to unmarshal, so any -// existing data in pb is always removed. Use UnmarshalMerge -// to preserve and append to existing data. -func Unmarshal(buf []byte, pb Message) error { - pb.Reset() - return UnmarshalMerge(buf, pb) -} - -// UnmarshalMerge parses the protocol buffer representation in buf and -// writes the decoded result to pb. If the struct underlying pb does not match -// the data in buf, the results can be unpredictable. -// -// UnmarshalMerge merges into existing data in pb. -// Most code should use Unmarshal instead. -func UnmarshalMerge(buf []byte, pb Message) error { - // If the object can unmarshal itself, let it. - if u, ok := pb.(Unmarshaler); ok { - return u.Unmarshal(buf) - } - return NewBuffer(buf).Unmarshal(pb) -} - -// DecodeMessage reads a count-delimited message from the Buffer. -func (p *Buffer) DecodeMessage(pb Message) error { - enc, err := p.DecodeRawBytes(false) - if err != nil { - return err - } - return NewBuffer(enc).Unmarshal(pb) -} - -// DecodeGroup reads a tag-delimited group from the Buffer. -func (p *Buffer) DecodeGroup(pb Message) error { - typ, base, err := getbase(pb) - if err != nil { - return err - } - return p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), true, base) -} - -// Unmarshal parses the protocol buffer representation in the -// Buffer and places the decoded result in pb. If the struct -// underlying pb does not match the data in the buffer, the results can be -// unpredictable. -// -// Unlike proto.Unmarshal, this does not reset pb before starting to unmarshal. -func (p *Buffer) Unmarshal(pb Message) error { - // If the object can unmarshal itself, let it. - if u, ok := pb.(Unmarshaler); ok { - err := u.Unmarshal(p.buf[p.index:]) - p.index = len(p.buf) - return err - } - - typ, base, err := getbase(pb) - if err != nil { - return err - } - - err = p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), false, base) - - if collectStats { - stats.Decode++ - } - - return err -} - -// unmarshalType does the work of unmarshaling a structure. 
-func (o *Buffer) unmarshalType(st reflect.Type, prop *StructProperties, is_group bool, base structPointer) error { - var state errorState - required, reqFields := prop.reqCount, uint64(0) - - var err error - for err == nil && o.index < len(o.buf) { - oi := o.index - var u uint64 - u, err = o.DecodeVarint() - if err != nil { - break - } - wire := int(u & 0x7) - if wire == WireEndGroup { - if is_group { - if required > 0 { - // Not enough information to determine the exact field. - // (See below.) - return &RequiredNotSetError{"{Unknown}"} - } - return nil // input is satisfied - } - return fmt.Errorf("proto: %s: wiretype end group for non-group", st) - } - tag := int(u >> 3) - if tag <= 0 { - return fmt.Errorf("proto: %s: illegal tag %d (wire type %d)", st, tag, wire) - } - fieldnum, ok := prop.decoderTags.get(tag) - if !ok { - // Maybe it's an extension? - if prop.extendable { - if e, _ := extendable(structPointer_Interface(base, st)); isExtensionField(e, int32(tag)) { - if err = o.skip(st, tag, wire); err == nil { - extmap := e.extensionsWrite() - ext := extmap[int32(tag)] // may be missing - ext.enc = append(ext.enc, o.buf[oi:o.index]...) - extmap[int32(tag)] = ext - } - continue - } - } - // Maybe it's a oneof? - if prop.oneofUnmarshaler != nil { - m := structPointer_Interface(base, st).(Message) - // First return value indicates whether tag is a oneof field. - ok, err = prop.oneofUnmarshaler(m, tag, wire, o) - if err == ErrInternalBadWireType { - // Map the error to something more descriptive. - // Do the formatting here to save generated code space. - err = fmt.Errorf("bad wiretype for oneof field in %T", m) - } - if ok { - continue - } - } - err = o.skipAndSave(st, tag, wire, base, prop.unrecField) - continue - } - p := prop.Prop[fieldnum] - - if p.dec == nil { - fmt.Fprintf(os.Stderr, "proto: no protobuf decoder for %s.%s\n", st, st.Field(fieldnum).Name) - continue - } - dec := p.dec - if wire != WireStartGroup && wire != p.WireType { - if wire == WireBytes && p.packedDec != nil { - // a packable field - dec = p.packedDec - } else { - err = fmt.Errorf("proto: bad wiretype for field %s.%s: got wiretype %d, want %d", st, st.Field(fieldnum).Name, wire, p.WireType) - continue - } - } - decErr := dec(o, p, base) - if decErr != nil && !state.shouldContinue(decErr, p) { - err = decErr - } - if err == nil && p.Required { - // Successfully decoded a required field. - if tag <= 64 { - // use bitmap for fields 1-64 to catch field reuse. - var mask uint64 = 1 << uint64(tag-1) - if reqFields&mask == 0 { - // new required field - reqFields |= mask - required-- - } - } else { - // This is imprecise. It can be fooled by a required field - // with a tag > 64 that is encoded twice; that's very rare. - // A fully correct implementation would require allocating - // a data structure, which we would like to avoid. - required-- - } - } - } - if err == nil { - if is_group { - return io.ErrUnexpectedEOF - } - if state.err != nil { - return state.err - } - if required > 0 { - // Not enough information to determine the exact field. If we use extra - // CPU, we could determine the field only if the missing required field - // has a tag <= 64 and we check reqFields. - return &RequiredNotSetError{"{Unknown}"} - } - } - return err -} - -// Individual type decoders -// For each, -// u is the decoded value, -// v is a pointer to the field (pointer) in the struct - -// Sizes of the pools to allocate inside the Buffer. -// The goal is modest amortization and allocation -// on at least 16-byte boundaries. 
-const ( - boolPoolSize = 16 - uint32PoolSize = 8 - uint64PoolSize = 4 -) - -// Decode a bool. -func (o *Buffer) dec_bool(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - if len(o.bools) == 0 { - o.bools = make([]bool, boolPoolSize) - } - o.bools[0] = u != 0 - *structPointer_Bool(base, p.field) = &o.bools[0] - o.bools = o.bools[1:] - return nil -} - -func (o *Buffer) dec_proto3_bool(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - *structPointer_BoolVal(base, p.field) = u != 0 - return nil -} - -// Decode an int32. -func (o *Buffer) dec_int32(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word32_Set(structPointer_Word32(base, p.field), o, uint32(u)) - return nil -} - -func (o *Buffer) dec_proto3_int32(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word32Val_Set(structPointer_Word32Val(base, p.field), uint32(u)) - return nil -} - -// Decode an int64. -func (o *Buffer) dec_int64(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word64_Set(structPointer_Word64(base, p.field), o, u) - return nil -} - -func (o *Buffer) dec_proto3_int64(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - word64Val_Set(structPointer_Word64Val(base, p.field), o, u) - return nil -} - -// Decode a string. -func (o *Buffer) dec_string(p *Properties, base structPointer) error { - s, err := o.DecodeStringBytes() - if err != nil { - return err - } - *structPointer_String(base, p.field) = &s - return nil -} - -func (o *Buffer) dec_proto3_string(p *Properties, base structPointer) error { - s, err := o.DecodeStringBytes() - if err != nil { - return err - } - *structPointer_StringVal(base, p.field) = s - return nil -} - -// Decode a slice of bytes ([]byte). -func (o *Buffer) dec_slice_byte(p *Properties, base structPointer) error { - b, err := o.DecodeRawBytes(true) - if err != nil { - return err - } - *structPointer_Bytes(base, p.field) = b - return nil -} - -// Decode a slice of bools ([]bool). -func (o *Buffer) dec_slice_bool(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - v := structPointer_BoolSlice(base, p.field) - *v = append(*v, u != 0) - return nil -} - -// Decode a slice of bools ([]bool) in packed format. -func (o *Buffer) dec_slice_packed_bool(p *Properties, base structPointer) error { - v := structPointer_BoolSlice(base, p.field) - - nn, err := o.DecodeVarint() - if err != nil { - return err - } - nb := int(nn) // number of bytes of encoded bools - fin := o.index + nb - if fin < o.index { - return errOverflow - } - - y := *v - for o.index < fin { - u, err := p.valDec(o) - if err != nil { - return err - } - y = append(y, u != 0) - } - - *v = y - return nil -} - -// Decode a slice of int32s ([]int32). -func (o *Buffer) dec_slice_int32(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - structPointer_Word32Slice(base, p.field).Append(uint32(u)) - return nil -} - -// Decode a slice of int32s ([]int32) in packed format. 
-func (o *Buffer) dec_slice_packed_int32(p *Properties, base structPointer) error { - v := structPointer_Word32Slice(base, p.field) - - nn, err := o.DecodeVarint() - if err != nil { - return err - } - nb := int(nn) // number of bytes of encoded int32s - - fin := o.index + nb - if fin < o.index { - return errOverflow - } - for o.index < fin { - u, err := p.valDec(o) - if err != nil { - return err - } - v.Append(uint32(u)) - } - return nil -} - -// Decode a slice of int64s ([]int64). -func (o *Buffer) dec_slice_int64(p *Properties, base structPointer) error { - u, err := p.valDec(o) - if err != nil { - return err - } - - structPointer_Word64Slice(base, p.field).Append(u) - return nil -} - -// Decode a slice of int64s ([]int64) in packed format. -func (o *Buffer) dec_slice_packed_int64(p *Properties, base structPointer) error { - v := structPointer_Word64Slice(base, p.field) - - nn, err := o.DecodeVarint() - if err != nil { - return err - } - nb := int(nn) // number of bytes of encoded int64s - - fin := o.index + nb - if fin < o.index { - return errOverflow - } - for o.index < fin { - u, err := p.valDec(o) - if err != nil { - return err - } - v.Append(u) - } - return nil -} - -// Decode a slice of strings ([]string). -func (o *Buffer) dec_slice_string(p *Properties, base structPointer) error { - s, err := o.DecodeStringBytes() - if err != nil { - return err - } - v := structPointer_StringSlice(base, p.field) - *v = append(*v, s) - return nil -} - -// Decode a slice of slice of bytes ([][]byte). -func (o *Buffer) dec_slice_slice_byte(p *Properties, base structPointer) error { - b, err := o.DecodeRawBytes(true) - if err != nil { - return err - } - v := structPointer_BytesSlice(base, p.field) - *v = append(*v, b) - return nil -} - -// Decode a map field. -func (o *Buffer) dec_new_map(p *Properties, base structPointer) error { - raw, err := o.DecodeRawBytes(false) - if err != nil { - return err - } - oi := o.index // index at the end of this map entry - o.index -= len(raw) // move buffer back to start of map entry - - mptr := structPointer_NewAt(base, p.field, p.mtype) // *map[K]V - if mptr.Elem().IsNil() { - mptr.Elem().Set(reflect.MakeMap(mptr.Type().Elem())) - } - v := mptr.Elem() // map[K]V - - // Prepare addressable doubly-indirect placeholders for the key and value types. - // See enc_new_map for why. - keyptr := reflect.New(reflect.PtrTo(p.mtype.Key())).Elem() // addressable *K - keybase := toStructPointer(keyptr.Addr()) // **K - - var valbase structPointer - var valptr reflect.Value - switch p.mtype.Elem().Kind() { - case reflect.Slice: - // []byte - var dummy []byte - valptr = reflect.ValueOf(&dummy) // *[]byte - valbase = toStructPointer(valptr) // *[]byte - case reflect.Ptr: - // message; valptr is **Msg; need to allocate the intermediate pointer - valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V - valptr.Set(reflect.New(valptr.Type().Elem())) - valbase = toStructPointer(valptr) - default: - // everything else - valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V - valbase = toStructPointer(valptr.Addr()) // **V - } - - // Decode. - // This parses a restricted wire format, namely the encoding of a message - // with two fields. See enc_new_map for the format. - for o.index < oi { - // tagcode for key and value properties are always a single byte - // because they have tags 1 and 2. 
- tagcode := o.buf[o.index] - o.index++ - switch tagcode { - case p.mkeyprop.tagcode[0]: - if err := p.mkeyprop.dec(o, p.mkeyprop, keybase); err != nil { - return err - } - case p.mvalprop.tagcode[0]: - if err := p.mvalprop.dec(o, p.mvalprop, valbase); err != nil { - return err - } - default: - // TODO: Should we silently skip this instead? - return fmt.Errorf("proto: bad map data tag %d", raw[0]) - } - } - keyelem, valelem := keyptr.Elem(), valptr.Elem() - if !keyelem.IsValid() { - keyelem = reflect.Zero(p.mtype.Key()) - } - if !valelem.IsValid() { - valelem = reflect.Zero(p.mtype.Elem()) - } - - v.SetMapIndex(keyelem, valelem) - return nil -} - -// Decode a group. -func (o *Buffer) dec_struct_group(p *Properties, base structPointer) error { - bas := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(bas) { - // allocate new nested message - bas = toStructPointer(reflect.New(p.stype)) - structPointer_SetStructPointer(base, p.field, bas) - } - return o.unmarshalType(p.stype, p.sprop, true, bas) -} - -// Decode an embedded message. -func (o *Buffer) dec_struct_message(p *Properties, base structPointer) (err error) { - raw, e := o.DecodeRawBytes(false) - if e != nil { - return e - } - - bas := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(bas) { - // allocate new nested message - bas = toStructPointer(reflect.New(p.stype)) - structPointer_SetStructPointer(base, p.field, bas) - } - - // If the object can unmarshal itself, let it. - if p.isUnmarshaler { - iv := structPointer_Interface(bas, p.stype) - return iv.(Unmarshaler).Unmarshal(raw) - } - - obuf := o.buf - oi := o.index - o.buf = raw - o.index = 0 - - err = o.unmarshalType(p.stype, p.sprop, false, bas) - o.buf = obuf - o.index = oi - - return err -} - -// Decode a slice of embedded messages. -func (o *Buffer) dec_slice_struct_message(p *Properties, base structPointer) error { - return o.dec_slice_struct(p, false, base) -} - -// Decode a slice of embedded groups. -func (o *Buffer) dec_slice_struct_group(p *Properties, base structPointer) error { - return o.dec_slice_struct(p, true, base) -} - -// Decode a slice of structs ([]*struct). -func (o *Buffer) dec_slice_struct(p *Properties, is_group bool, base structPointer) error { - v := reflect.New(p.stype) - bas := toStructPointer(v) - structPointer_StructPointerSlice(base, p.field).Append(bas) - - if is_group { - err := o.unmarshalType(p.stype, p.sprop, is_group, bas) - return err - } - - raw, err := o.DecodeRawBytes(false) - if err != nil { - return err - } - - // If the object can unmarshal itself, let it. - if p.isUnmarshaler { - iv := v.Interface() - return iv.(Unmarshaler).Unmarshal(raw) - } - - obuf := o.buf - oi := o.index - o.buf = raw - o.index = 0 - - err = o.unmarshalType(p.stype, p.sprop, is_group, bas) - - o.buf = obuf - o.index = oi - - return err -} diff --git a/vendor/github.com/golang/protobuf/proto/encode.go b/vendor/github.com/golang/protobuf/proto/encode.go deleted file mode 100644 index 2b30f84..0000000 --- a/vendor/github.com/golang/protobuf/proto/encode.go +++ /dev/null @@ -1,1362 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. 
-// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Routines for encoding data into the wire format for protocol buffers. - */ - -import ( - "errors" - "fmt" - "reflect" - "sort" -) - -// RequiredNotSetError is the error returned if Marshal is called with -// a protocol buffer struct whose required fields have not -// all been initialized. It is also the error returned if Unmarshal is -// called with an encoded protocol buffer that does not include all the -// required fields. -// -// When printed, RequiredNotSetError reports the first unset required field in a -// message. If the field cannot be precisely determined, it is reported as -// "{Unknown}". -type RequiredNotSetError struct { - field string -} - -func (e *RequiredNotSetError) Error() string { - return fmt.Sprintf("proto: required field %q not set", e.field) -} - -var ( - // errRepeatedHasNil is the error returned if Marshal is called with - // a struct with a repeated field containing a nil element. - errRepeatedHasNil = errors.New("proto: repeated field has nil element") - - // errOneofHasNil is the error returned if Marshal is called with - // a struct with a oneof field containing a nil element. - errOneofHasNil = errors.New("proto: oneof field has nil value") - - // ErrNil is the error returned if Marshal is called with nil. - ErrNil = errors.New("proto: Marshal called with nil") - - // ErrTooLarge is the error returned if Marshal is called with a - // message that encodes to >2GB. - ErrTooLarge = errors.New("proto: message encodes to over 2 GB") -) - -// The fundamental encoders that put bytes on the wire. -// Those that take integer types all accept uint64 and are -// therefore of type valueEncoder. - -const maxVarintBytes = 10 // maximum length of a varint - -// maxMarshalSize is the largest allowed size of an encoded protobuf, -// since C++ and Java use signed int32s for the size. -const maxMarshalSize = 1<<31 - 1 - -// EncodeVarint returns the varint encoding of x. 
-// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -// Not used by the package itself, but helpful to clients -// wishing to use the same encoding. -func EncodeVarint(x uint64) []byte { - var buf [maxVarintBytes]byte - var n int - for n = 0; x > 127; n++ { - buf[n] = 0x80 | uint8(x&0x7F) - x >>= 7 - } - buf[n] = uint8(x) - n++ - return buf[0:n] -} - -// EncodeVarint writes a varint-encoded integer to the Buffer. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -func (p *Buffer) EncodeVarint(x uint64) error { - for x >= 1<<7 { - p.buf = append(p.buf, uint8(x&0x7f|0x80)) - x >>= 7 - } - p.buf = append(p.buf, uint8(x)) - return nil -} - -// SizeVarint returns the varint encoding size of an integer. -func SizeVarint(x uint64) int { - return sizeVarint(x) -} - -func sizeVarint(x uint64) (n int) { - for { - n++ - x >>= 7 - if x == 0 { - break - } - } - return n -} - -// EncodeFixed64 writes a 64-bit integer to the Buffer. -// This is the format for the -// fixed64, sfixed64, and double protocol buffer types. -func (p *Buffer) EncodeFixed64(x uint64) error { - p.buf = append(p.buf, - uint8(x), - uint8(x>>8), - uint8(x>>16), - uint8(x>>24), - uint8(x>>32), - uint8(x>>40), - uint8(x>>48), - uint8(x>>56)) - return nil -} - -func sizeFixed64(x uint64) int { - return 8 -} - -// EncodeFixed32 writes a 32-bit integer to the Buffer. -// This is the format for the -// fixed32, sfixed32, and float protocol buffer types. -func (p *Buffer) EncodeFixed32(x uint64) error { - p.buf = append(p.buf, - uint8(x), - uint8(x>>8), - uint8(x>>16), - uint8(x>>24)) - return nil -} - -func sizeFixed32(x uint64) int { - return 4 -} - -// EncodeZigzag64 writes a zigzag-encoded 64-bit integer -// to the Buffer. -// This is the format used for the sint64 protocol buffer type. -func (p *Buffer) EncodeZigzag64(x uint64) error { - // use signed number to get arithmetic right shift. - return p.EncodeVarint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} - -func sizeZigzag64(x uint64) int { - return sizeVarint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} - -// EncodeZigzag32 writes a zigzag-encoded 32-bit integer -// to the Buffer. -// This is the format used for the sint32 protocol buffer type. -func (p *Buffer) EncodeZigzag32(x uint64) error { - // use signed number to get arithmetic right shift. - return p.EncodeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) -} - -func sizeZigzag32(x uint64) int { - return sizeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) -} - -// EncodeRawBytes writes a count-delimited byte buffer to the Buffer. -// This is the format used for the bytes protocol buffer -// type and for embedded messages. -func (p *Buffer) EncodeRawBytes(b []byte) error { - p.EncodeVarint(uint64(len(b))) - p.buf = append(p.buf, b...) - return nil -} - -func sizeRawBytes(b []byte) int { - return sizeVarint(uint64(len(b))) + - len(b) -} - -// EncodeStringBytes writes an encoded string to the Buffer. -// This is the format used for the proto2 string type. -func (p *Buffer) EncodeStringBytes(s string) error { - p.EncodeVarint(uint64(len(s))) - p.buf = append(p.buf, s...) - return nil -} - -func sizeStringBytes(s string) int { - return sizeVarint(uint64(len(s))) + - len(s) -} - -// Marshaler is the interface representing objects that can marshal themselves. 
-type Marshaler interface { - Marshal() ([]byte, error) -} - -// Marshal takes the protocol buffer -// and encodes it into the wire format, returning the data. -func Marshal(pb Message) ([]byte, error) { - // Can the object marshal itself? - if m, ok := pb.(Marshaler); ok { - return m.Marshal() - } - p := NewBuffer(nil) - err := p.Marshal(pb) - if p.buf == nil && err == nil { - // Return a non-nil slice on success. - return []byte{}, nil - } - return p.buf, err -} - -// EncodeMessage writes the protocol buffer to the Buffer, -// prefixed by a varint-encoded length. -func (p *Buffer) EncodeMessage(pb Message) error { - t, base, err := getbase(pb) - if structPointer_IsNil(base) { - return ErrNil - } - if err == nil { - var state errorState - err = p.enc_len_struct(GetProperties(t.Elem()), base, &state) - } - return err -} - -// Marshal takes the protocol buffer -// and encodes it into the wire format, writing the result to the -// Buffer. -func (p *Buffer) Marshal(pb Message) error { - // Can the object marshal itself? - if m, ok := pb.(Marshaler); ok { - data, err := m.Marshal() - p.buf = append(p.buf, data...) - return err - } - - t, base, err := getbase(pb) - if structPointer_IsNil(base) { - return ErrNil - } - if err == nil { - err = p.enc_struct(GetProperties(t.Elem()), base) - } - - if collectStats { - (stats).Encode++ // Parens are to work around a goimports bug. - } - - if len(p.buf) > maxMarshalSize { - return ErrTooLarge - } - return err -} - -// Size returns the encoded size of a protocol buffer. -func Size(pb Message) (n int) { - // Can the object marshal itself? If so, Size is slow. - // TODO: add Size to Marshaler, or add a Sizer interface. - if m, ok := pb.(Marshaler); ok { - b, _ := m.Marshal() - return len(b) - } - - t, base, err := getbase(pb) - if structPointer_IsNil(base) { - return 0 - } - if err == nil { - n = size_struct(GetProperties(t.Elem()), base) - } - - if collectStats { - (stats).Size++ // Parens are to work around a goimports bug. - } - - return -} - -// Individual type encoders. - -// Encode a bool. -func (o *Buffer) enc_bool(p *Properties, base structPointer) error { - v := *structPointer_Bool(base, p.field) - if v == nil { - return ErrNil - } - x := 0 - if *v { - x = 1 - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func (o *Buffer) enc_proto3_bool(p *Properties, base structPointer) error { - v := *structPointer_BoolVal(base, p.field) - if !v { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, 1) - return nil -} - -func size_bool(p *Properties, base structPointer) int { - v := *structPointer_Bool(base, p.field) - if v == nil { - return 0 - } - return len(p.tagcode) + 1 // each bool takes exactly one byte -} - -func size_proto3_bool(p *Properties, base structPointer) int { - v := *structPointer_BoolVal(base, p.field) - if !v && !p.oneof { - return 0 - } - return len(p.tagcode) + 1 // each bool takes exactly one byte -} - -// Encode an int32. -func (o *Buffer) enc_int32(p *Properties, base structPointer) error { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return ErrNil - } - x := int32(word32_Get(v)) // permit sign extension to use full 64-bit range - o.buf = append(o.buf, p.tagcode...) 
- p.valEnc(o, uint64(x)) - return nil -} - -func (o *Buffer) enc_proto3_int32(p *Properties, base structPointer) error { - v := structPointer_Word32Val(base, p.field) - x := int32(word32Val_Get(v)) // permit sign extension to use full 64-bit range - if x == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func size_int32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return 0 - } - x := int32(word32_Get(v)) // permit sign extension to use full 64-bit range - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -func size_proto3_int32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32Val(base, p.field) - x := int32(word32Val_Get(v)) // permit sign extension to use full 64-bit range - if x == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -// Encode a uint32. -// Exactly the same as int32, except for no sign extension. -func (o *Buffer) enc_uint32(p *Properties, base structPointer) error { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return ErrNil - } - x := word32_Get(v) - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func (o *Buffer) enc_proto3_uint32(p *Properties, base structPointer) error { - v := structPointer_Word32Val(base, p.field) - x := word32Val_Get(v) - if x == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, uint64(x)) - return nil -} - -func size_uint32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32(base, p.field) - if word32_IsNil(v) { - return 0 - } - x := word32_Get(v) - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -func size_proto3_uint32(p *Properties, base structPointer) (n int) { - v := structPointer_Word32Val(base, p.field) - x := word32Val_Get(v) - if x == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += p.valSize(uint64(x)) - return -} - -// Encode an int64. -func (o *Buffer) enc_int64(p *Properties, base structPointer) error { - v := structPointer_Word64(base, p.field) - if word64_IsNil(v) { - return ErrNil - } - x := word64_Get(v) - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, x) - return nil -} - -func (o *Buffer) enc_proto3_int64(p *Properties, base structPointer) error { - v := structPointer_Word64Val(base, p.field) - x := word64Val_Get(v) - if x == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, x) - return nil -} - -func size_int64(p *Properties, base structPointer) (n int) { - v := structPointer_Word64(base, p.field) - if word64_IsNil(v) { - return 0 - } - x := word64_Get(v) - n += len(p.tagcode) - n += p.valSize(x) - return -} - -func size_proto3_int64(p *Properties, base structPointer) (n int) { - v := structPointer_Word64Val(base, p.field) - x := word64Val_Get(v) - if x == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += p.valSize(x) - return -} - -// Encode a string. -func (o *Buffer) enc_string(p *Properties, base structPointer) error { - v := *structPointer_String(base, p.field) - if v == nil { - return ErrNil - } - x := *v - o.buf = append(o.buf, p.tagcode...) - o.EncodeStringBytes(x) - return nil -} - -func (o *Buffer) enc_proto3_string(p *Properties, base structPointer) error { - v := *structPointer_StringVal(base, p.field) - if v == "" { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) 
- o.EncodeStringBytes(v) - return nil -} - -func size_string(p *Properties, base structPointer) (n int) { - v := *structPointer_String(base, p.field) - if v == nil { - return 0 - } - x := *v - n += len(p.tagcode) - n += sizeStringBytes(x) - return -} - -func size_proto3_string(p *Properties, base structPointer) (n int) { - v := *structPointer_StringVal(base, p.field) - if v == "" && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += sizeStringBytes(v) - return -} - -// All protocol buffer fields are nillable, but be careful. -func isNil(v reflect.Value) bool { - switch v.Kind() { - case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return v.IsNil() - } - return false -} - -// Encode a message struct. -func (o *Buffer) enc_struct_message(p *Properties, base structPointer) error { - var state errorState - structp := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(structp) { - return ErrNil - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, err := m.Marshal() - if err != nil && !state.shouldContinue(err, nil) { - return err - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(data) - return state.err - } - - o.buf = append(o.buf, p.tagcode...) - return o.enc_len_struct(p.sprop, structp, &state) -} - -func size_struct_message(p *Properties, base structPointer) int { - structp := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(structp) { - return 0 - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, _ := m.Marshal() - n0 := len(p.tagcode) - n1 := sizeRawBytes(data) - return n0 + n1 - } - - n0 := len(p.tagcode) - n1 := size_struct(p.sprop, structp) - n2 := sizeVarint(uint64(n1)) // size of encoded length - return n0 + n1 + n2 -} - -// Encode a group struct. -func (o *Buffer) enc_struct_group(p *Properties, base structPointer) error { - var state errorState - b := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(b) { - return ErrNil - } - - o.EncodeVarint(uint64((p.Tag << 3) | WireStartGroup)) - err := o.enc_struct(p.sprop, b) - if err != nil && !state.shouldContinue(err, nil) { - return err - } - o.EncodeVarint(uint64((p.Tag << 3) | WireEndGroup)) - return state.err -} - -func size_struct_group(p *Properties, base structPointer) (n int) { - b := structPointer_GetStructPointer(base, p.field) - if structPointer_IsNil(b) { - return 0 - } - - n += sizeVarint(uint64((p.Tag << 3) | WireStartGroup)) - n += size_struct(p.sprop, b) - n += sizeVarint(uint64((p.Tag << 3) | WireEndGroup)) - return -} - -// Encode a slice of bools ([]bool). -func (o *Buffer) enc_slice_bool(p *Properties, base structPointer) error { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return ErrNil - } - for _, x := range s { - o.buf = append(o.buf, p.tagcode...) - v := uint64(0) - if x { - v = 1 - } - p.valEnc(o, v) - } - return nil -} - -func size_slice_bool(p *Properties, base structPointer) int { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return 0 - } - return l * (len(p.tagcode) + 1) // each bool takes exactly one byte -} - -// Encode a slice of bools ([]bool) in packed format. -func (o *Buffer) enc_slice_packed_bool(p *Properties, base structPointer) error { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) 
- o.EncodeVarint(uint64(l)) // each bool takes exactly one byte - for _, x := range s { - v := uint64(0) - if x { - v = 1 - } - p.valEnc(o, v) - } - return nil -} - -func size_slice_packed_bool(p *Properties, base structPointer) (n int) { - s := *structPointer_BoolSlice(base, p.field) - l := len(s) - if l == 0 { - return 0 - } - n += len(p.tagcode) - n += sizeVarint(uint64(l)) - n += l // each bool takes exactly one byte - return -} - -// Encode a slice of bytes ([]byte). -func (o *Buffer) enc_slice_byte(p *Properties, base structPointer) error { - s := *structPointer_Bytes(base, p.field) - if s == nil { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(s) - return nil -} - -func (o *Buffer) enc_proto3_slice_byte(p *Properties, base structPointer) error { - s := *structPointer_Bytes(base, p.field) - if len(s) == 0 { - return ErrNil - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(s) - return nil -} - -func size_slice_byte(p *Properties, base structPointer) (n int) { - s := *structPointer_Bytes(base, p.field) - if s == nil && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += sizeRawBytes(s) - return -} - -func size_proto3_slice_byte(p *Properties, base structPointer) (n int) { - s := *structPointer_Bytes(base, p.field) - if len(s) == 0 && !p.oneof { - return 0 - } - n += len(p.tagcode) - n += sizeRawBytes(s) - return -} - -// Encode a slice of int32s ([]int32). -func (o *Buffer) enc_slice_int32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - p.valEnc(o, uint64(x)) - } - return nil -} - -func size_slice_int32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - for i := 0; i < l; i++ { - n += len(p.tagcode) - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - n += p.valSize(uint64(x)) - } - return -} - -// Encode a slice of int32s ([]int32) in packed format. -func (o *Buffer) enc_slice_packed_int32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - // TODO: Reuse a Buffer. - buf := NewBuffer(nil) - for i := 0; i < l; i++ { - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - p.valEnc(buf, uint64(x)) - } - - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(len(buf.buf))) - o.buf = append(o.buf, buf.buf...) - return nil -} - -func size_slice_packed_int32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - var bufSize int - for i := 0; i < l; i++ { - x := int32(s.Index(i)) // permit sign extension to use full 64-bit range - bufSize += p.valSize(uint64(x)) - } - - n += len(p.tagcode) - n += sizeVarint(uint64(bufSize)) - n += bufSize - return -} - -// Encode a slice of uint32s ([]uint32). -// Exactly the same as int32, except for no sign extension. -func (o *Buffer) enc_slice_uint32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) 
- x := s.Index(i) - p.valEnc(o, uint64(x)) - } - return nil -} - -func size_slice_uint32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - for i := 0; i < l; i++ { - n += len(p.tagcode) - x := s.Index(i) - n += p.valSize(uint64(x)) - } - return -} - -// Encode a slice of uint32s ([]uint32) in packed format. -// Exactly the same as int32, except for no sign extension. -func (o *Buffer) enc_slice_packed_uint32(p *Properties, base structPointer) error { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - // TODO: Reuse a Buffer. - buf := NewBuffer(nil) - for i := 0; i < l; i++ { - p.valEnc(buf, uint64(s.Index(i))) - } - - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(len(buf.buf))) - o.buf = append(o.buf, buf.buf...) - return nil -} - -func size_slice_packed_uint32(p *Properties, base structPointer) (n int) { - s := structPointer_Word32Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - var bufSize int - for i := 0; i < l; i++ { - bufSize += p.valSize(uint64(s.Index(i))) - } - - n += len(p.tagcode) - n += sizeVarint(uint64(bufSize)) - n += bufSize - return -} - -// Encode a slice of int64s ([]int64). -func (o *Buffer) enc_slice_int64(p *Properties, base structPointer) error { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) - p.valEnc(o, s.Index(i)) - } - return nil -} - -func size_slice_int64(p *Properties, base structPointer) (n int) { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - for i := 0; i < l; i++ { - n += len(p.tagcode) - n += p.valSize(s.Index(i)) - } - return -} - -// Encode a slice of int64s ([]int64) in packed format. -func (o *Buffer) enc_slice_packed_int64(p *Properties, base structPointer) error { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return ErrNil - } - // TODO: Reuse a Buffer. - buf := NewBuffer(nil) - for i := 0; i < l; i++ { - p.valEnc(buf, s.Index(i)) - } - - o.buf = append(o.buf, p.tagcode...) - o.EncodeVarint(uint64(len(buf.buf))) - o.buf = append(o.buf, buf.buf...) - return nil -} - -func size_slice_packed_int64(p *Properties, base structPointer) (n int) { - s := structPointer_Word64Slice(base, p.field) - l := s.Len() - if l == 0 { - return 0 - } - var bufSize int - for i := 0; i < l; i++ { - bufSize += p.valSize(s.Index(i)) - } - - n += len(p.tagcode) - n += sizeVarint(uint64(bufSize)) - n += bufSize - return -} - -// Encode a slice of slice of bytes ([][]byte). -func (o *Buffer) enc_slice_slice_byte(p *Properties, base structPointer) error { - ss := *structPointer_BytesSlice(base, p.field) - l := len(ss) - if l == 0 { - return ErrNil - } - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(ss[i]) - } - return nil -} - -func size_slice_slice_byte(p *Properties, base structPointer) (n int) { - ss := *structPointer_BytesSlice(base, p.field) - l := len(ss) - if l == 0 { - return 0 - } - n += l * len(p.tagcode) - for i := 0; i < l; i++ { - n += sizeRawBytes(ss[i]) - } - return -} - -// Encode a slice of strings ([]string). -func (o *Buffer) enc_slice_string(p *Properties, base structPointer) error { - ss := *structPointer_StringSlice(base, p.field) - l := len(ss) - for i := 0; i < l; i++ { - o.buf = append(o.buf, p.tagcode...) 
- o.EncodeStringBytes(ss[i]) - } - return nil -} - -func size_slice_string(p *Properties, base structPointer) (n int) { - ss := *structPointer_StringSlice(base, p.field) - l := len(ss) - n += l * len(p.tagcode) - for i := 0; i < l; i++ { - n += sizeStringBytes(ss[i]) - } - return -} - -// Encode a slice of message structs ([]*struct). -func (o *Buffer) enc_slice_struct_message(p *Properties, base structPointer) error { - var state errorState - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - - for i := 0; i < l; i++ { - structp := s.Index(i) - if structPointer_IsNil(structp) { - return errRepeatedHasNil - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, err := m.Marshal() - if err != nil && !state.shouldContinue(err, nil) { - return err - } - o.buf = append(o.buf, p.tagcode...) - o.EncodeRawBytes(data) - continue - } - - o.buf = append(o.buf, p.tagcode...) - err := o.enc_len_struct(p.sprop, structp, &state) - if err != nil && !state.shouldContinue(err, nil) { - if err == ErrNil { - return errRepeatedHasNil - } - return err - } - } - return state.err -} - -func size_slice_struct_message(p *Properties, base structPointer) (n int) { - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - n += l * len(p.tagcode) - for i := 0; i < l; i++ { - structp := s.Index(i) - if structPointer_IsNil(structp) { - return // return the size up to this point - } - - // Can the object marshal itself? - if p.isMarshaler { - m := structPointer_Interface(structp, p.stype).(Marshaler) - data, _ := m.Marshal() - n += sizeRawBytes(data) - continue - } - - n0 := size_struct(p.sprop, structp) - n1 := sizeVarint(uint64(n0)) // size of encoded length - n += n0 + n1 - } - return -} - -// Encode a slice of group structs ([]*struct). -func (o *Buffer) enc_slice_struct_group(p *Properties, base structPointer) error { - var state errorState - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - - for i := 0; i < l; i++ { - b := s.Index(i) - if structPointer_IsNil(b) { - return errRepeatedHasNil - } - - o.EncodeVarint(uint64((p.Tag << 3) | WireStartGroup)) - - err := o.enc_struct(p.sprop, b) - - if err != nil && !state.shouldContinue(err, nil) { - if err == ErrNil { - return errRepeatedHasNil - } - return err - } - - o.EncodeVarint(uint64((p.Tag << 3) | WireEndGroup)) - } - return state.err -} - -func size_slice_struct_group(p *Properties, base structPointer) (n int) { - s := structPointer_StructPointerSlice(base, p.field) - l := s.Len() - - n += l * sizeVarint(uint64((p.Tag<<3)|WireStartGroup)) - n += l * sizeVarint(uint64((p.Tag<<3)|WireEndGroup)) - for i := 0; i < l; i++ { - b := s.Index(i) - if structPointer_IsNil(b) { - return // return size up to this point - } - - n += size_struct(p.sprop, b) - } - return -} - -// Encode an extension map. 
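Aside: the encoders removed above all bottom out in the same wire primitives — varints, zigzag for sint*, and length-delimited chunks for bytes, strings, embedded messages, and packed repeated fields. A minimal, self-contained sketch of those primitives (independent of the deleted Buffer API; field number 4 and the sample values are arbitrary, chosen only for illustration):

package main

import "fmt"

// encodeVarint mirrors the varint logic in the deleted encode.go: 7 bits per
// byte, least-significant group first, high bit set on every byte but the last.
func encodeVarint(x uint64) []byte {
	var out []byte
	for x >= 1<<7 {
		out = append(out, byte(x&0x7f|0x80))
		x >>= 7
	}
	return append(out, byte(x))
}

// zigzag64 is the sint64 mapping used by EncodeZigzag64 above: it interleaves
// negative and positive values so small magnitudes stay short on the wire.
func zigzag64(v int64) uint64 {
	return uint64(v<<1) ^ uint64(v>>63)
}

func main() {
	// A packed repeated int32 field is one length-delimited record:
	// tag, payload length, then the varint-encoded elements back to back.
	const fieldNum, wireBytes = 4, 2
	values := []int32{3, 270}

	var payload []byte
	for _, v := range values {
		payload = append(payload, encodeVarint(uint64(v))...)
	}

	record := encodeVarint(uint64(fieldNum<<3 | wireBytes)) // tag byte 0x22
	record = append(record, encodeVarint(uint64(len(payload)))...)
	record = append(record, payload...)

	fmt.Printf("packed field: % x\n", record)                     // 22 03 03 8e 02
	fmt.Printf("zigzag(-1):   % x\n", encodeVarint(zigzag64(-1))) // 01
}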
-func (o *Buffer) enc_map(p *Properties, base structPointer) error { - exts := structPointer_ExtMap(base, p.field) - if err := encodeExtensionsMap(*exts); err != nil { - return err - } - - return o.enc_map_body(*exts) -} - -func (o *Buffer) enc_exts(p *Properties, base structPointer) error { - exts := structPointer_Extensions(base, p.field) - - v, mu := exts.extensionsRead() - if v == nil { - return nil - } - - mu.Lock() - defer mu.Unlock() - if err := encodeExtensionsMap(v); err != nil { - return err - } - - return o.enc_map_body(v) -} - -func (o *Buffer) enc_map_body(v map[int32]Extension) error { - // Fast-path for common cases: zero or one extensions. - if len(v) <= 1 { - for _, e := range v { - o.buf = append(o.buf, e.enc...) - } - return nil - } - - // Sort keys to provide a deterministic encoding. - keys := make([]int, 0, len(v)) - for k := range v { - keys = append(keys, int(k)) - } - sort.Ints(keys) - - for _, k := range keys { - o.buf = append(o.buf, v[int32(k)].enc...) - } - return nil -} - -func size_map(p *Properties, base structPointer) int { - v := structPointer_ExtMap(base, p.field) - return extensionsMapSize(*v) -} - -func size_exts(p *Properties, base structPointer) int { - v := structPointer_Extensions(base, p.field) - return extensionsSize(v) -} - -// Encode a map field. -func (o *Buffer) enc_new_map(p *Properties, base structPointer) error { - var state errorState // XXX: or do we need to plumb this through? - - /* - A map defined as - map map_field = N; - is encoded in the same way as - message MapFieldEntry { - key_type key = 1; - value_type value = 2; - } - repeated MapFieldEntry map_field = N; - */ - - v := structPointer_NewAt(base, p.field, p.mtype).Elem() // map[K]V - if v.Len() == 0 { - return nil - } - - keycopy, valcopy, keybase, valbase := mapEncodeScratch(p.mtype) - - enc := func() error { - if err := p.mkeyprop.enc(o, p.mkeyprop, keybase); err != nil { - return err - } - if err := p.mvalprop.enc(o, p.mvalprop, valbase); err != nil && err != ErrNil { - return err - } - return nil - } - - // Don't sort map keys. It is not required by the spec, and C++ doesn't do it. - for _, key := range v.MapKeys() { - val := v.MapIndex(key) - - keycopy.Set(key) - valcopy.Set(val) - - o.buf = append(o.buf, p.tagcode...) - if err := o.enc_len_thing(enc, &state); err != nil { - return err - } - } - return nil -} - -func size_new_map(p *Properties, base structPointer) int { - v := structPointer_NewAt(base, p.field, p.mtype).Elem() // map[K]V - - keycopy, valcopy, keybase, valbase := mapEncodeScratch(p.mtype) - - n := 0 - for _, key := range v.MapKeys() { - val := v.MapIndex(key) - keycopy.Set(key) - valcopy.Set(val) - - // Tag codes for key and val are the responsibility of the sub-sizer. - keysize := p.mkeyprop.size(p.mkeyprop, keybase) - valsize := p.mvalprop.size(p.mvalprop, valbase) - entry := keysize + valsize - // Add on tag code and length of map entry itself. - n += len(p.tagcode) + sizeVarint(uint64(entry)) + entry - } - return n -} - -// mapEncodeScratch returns a new reflect.Value matching the map's value type, -// and a structPointer suitable for passing to an encoder or sizer. -func mapEncodeScratch(mapType reflect.Type) (keycopy, valcopy reflect.Value, keybase, valbase structPointer) { - // Prepare addressable doubly-indirect placeholders for the key and value types. - // This is needed because the element-type encoders expect **T, but the map iteration produces T. 
- - keycopy = reflect.New(mapType.Key()).Elem() // addressable K - keyptr := reflect.New(reflect.PtrTo(keycopy.Type())).Elem() // addressable *K - keyptr.Set(keycopy.Addr()) // - keybase = toStructPointer(keyptr.Addr()) // **K - - // Value types are more varied and require special handling. - switch mapType.Elem().Kind() { - case reflect.Slice: - // []byte - var dummy []byte - valcopy = reflect.ValueOf(&dummy).Elem() // addressable []byte - valbase = toStructPointer(valcopy.Addr()) - case reflect.Ptr: - // message; the generated field type is map[K]*Msg (so V is *Msg), - // so we only need one level of indirection. - valcopy = reflect.New(mapType.Elem()).Elem() // addressable V - valbase = toStructPointer(valcopy.Addr()) - default: - // everything else - valcopy = reflect.New(mapType.Elem()).Elem() // addressable V - valptr := reflect.New(reflect.PtrTo(valcopy.Type())).Elem() // addressable *V - valptr.Set(valcopy.Addr()) // - valbase = toStructPointer(valptr.Addr()) // **V - } - return -} - -// Encode a struct. -func (o *Buffer) enc_struct(prop *StructProperties, base structPointer) error { - var state errorState - // Encode fields in tag order so that decoders may use optimizations - // that depend on the ordering. - // https://developers.google.com/protocol-buffers/docs/encoding#order - for _, i := range prop.order { - p := prop.Prop[i] - if p.enc != nil { - err := p.enc(o, p, base) - if err != nil { - if err == ErrNil { - if p.Required && state.err == nil { - state.err = &RequiredNotSetError{p.Name} - } - } else if err == errRepeatedHasNil { - // Give more context to nil values in repeated fields. - return errors.New("repeated field " + p.OrigName + " has nil element") - } else if !state.shouldContinue(err, p) { - return err - } - } - if len(o.buf) > maxMarshalSize { - return ErrTooLarge - } - } - } - - // Do oneof fields. - if prop.oneofMarshaler != nil { - m := structPointer_Interface(base, prop.stype).(Message) - if err := prop.oneofMarshaler(m, o); err == ErrNil { - return errOneofHasNil - } else if err != nil { - return err - } - } - - // Add unrecognized fields at the end. - if prop.unrecField.IsValid() { - v := *structPointer_Bytes(base, prop.unrecField) - if len(o.buf)+len(v) > maxMarshalSize { - return ErrTooLarge - } - if len(v) > 0 { - o.buf = append(o.buf, v...) - } - } - - return state.err -} - -func size_struct(prop *StructProperties, base structPointer) (n int) { - for _, i := range prop.order { - p := prop.Prop[i] - if p.size != nil { - n += p.size(p, base) - } - } - - // Add unrecognized fields at the end. - if prop.unrecField.IsValid() { - v := *structPointer_Bytes(base, prop.unrecField) - n += len(v) - } - - // Factor in any oneof fields. - if prop.oneofSizer != nil { - m := structPointer_Interface(base, prop.stype).(Message) - n += prop.oneofSizer(m) - } - - return -} - -var zeroes [20]byte // longer than any conceivable sizeVarint - -// Encode a struct, preceded by its encoded length (as a varint). -func (o *Buffer) enc_len_struct(prop *StructProperties, base structPointer, state *errorState) error { - return o.enc_len_thing(func() error { return o.enc_struct(prop, base) }, state) -} - -// Encode something, preceded by its encoded length (as a varint). 
-func (o *Buffer) enc_len_thing(enc func() error, state *errorState) error { - iLen := len(o.buf) - o.buf = append(o.buf, 0, 0, 0, 0) // reserve four bytes for length - iMsg := len(o.buf) - err := enc() - if err != nil && !state.shouldContinue(err, nil) { - return err - } - lMsg := len(o.buf) - iMsg - lLen := sizeVarint(uint64(lMsg)) - switch x := lLen - (iMsg - iLen); { - case x > 0: // actual length is x bytes larger than the space we reserved - // Move msg x bytes right. - o.buf = append(o.buf, zeroes[:x]...) - copy(o.buf[iMsg+x:], o.buf[iMsg:iMsg+lMsg]) - case x < 0: // actual length is x bytes smaller than the space we reserved - // Move msg x bytes left. - copy(o.buf[iMsg+x:], o.buf[iMsg:iMsg+lMsg]) - o.buf = o.buf[:len(o.buf)+x] // x is negative - } - // Encode the length in the reserved space. - o.buf = o.buf[:iLen] - o.EncodeVarint(uint64(lMsg)) - o.buf = o.buf[:len(o.buf)+lMsg] - return state.err -} - -// errorState maintains the first error that occurs and updates that error -// with additional context. -type errorState struct { - err error -} - -// shouldContinue reports whether encoding should continue upon encountering the -// given error. If the error is RequiredNotSetError, shouldContinue returns true -// and, if this is the first appearance of that error, remembers it for future -// reporting. -// -// If prop is not nil, it may update any error with additional context about the -// field with the error. -func (s *errorState) shouldContinue(err error, prop *Properties) bool { - // Ignore unset required fields. - reqNotSet, ok := err.(*RequiredNotSetError) - if !ok { - return false - } - if s.err == nil { - if prop != nil { - err = &RequiredNotSetError{prop.Name + "." + reqNotSet.field} - } - s.err = err - } - return true -} diff --git a/vendor/github.com/golang/protobuf/proto/equal.go b/vendor/github.com/golang/protobuf/proto/equal.go deleted file mode 100644 index 2ed1cf5..0000000 --- a/vendor/github.com/golang/protobuf/proto/equal.go +++ /dev/null @@ -1,300 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2011 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Protocol buffer comparison. - -package proto - -import ( - "bytes" - "log" - "reflect" - "strings" -) - -/* -Equal returns true iff protocol buffers a and b are equal. -The arguments must both be pointers to protocol buffer structs. - -Equality is defined in this way: - - Two messages are equal iff they are the same type, - corresponding fields are equal, unknown field sets - are equal, and extensions sets are equal. - - Two set scalar fields are equal iff their values are equal. - If the fields are of a floating-point type, remember that - NaN != x for all x, including NaN. If the message is defined - in a proto3 .proto file, fields are not "set"; specifically, - zero length proto3 "bytes" fields are equal (nil == {}). - - Two repeated fields are equal iff their lengths are the same, - and their corresponding elements are equal. Note a "bytes" field, - although represented by []byte, is not a repeated field and the - rule for the scalar fields described above applies. - - Two unset fields are equal. - - Two unknown field sets are equal if their current - encoded state is equal. - - Two extension sets are equal iff they have corresponding - elements that are pairwise equal. - - Two map fields are equal iff their lengths are the same, - and they contain the same set of elements. Zero-length map - fields are equal. - - Every other combination of things are not equal. - -The return value is undefined if a and b are not protocol buffers. -*/ -func Equal(a, b Message) bool { - if a == nil || b == nil { - return a == b - } - v1, v2 := reflect.ValueOf(a), reflect.ValueOf(b) - if v1.Type() != v2.Type() { - return false - } - if v1.Kind() == reflect.Ptr { - if v1.IsNil() { - return v2.IsNil() - } - if v2.IsNil() { - return false - } - v1, v2 = v1.Elem(), v2.Elem() - } - if v1.Kind() != reflect.Struct { - return false - } - return equalStruct(v1, v2) -} - -// v1 and v2 are known to have the same type. 
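Aside: one consequence of the scalar rule documented above is that floating-point fields compare with ==, so a message carrying a NaN is unequal to everything, including an identical copy of itself. A tiny illustration of the underlying comparison, using plain Go rather than a generated message:

package main

import (
	"fmt"
	"math"
)

func main() {
	nan := math.NaN()
	// equalAny above compares float fields with ==, and NaN == NaN is false,
	// so two messages holding NaN in the same field never compare equal.
	fmt.Println(nan == nan) // false
}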
-func equalStruct(v1, v2 reflect.Value) bool { - sprop := GetProperties(v1.Type()) - for i := 0; i < v1.NumField(); i++ { - f := v1.Type().Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - f1, f2 := v1.Field(i), v2.Field(i) - if f.Type.Kind() == reflect.Ptr { - if n1, n2 := f1.IsNil(), f2.IsNil(); n1 && n2 { - // both unset - continue - } else if n1 != n2 { - // set/unset mismatch - return false - } - b1, ok := f1.Interface().(raw) - if ok { - b2 := f2.Interface().(raw) - // RawMessage - if !bytes.Equal(b1.Bytes(), b2.Bytes()) { - return false - } - continue - } - f1, f2 = f1.Elem(), f2.Elem() - } - if !equalAny(f1, f2, sprop.Prop[i]) { - return false - } - } - - if em1 := v1.FieldByName("XXX_InternalExtensions"); em1.IsValid() { - em2 := v2.FieldByName("XXX_InternalExtensions") - if !equalExtensions(v1.Type(), em1.Interface().(XXX_InternalExtensions), em2.Interface().(XXX_InternalExtensions)) { - return false - } - } - - if em1 := v1.FieldByName("XXX_extensions"); em1.IsValid() { - em2 := v2.FieldByName("XXX_extensions") - if !equalExtMap(v1.Type(), em1.Interface().(map[int32]Extension), em2.Interface().(map[int32]Extension)) { - return false - } - } - - uf := v1.FieldByName("XXX_unrecognized") - if !uf.IsValid() { - return true - } - - u1 := uf.Bytes() - u2 := v2.FieldByName("XXX_unrecognized").Bytes() - if !bytes.Equal(u1, u2) { - return false - } - - return true -} - -// v1 and v2 are known to have the same type. -// prop may be nil. -func equalAny(v1, v2 reflect.Value, prop *Properties) bool { - if v1.Type() == protoMessageType { - m1, _ := v1.Interface().(Message) - m2, _ := v2.Interface().(Message) - return Equal(m1, m2) - } - switch v1.Kind() { - case reflect.Bool: - return v1.Bool() == v2.Bool() - case reflect.Float32, reflect.Float64: - return v1.Float() == v2.Float() - case reflect.Int32, reflect.Int64: - return v1.Int() == v2.Int() - case reflect.Interface: - // Probably a oneof field; compare the inner values. - n1, n2 := v1.IsNil(), v2.IsNil() - if n1 || n2 { - return n1 == n2 - } - e1, e2 := v1.Elem(), v2.Elem() - if e1.Type() != e2.Type() { - return false - } - return equalAny(e1, e2, nil) - case reflect.Map: - if v1.Len() != v2.Len() { - return false - } - for _, key := range v1.MapKeys() { - val2 := v2.MapIndex(key) - if !val2.IsValid() { - // This key was not found in the second map. - return false - } - if !equalAny(v1.MapIndex(key), val2, nil) { - return false - } - } - return true - case reflect.Ptr: - // Maps may have nil values in them, so check for nil. - if v1.IsNil() && v2.IsNil() { - return true - } - if v1.IsNil() != v2.IsNil() { - return false - } - return equalAny(v1.Elem(), v2.Elem(), prop) - case reflect.Slice: - if v1.Type().Elem().Kind() == reflect.Uint8 { - // short circuit: []byte - - // Edge case: if this is in a proto3 message, a zero length - // bytes field is considered the zero value. 
- if prop != nil && prop.proto3 && v1.Len() == 0 && v2.Len() == 0 { - return true - } - if v1.IsNil() != v2.IsNil() { - return false - } - return bytes.Equal(v1.Interface().([]byte), v2.Interface().([]byte)) - } - - if v1.Len() != v2.Len() { - return false - } - for i := 0; i < v1.Len(); i++ { - if !equalAny(v1.Index(i), v2.Index(i), prop) { - return false - } - } - return true - case reflect.String: - return v1.Interface().(string) == v2.Interface().(string) - case reflect.Struct: - return equalStruct(v1, v2) - case reflect.Uint32, reflect.Uint64: - return v1.Uint() == v2.Uint() - } - - // unknown type, so not a protocol buffer - log.Printf("proto: don't know how to compare %v", v1) - return false -} - -// base is the struct type that the extensions are based on. -// x1 and x2 are InternalExtensions. -func equalExtensions(base reflect.Type, x1, x2 XXX_InternalExtensions) bool { - em1, _ := x1.extensionsRead() - em2, _ := x2.extensionsRead() - return equalExtMap(base, em1, em2) -} - -func equalExtMap(base reflect.Type, em1, em2 map[int32]Extension) bool { - if len(em1) != len(em2) { - return false - } - - for extNum, e1 := range em1 { - e2, ok := em2[extNum] - if !ok { - return false - } - - m1, m2 := e1.value, e2.value - - if m1 != nil && m2 != nil { - // Both are unencoded. - if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2), nil) { - return false - } - continue - } - - // At least one is encoded. To do a semantically correct comparison - // we need to unmarshal them first. - var desc *ExtensionDesc - if m := extensionMaps[base]; m != nil { - desc = m[extNum] - } - if desc == nil { - log.Printf("proto: don't know how to compare extension %d of %v", extNum, base) - continue - } - var err error - if m1 == nil { - m1, err = decodeExtension(e1.enc, desc) - } - if m2 == nil && err == nil { - m2, err = decodeExtension(e2.enc, desc) - } - if err != nil { - // The encoded form is invalid. - log.Printf("proto: badly encoded extension %d of %v: %v", extNum, base, err) - return false - } - if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2), nil) { - return false - } - } - - return true -} diff --git a/vendor/github.com/golang/protobuf/proto/extensions.go b/vendor/github.com/golang/protobuf/proto/extensions.go deleted file mode 100644 index eaad218..0000000 --- a/vendor/github.com/golang/protobuf/proto/extensions.go +++ /dev/null @@ -1,587 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Types and routines for supporting protocol buffer extensions. - */ - -import ( - "errors" - "fmt" - "reflect" - "strconv" - "sync" -) - -// ErrMissingExtension is the error returned by GetExtension if the named extension is not in the message. -var ErrMissingExtension = errors.New("proto: missing extension") - -// ExtensionRange represents a range of message extensions for a protocol buffer. -// Used in code generated by the protocol compiler. -type ExtensionRange struct { - Start, End int32 // both inclusive -} - -// extendableProto is an interface implemented by any protocol buffer generated by the current -// proto compiler that may be extended. -type extendableProto interface { - Message - ExtensionRangeArray() []ExtensionRange - extensionsWrite() map[int32]Extension - extensionsRead() (map[int32]Extension, sync.Locker) -} - -// extendableProtoV1 is an interface implemented by a protocol buffer generated by the previous -// version of the proto compiler that may be extended. -type extendableProtoV1 interface { - Message - ExtensionRangeArray() []ExtensionRange - ExtensionMap() map[int32]Extension -} - -// extensionAdapter is a wrapper around extendableProtoV1 that implements extendableProto. -type extensionAdapter struct { - extendableProtoV1 -} - -func (e extensionAdapter) extensionsWrite() map[int32]Extension { - return e.ExtensionMap() -} - -func (e extensionAdapter) extensionsRead() (map[int32]Extension, sync.Locker) { - return e.ExtensionMap(), notLocker{} -} - -// notLocker is a sync.Locker whose Lock and Unlock methods are nops. -type notLocker struct{} - -func (n notLocker) Lock() {} -func (n notLocker) Unlock() {} - -// extendable returns the extendableProto interface for the given generated proto message. -// If the proto message has the old extension format, it returns a wrapper that implements -// the extendableProto interface. -func extendable(p interface{}) (extendableProto, bool) { - if ep, ok := p.(extendableProto); ok { - return ep, ok - } - if ep, ok := p.(extendableProtoV1); ok { - return extensionAdapter{ep}, ok - } - return nil, false -} - -// XXX_InternalExtensions is an internal representation of proto extensions. -// -// Each generated message struct type embeds an anonymous XXX_InternalExtensions field, -// thus gaining the unexported 'extensions' method, which can be called only from the proto package. -// -// The methods of XXX_InternalExtensions are not concurrency safe in general, -// but calls to logically read-only methods such as has and get may be executed concurrently. -type XXX_InternalExtensions struct { - // The struct must be indirect so that if a user inadvertently copies a - // generated message and its embedded XXX_InternalExtensions, they - // avoid the mayhem of a copied mutex. - // - // The mutex serializes all logically read-only operations to p.extensionMap. - // It is up to the client to ensure that write operations to p.extensionMap are - // mutually exclusive with other accesses. 
- p *struct { - mu sync.Mutex - extensionMap map[int32]Extension - } -} - -// extensionsWrite returns the extension map, creating it on first use. -func (e *XXX_InternalExtensions) extensionsWrite() map[int32]Extension { - if e.p == nil { - e.p = new(struct { - mu sync.Mutex - extensionMap map[int32]Extension - }) - e.p.extensionMap = make(map[int32]Extension) - } - return e.p.extensionMap -} - -// extensionsRead returns the extensions map for read-only use. It may be nil. -// The caller must hold the returned mutex's lock when accessing Elements within the map. -func (e *XXX_InternalExtensions) extensionsRead() (map[int32]Extension, sync.Locker) { - if e.p == nil { - return nil, nil - } - return e.p.extensionMap, &e.p.mu -} - -var extendableProtoType = reflect.TypeOf((*extendableProto)(nil)).Elem() -var extendableProtoV1Type = reflect.TypeOf((*extendableProtoV1)(nil)).Elem() - -// ExtensionDesc represents an extension specification. -// Used in generated code from the protocol compiler. -type ExtensionDesc struct { - ExtendedType Message // nil pointer to the type that is being extended - ExtensionType interface{} // nil pointer to the extension type - Field int32 // field number - Name string // fully-qualified name of extension, for text formatting - Tag string // protobuf tag style - Filename string // name of the file in which the extension is defined -} - -func (ed *ExtensionDesc) repeated() bool { - t := reflect.TypeOf(ed.ExtensionType) - return t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 -} - -// Extension represents an extension in a message. -type Extension struct { - // When an extension is stored in a message using SetExtension - // only desc and value are set. When the message is marshaled - // enc will be set to the encoded form of the message. - // - // When a message is unmarshaled and contains extensions, each - // extension will have only enc set. When such an extension is - // accessed using GetExtension (or GetExtensions) desc and value - // will be set. - desc *ExtensionDesc - value interface{} - enc []byte -} - -// SetRawExtension is for testing only. -func SetRawExtension(base Message, id int32, b []byte) { - epb, ok := extendable(base) - if !ok { - return - } - extmap := epb.extensionsWrite() - extmap[id] = Extension{enc: b} -} - -// isExtensionField returns true iff the given field number is in an extension range. -func isExtensionField(pb extendableProto, field int32) bool { - for _, er := range pb.ExtensionRangeArray() { - if er.Start <= field && field <= er.End { - return true - } - } - return false -} - -// checkExtensionTypes checks that the given extension is valid for pb. -func checkExtensionTypes(pb extendableProto, extension *ExtensionDesc) error { - var pbi interface{} = pb - // Check the extended type. - if ea, ok := pbi.(extensionAdapter); ok { - pbi = ea.extendableProtoV1 - } - if a, b := reflect.TypeOf(pbi), reflect.TypeOf(extension.ExtendedType); a != b { - return errors.New("proto: bad extended type; " + b.String() + " does not extend " + a.String()) - } - // Check the range. - if !isExtensionField(pb, extension.Field) { - return errors.New("proto: bad extension number; not in declared ranges") - } - return nil -} - -// extPropKey is sufficient to uniquely identify an extension. 
-type extPropKey struct { - base reflect.Type - field int32 -} - -var extProp = struct { - sync.RWMutex - m map[extPropKey]*Properties -}{ - m: make(map[extPropKey]*Properties), -} - -func extensionProperties(ed *ExtensionDesc) *Properties { - key := extPropKey{base: reflect.TypeOf(ed.ExtendedType), field: ed.Field} - - extProp.RLock() - if prop, ok := extProp.m[key]; ok { - extProp.RUnlock() - return prop - } - extProp.RUnlock() - - extProp.Lock() - defer extProp.Unlock() - // Check again. - if prop, ok := extProp.m[key]; ok { - return prop - } - - prop := new(Properties) - prop.Init(reflect.TypeOf(ed.ExtensionType), "unknown_name", ed.Tag, nil) - extProp.m[key] = prop - return prop -} - -// encode encodes any unmarshaled (unencoded) extensions in e. -func encodeExtensions(e *XXX_InternalExtensions) error { - m, mu := e.extensionsRead() - if m == nil { - return nil // fast path - } - mu.Lock() - defer mu.Unlock() - return encodeExtensionsMap(m) -} - -// encode encodes any unmarshaled (unencoded) extensions in e. -func encodeExtensionsMap(m map[int32]Extension) error { - for k, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - et := reflect.TypeOf(e.desc.ExtensionType) - props := extensionProperties(e.desc) - - p := NewBuffer(nil) - // If e.value has type T, the encoder expects a *struct{ X T }. - // Pass a *T with a zero field and hope it all works out. - x := reflect.New(et) - x.Elem().Set(reflect.ValueOf(e.value)) - if err := props.enc(p, props, toStructPointer(x)); err != nil { - return err - } - e.enc = p.buf - m[k] = e - } - return nil -} - -func extensionsSize(e *XXX_InternalExtensions) (n int) { - m, mu := e.extensionsRead() - if m == nil { - return 0 - } - mu.Lock() - defer mu.Unlock() - return extensionsMapSize(m) -} - -func extensionsMapSize(m map[int32]Extension) (n int) { - for _, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - n += len(e.enc) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - et := reflect.TypeOf(e.desc.ExtensionType) - props := extensionProperties(e.desc) - - // If e.value has type T, the encoder expects a *struct{ X T }. - // Pass a *T with a zero field and hope it all works out. - x := reflect.New(et) - x.Elem().Set(reflect.ValueOf(e.value)) - n += props.size(props, toStructPointer(x)) - } - return -} - -// HasExtension returns whether the given extension is present in pb. -func HasExtension(pb Message, extension *ExtensionDesc) bool { - // TODO: Check types, field numbers, etc.? - epb, ok := extendable(pb) - if !ok { - return false - } - extmap, mu := epb.extensionsRead() - if extmap == nil { - return false - } - mu.Lock() - _, ok = extmap[extension.Field] - mu.Unlock() - return ok -} - -// ClearExtension removes the given extension from pb. -func ClearExtension(pb Message, extension *ExtensionDesc) { - epb, ok := extendable(pb) - if !ok { - return - } - // TODO: Check types, field numbers, etc.? - extmap := epb.extensionsWrite() - delete(extmap, extension.Field) -} - -// GetExtension parses and returns the given extension of pb. -// If the extension is not present and has no default value it returns ErrMissingExtension. 
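Aside: extensionProperties above uses the read-lock, then re-check-under-write-lock caching idiom so the common hit path only takes the cheap RLock. A generic sketch of that pattern (the cache type and names here are illustrative, not part of the package):

package main

import (
	"fmt"
	"sync"
)

type cache struct {
	mu sync.RWMutex
	m  map[string]int
}

// get returns the cached value for key, computing and storing it on first use.
func (c *cache) get(key string, compute func() int) int {
	c.mu.RLock()
	if v, ok := c.m[key]; ok {
		c.mu.RUnlock()
		return v
	}
	c.mu.RUnlock()

	c.mu.Lock()
	defer c.mu.Unlock()
	if v, ok := c.m[key]; ok { // another goroutine may have filled it meanwhile
		return v
	}
	v := compute()
	c.m[key] = v
	return v
}

func main() {
	c := &cache{m: make(map[string]int)}
	fmt.Println(c.get("answer", func() int { return 42 }))
}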
-func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) { - epb, ok := extendable(pb) - if !ok { - return nil, errors.New("proto: not an extendable proto") - } - - if err := checkExtensionTypes(epb, extension); err != nil { - return nil, err - } - - emap, mu := epb.extensionsRead() - if emap == nil { - return defaultExtensionValue(extension) - } - mu.Lock() - defer mu.Unlock() - e, ok := emap[extension.Field] - if !ok { - // defaultExtensionValue returns the default value or - // ErrMissingExtension if there is no default. - return defaultExtensionValue(extension) - } - - if e.value != nil { - // Already decoded. Check the descriptor, though. - if e.desc != extension { - // This shouldn't happen. If it does, it means that - // GetExtension was called twice with two different - // descriptors with the same field number. - return nil, errors.New("proto: descriptor conflict") - } - return e.value, nil - } - - v, err := decodeExtension(e.enc, extension) - if err != nil { - return nil, err - } - - // Remember the decoded version and drop the encoded version. - // That way it is safe to mutate what we return. - e.value = v - e.desc = extension - e.enc = nil - emap[extension.Field] = e - return e.value, nil -} - -// defaultExtensionValue returns the default value for extension. -// If no default for an extension is defined ErrMissingExtension is returned. -func defaultExtensionValue(extension *ExtensionDesc) (interface{}, error) { - t := reflect.TypeOf(extension.ExtensionType) - props := extensionProperties(extension) - - sf, _, err := fieldDefault(t, props) - if err != nil { - return nil, err - } - - if sf == nil || sf.value == nil { - // There is no default value. - return nil, ErrMissingExtension - } - - if t.Kind() != reflect.Ptr { - // We do not need to return a Ptr, we can directly return sf.value. - return sf.value, nil - } - - // We need to return an interface{} that is a pointer to sf.value. - value := reflect.New(t).Elem() - value.Set(reflect.New(value.Type().Elem())) - if sf.kind == reflect.Int32 { - // We may have an int32 or an enum, but the underlying data is int32. - // Since we can't set an int32 into a non int32 reflect.value directly - // set it as a int32. - value.Elem().SetInt(int64(sf.value.(int32))) - } else { - value.Elem().Set(reflect.ValueOf(sf.value)) - } - return value.Interface(), nil -} - -// decodeExtension decodes an extension encoded in b. -func decodeExtension(b []byte, extension *ExtensionDesc) (interface{}, error) { - o := NewBuffer(b) - - t := reflect.TypeOf(extension.ExtensionType) - - props := extensionProperties(extension) - - // t is a pointer to a struct, pointer to basic type or a slice. - // Allocate a "field" to store the pointer/slice itself; the - // pointer/slice will be stored here. We pass - // the address of this field to props.dec. - // This passes a zero field and a *t and lets props.dec - // interpret it as a *struct{ x t }. - value := reflect.New(t).Elem() - - for { - // Discard wire type and field number varint. It isn't needed. - if _, err := o.DecodeVarint(); err != nil { - return nil, err - } - - if err := props.dec(o, props, toStructPointer(value.Addr())); err != nil { - return nil, err - } - - if o.index >= len(o.buf) { - break - } - } - return value.Interface(), nil -} - -// GetExtensions returns a slice of the extensions present in pb that are also listed in es. -// The returned slice has the same length as es; missing extensions will appear as nil elements. 
-func GetExtensions(pb Message, es []*ExtensionDesc) (extensions []interface{}, err error) { - epb, ok := extendable(pb) - if !ok { - return nil, errors.New("proto: not an extendable proto") - } - extensions = make([]interface{}, len(es)) - for i, e := range es { - extensions[i], err = GetExtension(epb, e) - if err == ErrMissingExtension { - err = nil - } - if err != nil { - return - } - } - return -} - -// ExtensionDescs returns a new slice containing pb's extension descriptors, in undefined order. -// For non-registered extensions, ExtensionDescs returns an incomplete descriptor containing -// just the Field field, which defines the extension's field number. -func ExtensionDescs(pb Message) ([]*ExtensionDesc, error) { - epb, ok := extendable(pb) - if !ok { - return nil, fmt.Errorf("proto: %T is not an extendable proto.Message", pb) - } - registeredExtensions := RegisteredExtensions(pb) - - emap, mu := epb.extensionsRead() - if emap == nil { - return nil, nil - } - mu.Lock() - defer mu.Unlock() - extensions := make([]*ExtensionDesc, 0, len(emap)) - for extid, e := range emap { - desc := e.desc - if desc == nil { - desc = registeredExtensions[extid] - if desc == nil { - desc = &ExtensionDesc{Field: extid} - } - } - - extensions = append(extensions, desc) - } - return extensions, nil -} - -// SetExtension sets the specified extension of pb to the specified value. -func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error { - epb, ok := extendable(pb) - if !ok { - return errors.New("proto: not an extendable proto") - } - if err := checkExtensionTypes(epb, extension); err != nil { - return err - } - typ := reflect.TypeOf(extension.ExtensionType) - if typ != reflect.TypeOf(value) { - return errors.New("proto: bad extension value type") - } - // nil extension values need to be caught early, because the - // encoder can't distinguish an ErrNil due to a nil extension - // from an ErrNil due to a missing field. Extensions are - // always optional, so the encoder would just swallow the error - // and drop all the extensions from the encoded message. - if reflect.ValueOf(value).IsNil() { - return fmt.Errorf("proto: SetExtension called with nil value of type %T", value) - } - - extmap := epb.extensionsWrite() - extmap[extension.Field] = Extension{desc: extension, value: value} - return nil -} - -// ClearAllExtensions clears all extensions from pb. -func ClearAllExtensions(pb Message) { - epb, ok := extendable(pb) - if !ok { - return - } - m := epb.extensionsWrite() - for k := range m { - delete(m, k) - } -} - -// A global registry of extensions. -// The generated code will register the generated descriptors by calling RegisterExtension. - -var extensionMaps = make(map[reflect.Type]map[int32]*ExtensionDesc) - -// RegisterExtension is called from the generated code. -func RegisterExtension(desc *ExtensionDesc) { - st := reflect.TypeOf(desc.ExtendedType).Elem() - m := extensionMaps[st] - if m == nil { - m = make(map[int32]*ExtensionDesc) - extensionMaps[st] = m - } - if _, ok := m[desc.Field]; ok { - panic("proto: duplicate extension registered: " + st.String() + " " + strconv.Itoa(int(desc.Field))) - } - m[desc.Field] = desc -} - -// RegisteredExtensions returns a map of the registered extensions of a -// protocol buffer struct, indexed by the extension number. -// The argument pb should be a nil pointer to the struct type. 
-func RegisteredExtensions(pb Message) map[int32]*ExtensionDesc { - return extensionMaps[reflect.TypeOf(pb).Elem()] -} diff --git a/vendor/github.com/golang/protobuf/proto/lib.go b/vendor/github.com/golang/protobuf/proto/lib.go deleted file mode 100644 index ac4ddbc..0000000 --- a/vendor/github.com/golang/protobuf/proto/lib.go +++ /dev/null @@ -1,898 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* -Package proto converts data structures to and from the wire format of -protocol buffers. It works in concert with the Go source code generated -for .proto files by the protocol compiler. - -A summary of the properties of the protocol buffer interface -for a protocol buffer variable v: - - - Names are turned from camel_case to CamelCase for export. - - There are no methods on v to set fields; just treat - them as structure fields. - - There are getters that return a field's value if set, - and return the field's default value if unset. - The getters work even if the receiver is a nil message. - - The zero value for a struct is its correct initialization state. - All desired fields must be set before marshaling. - - A Reset() method will restore a protobuf struct to its zero state. - - Non-repeated fields are pointers to the values; nil means unset. - That is, optional or required field int32 f becomes F *int32. - - Repeated fields are slices. - - Helper functions are available to aid the setting of fields. - msg.Foo = proto.String("hello") // set field - - Constants are defined to hold the default values of all fields that - have them. They have the form Default_StructName_FieldName. - Because the getter methods handle defaulted values, - direct use of these constants should be rare. - - Enums are given type names and maps from names to values. - Enum values are prefixed by the enclosing message's name, or by the - enum's type name if it is a top-level enum. 
Enum types have a String - method, and a Enum method to assist in message construction. - - Nested messages, groups and enums have type names prefixed with the name of - the surrounding message type. - - Extensions are given descriptor names that start with E_, - followed by an underscore-delimited list of the nested messages - that contain it (if any) followed by the CamelCased name of the - extension field itself. HasExtension, ClearExtension, GetExtension - and SetExtension are functions for manipulating extensions. - - Oneof field sets are given a single field in their message, - with distinguished wrapper types for each possible field value. - - Marshal and Unmarshal are functions to encode and decode the wire format. - -When the .proto file specifies `syntax="proto3"`, there are some differences: - - - Non-repeated fields of non-message type are values instead of pointers. - - Getters are only generated for message and oneof fields. - - Enum types do not get an Enum method. - -The simplest way to describe this is to see an example. -Given file test.proto, containing - - package example; - - enum FOO { X = 17; } - - message Test { - required string label = 1; - optional int32 type = 2 [default=77]; - repeated int64 reps = 3; - optional group OptionalGroup = 4 { - required string RequiredField = 5; - } - oneof union { - int32 number = 6; - string name = 7; - } - } - -The resulting file, test.pb.go, is: - - package example - - import proto "github.com/golang/protobuf/proto" - import math "math" - - type FOO int32 - const ( - FOO_X FOO = 17 - ) - var FOO_name = map[int32]string{ - 17: "X", - } - var FOO_value = map[string]int32{ - "X": 17, - } - - func (x FOO) Enum() *FOO { - p := new(FOO) - *p = x - return p - } - func (x FOO) String() string { - return proto.EnumName(FOO_name, int32(x)) - } - func (x *FOO) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FOO_value, data) - if err != nil { - return err - } - *x = FOO(value) - return nil - } - - type Test struct { - Label *string `protobuf:"bytes,1,req,name=label" json:"label,omitempty"` - Type *int32 `protobuf:"varint,2,opt,name=type,def=77" json:"type,omitempty"` - Reps []int64 `protobuf:"varint,3,rep,name=reps" json:"reps,omitempty"` - Optionalgroup *Test_OptionalGroup `protobuf:"group,4,opt,name=OptionalGroup" json:"optionalgroup,omitempty"` - // Types that are valid to be assigned to Union: - // *Test_Number - // *Test_Name - Union isTest_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` - } - func (m *Test) Reset() { *m = Test{} } - func (m *Test) String() string { return proto.CompactTextString(m) } - func (*Test) ProtoMessage() {} - - type isTest_Union interface { - isTest_Union() - } - - type Test_Number struct { - Number int32 `protobuf:"varint,6,opt,name=number"` - } - type Test_Name struct { - Name string `protobuf:"bytes,7,opt,name=name"` - } - - func (*Test_Number) isTest_Union() {} - func (*Test_Name) isTest_Union() {} - - func (m *Test) GetUnion() isTest_Union { - if m != nil { - return m.Union - } - return nil - } - const Default_Test_Type int32 = 77 - - func (m *Test) GetLabel() string { - if m != nil && m.Label != nil { - return *m.Label - } - return "" - } - - func (m *Test) GetType() int32 { - if m != nil && m.Type != nil { - return *m.Type - } - return Default_Test_Type - } - - func (m *Test) GetOptionalgroup() *Test_OptionalGroup { - if m != nil { - return m.Optionalgroup - } - return nil - } - - type Test_OptionalGroup struct { - RequiredField *string 
`protobuf:"bytes,5,req" json:"RequiredField,omitempty"` - } - func (m *Test_OptionalGroup) Reset() { *m = Test_OptionalGroup{} } - func (m *Test_OptionalGroup) String() string { return proto.CompactTextString(m) } - - func (m *Test_OptionalGroup) GetRequiredField() string { - if m != nil && m.RequiredField != nil { - return *m.RequiredField - } - return "" - } - - func (m *Test) GetNumber() int32 { - if x, ok := m.GetUnion().(*Test_Number); ok { - return x.Number - } - return 0 - } - - func (m *Test) GetName() string { - if x, ok := m.GetUnion().(*Test_Name); ok { - return x.Name - } - return "" - } - - func init() { - proto.RegisterEnum("example.FOO", FOO_name, FOO_value) - } - -To create and play with a Test object: - - package main - - import ( - "log" - - "github.com/golang/protobuf/proto" - pb "./example.pb" - ) - - func main() { - test := &pb.Test{ - Label: proto.String("hello"), - Type: proto.Int32(17), - Reps: []int64{1, 2, 3}, - Optionalgroup: &pb.Test_OptionalGroup{ - RequiredField: proto.String("good bye"), - }, - Union: &pb.Test_Name{"fred"}, - } - data, err := proto.Marshal(test) - if err != nil { - log.Fatal("marshaling error: ", err) - } - newTest := &pb.Test{} - err = proto.Unmarshal(data, newTest) - if err != nil { - log.Fatal("unmarshaling error: ", err) - } - // Now test and newTest contain the same data. - if test.GetLabel() != newTest.GetLabel() { - log.Fatalf("data mismatch %q != %q", test.GetLabel(), newTest.GetLabel()) - } - // Use a type switch to determine which oneof was set. - switch u := test.Union.(type) { - case *pb.Test_Number: // u.Number contains the number. - case *pb.Test_Name: // u.Name contains the string. - } - // etc. - } -*/ -package proto - -import ( - "encoding/json" - "fmt" - "log" - "reflect" - "sort" - "strconv" - "sync" -) - -// Message is implemented by generated protocol buffer messages. -type Message interface { - Reset() - String() string - ProtoMessage() -} - -// Stats records allocation details about the protocol buffer encoders -// and decoders. Useful for tuning the library itself. -type Stats struct { - Emalloc uint64 // mallocs in encode - Dmalloc uint64 // mallocs in decode - Encode uint64 // number of encodes - Decode uint64 // number of decodes - Chit uint64 // number of cache hits - Cmiss uint64 // number of cache misses - Size uint64 // number of sizes -} - -// Set to true to enable stats collection. -const collectStats = false - -var stats Stats - -// GetStats returns a copy of the global Stats structure. -func GetStats() Stats { return stats } - -// A Buffer is a buffer manager for marshaling and unmarshaling -// protocol buffers. It may be reused between invocations to -// reduce memory usage. It is not necessary to use a Buffer; -// the global functions Marshal and Unmarshal create a -// temporary Buffer and are fine for most applications. -type Buffer struct { - buf []byte // encode/decode byte stream - index int // read point - - // pools of basic types to amortize allocation. - bools []bool - uint32s []uint32 - uint64s []uint64 - - // extra pools, only used with pointer_reflect.go - int32s []int32 - int64s []int64 - float32s []float32 - float64s []float64 -} - -// NewBuffer allocates a new Buffer and initializes its internal data to -// the contents of the argument slice. -func NewBuffer(e []byte) *Buffer { - return &Buffer{buf: e} -} - -// Reset resets the Buffer, ready for marshaling a new protocol buffer. 
-func (p *Buffer) Reset() { - p.buf = p.buf[0:0] // for reading/writing - p.index = 0 // for reading -} - -// SetBuf replaces the internal buffer with the slice, -// ready for unmarshaling the contents of the slice. -func (p *Buffer) SetBuf(s []byte) { - p.buf = s - p.index = 0 -} - -// Bytes returns the contents of the Buffer. -func (p *Buffer) Bytes() []byte { return p.buf } - -/* - * Helper routines for simplifying the creation of optional fields of basic type. - */ - -// Bool is a helper routine that allocates a new bool value -// to store v and returns a pointer to it. -func Bool(v bool) *bool { - return &v -} - -// Int32 is a helper routine that allocates a new int32 value -// to store v and returns a pointer to it. -func Int32(v int32) *int32 { - return &v -} - -// Int is a helper routine that allocates a new int32 value -// to store v and returns a pointer to it, but unlike Int32 -// its argument value is an int. -func Int(v int) *int32 { - p := new(int32) - *p = int32(v) - return p -} - -// Int64 is a helper routine that allocates a new int64 value -// to store v and returns a pointer to it. -func Int64(v int64) *int64 { - return &v -} - -// Float32 is a helper routine that allocates a new float32 value -// to store v and returns a pointer to it. -func Float32(v float32) *float32 { - return &v -} - -// Float64 is a helper routine that allocates a new float64 value -// to store v and returns a pointer to it. -func Float64(v float64) *float64 { - return &v -} - -// Uint32 is a helper routine that allocates a new uint32 value -// to store v and returns a pointer to it. -func Uint32(v uint32) *uint32 { - return &v -} - -// Uint64 is a helper routine that allocates a new uint64 value -// to store v and returns a pointer to it. -func Uint64(v uint64) *uint64 { - return &v -} - -// String is a helper routine that allocates a new string value -// to store v and returns a pointer to it. -func String(v string) *string { - return &v -} - -// EnumName is a helper function to simplify printing protocol buffer enums -// by name. Given an enum map and a value, it returns a useful string. -func EnumName(m map[int32]string, v int32) string { - s, ok := m[v] - if ok { - return s - } - return strconv.Itoa(int(v)) -} - -// UnmarshalJSONEnum is a helper function to simplify recovering enum int values -// from their JSON-encoded representation. Given a map from the enum's symbolic -// names to its int values, and a byte buffer containing the JSON-encoded -// value, it returns an int32 that can be cast to the enum type by the caller. -// -// The function can deal with both JSON representations, numeric and symbolic. -func UnmarshalJSONEnum(m map[string]int32, data []byte, enumName string) (int32, error) { - if data[0] == '"' { - // New style: enums are strings. - var repr string - if err := json.Unmarshal(data, &repr); err != nil { - return -1, err - } - val, ok := m[repr] - if !ok { - return 0, fmt.Errorf("unrecognized enum %s value %q", enumName, repr) - } - return val, nil - } - // Old style: enums are ints. - var val int32 - if err := json.Unmarshal(data, &val); err != nil { - return 0, fmt.Errorf("cannot unmarshal %#q into enum %s", data, enumName) - } - return val, nil -} - -// DebugPrint dumps the encoded data in b in a debugging format with a header -// including the string s. Used in testing but made available for general debugging. 
-func (p *Buffer) DebugPrint(s string, b []byte) { - var u uint64 - - obuf := p.buf - index := p.index - p.buf = b - p.index = 0 - depth := 0 - - fmt.Printf("\n--- %s ---\n", s) - -out: - for { - for i := 0; i < depth; i++ { - fmt.Print(" ") - } - - index := p.index - if index == len(p.buf) { - break - } - - op, err := p.DecodeVarint() - if err != nil { - fmt.Printf("%3d: fetching op err %v\n", index, err) - break out - } - tag := op >> 3 - wire := op & 7 - - switch wire { - default: - fmt.Printf("%3d: t=%3d unknown wire=%d\n", - index, tag, wire) - break out - - case WireBytes: - var r []byte - - r, err = p.DecodeRawBytes(false) - if err != nil { - break out - } - fmt.Printf("%3d: t=%3d bytes [%d]", index, tag, len(r)) - if len(r) <= 6 { - for i := 0; i < len(r); i++ { - fmt.Printf(" %.2x", r[i]) - } - } else { - for i := 0; i < 3; i++ { - fmt.Printf(" %.2x", r[i]) - } - fmt.Printf(" ..") - for i := len(r) - 3; i < len(r); i++ { - fmt.Printf(" %.2x", r[i]) - } - } - fmt.Printf("\n") - - case WireFixed32: - u, err = p.DecodeFixed32() - if err != nil { - fmt.Printf("%3d: t=%3d fix32 err %v\n", index, tag, err) - break out - } - fmt.Printf("%3d: t=%3d fix32 %d\n", index, tag, u) - - case WireFixed64: - u, err = p.DecodeFixed64() - if err != nil { - fmt.Printf("%3d: t=%3d fix64 err %v\n", index, tag, err) - break out - } - fmt.Printf("%3d: t=%3d fix64 %d\n", index, tag, u) - - case WireVarint: - u, err = p.DecodeVarint() - if err != nil { - fmt.Printf("%3d: t=%3d varint err %v\n", index, tag, err) - break out - } - fmt.Printf("%3d: t=%3d varint %d\n", index, tag, u) - - case WireStartGroup: - fmt.Printf("%3d: t=%3d start\n", index, tag) - depth++ - - case WireEndGroup: - depth-- - fmt.Printf("%3d: t=%3d end\n", index, tag) - } - } - - if depth != 0 { - fmt.Printf("%3d: start-end not balanced %d\n", p.index, depth) - } - fmt.Printf("\n") - - p.buf = obuf - p.index = index -} - -// SetDefaults sets unset protocol buffer fields to their default values. -// It only modifies fields that are both unset and have defined defaults. -// It recursively sets default values in any non-nil sub-messages. -func SetDefaults(pb Message) { - setDefaults(reflect.ValueOf(pb), true, false) -} - -// v is a pointer to a struct. -func setDefaults(v reflect.Value, recur, zeros bool) { - v = v.Elem() - - defaultMu.RLock() - dm, ok := defaults[v.Type()] - defaultMu.RUnlock() - if !ok { - dm = buildDefaultMessage(v.Type()) - defaultMu.Lock() - defaults[v.Type()] = dm - defaultMu.Unlock() - } - - for _, sf := range dm.scalars { - f := v.Field(sf.index) - if !f.IsNil() { - // field already set - continue - } - dv := sf.value - if dv == nil && !zeros { - // no explicit default, and don't want to set zeros - continue - } - fptr := f.Addr().Interface() // **T - // TODO: Consider batching the allocations we do here. 
- switch sf.kind { - case reflect.Bool: - b := new(bool) - if dv != nil { - *b = dv.(bool) - } - *(fptr.(**bool)) = b - case reflect.Float32: - f := new(float32) - if dv != nil { - *f = dv.(float32) - } - *(fptr.(**float32)) = f - case reflect.Float64: - f := new(float64) - if dv != nil { - *f = dv.(float64) - } - *(fptr.(**float64)) = f - case reflect.Int32: - // might be an enum - if ft := f.Type(); ft != int32PtrType { - // enum - f.Set(reflect.New(ft.Elem())) - if dv != nil { - f.Elem().SetInt(int64(dv.(int32))) - } - } else { - // int32 field - i := new(int32) - if dv != nil { - *i = dv.(int32) - } - *(fptr.(**int32)) = i - } - case reflect.Int64: - i := new(int64) - if dv != nil { - *i = dv.(int64) - } - *(fptr.(**int64)) = i - case reflect.String: - s := new(string) - if dv != nil { - *s = dv.(string) - } - *(fptr.(**string)) = s - case reflect.Uint8: - // exceptional case: []byte - var b []byte - if dv != nil { - db := dv.([]byte) - b = make([]byte, len(db)) - copy(b, db) - } else { - b = []byte{} - } - *(fptr.(*[]byte)) = b - case reflect.Uint32: - u := new(uint32) - if dv != nil { - *u = dv.(uint32) - } - *(fptr.(**uint32)) = u - case reflect.Uint64: - u := new(uint64) - if dv != nil { - *u = dv.(uint64) - } - *(fptr.(**uint64)) = u - default: - log.Printf("proto: can't set default for field %v (sf.kind=%v)", f, sf.kind) - } - } - - for _, ni := range dm.nested { - f := v.Field(ni) - // f is *T or []*T or map[T]*T - switch f.Kind() { - case reflect.Ptr: - if f.IsNil() { - continue - } - setDefaults(f, recur, zeros) - - case reflect.Slice: - for i := 0; i < f.Len(); i++ { - e := f.Index(i) - if e.IsNil() { - continue - } - setDefaults(e, recur, zeros) - } - - case reflect.Map: - for _, k := range f.MapKeys() { - e := f.MapIndex(k) - if e.IsNil() { - continue - } - setDefaults(e, recur, zeros) - } - } - } -} - -var ( - // defaults maps a protocol buffer struct type to a slice of the fields, - // with its scalar fields set to their proto-declared non-zero default values. - defaultMu sync.RWMutex - defaults = make(map[reflect.Type]defaultMessage) - - int32PtrType = reflect.TypeOf((*int32)(nil)) -) - -// defaultMessage represents information about the default values of a message. -type defaultMessage struct { - scalars []scalarField - nested []int // struct field index of nested messages -} - -type scalarField struct { - index int // struct field index - kind reflect.Kind // element type (the T in *T or []T) - value interface{} // the proto-declared default value, or nil -} - -// t is a struct type. -func buildDefaultMessage(t reflect.Type) (dm defaultMessage) { - sprop := GetProperties(t) - for _, prop := range sprop.Prop { - fi, ok := sprop.decoderTags.get(prop.Tag) - if !ok { - // XXX_unrecognized - continue - } - ft := t.Field(fi).Type - - sf, nested, err := fieldDefault(ft, prop) - switch { - case err != nil: - log.Print(err) - case nested: - dm.nested = append(dm.nested, fi) - case sf != nil: - sf.index = fi - dm.scalars = append(dm.scalars, *sf) - } - } - - return dm -} - -// fieldDefault returns the scalarField for field type ft. -// sf will be nil if the field can not have a default. -// nestedMessage will be true if this is a nested message. -// Note that sf.index is not set on return. 
-func fieldDefault(ft reflect.Type, prop *Properties) (sf *scalarField, nestedMessage bool, err error) { - var canHaveDefault bool - switch ft.Kind() { - case reflect.Ptr: - if ft.Elem().Kind() == reflect.Struct { - nestedMessage = true - } else { - canHaveDefault = true // proto2 scalar field - } - - case reflect.Slice: - switch ft.Elem().Kind() { - case reflect.Ptr: - nestedMessage = true // repeated message - case reflect.Uint8: - canHaveDefault = true // bytes field - } - - case reflect.Map: - if ft.Elem().Kind() == reflect.Ptr { - nestedMessage = true // map with message values - } - } - - if !canHaveDefault { - if nestedMessage { - return nil, true, nil - } - return nil, false, nil - } - - // We now know that ft is a pointer or slice. - sf = &scalarField{kind: ft.Elem().Kind()} - - // scalar fields without defaults - if !prop.HasDefault { - return sf, false, nil - } - - // a scalar field: either *T or []byte - switch ft.Elem().Kind() { - case reflect.Bool: - x, err := strconv.ParseBool(prop.Default) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default bool %q: %v", prop.Default, err) - } - sf.value = x - case reflect.Float32: - x, err := strconv.ParseFloat(prop.Default, 32) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default float32 %q: %v", prop.Default, err) - } - sf.value = float32(x) - case reflect.Float64: - x, err := strconv.ParseFloat(prop.Default, 64) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default float64 %q: %v", prop.Default, err) - } - sf.value = x - case reflect.Int32: - x, err := strconv.ParseInt(prop.Default, 10, 32) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default int32 %q: %v", prop.Default, err) - } - sf.value = int32(x) - case reflect.Int64: - x, err := strconv.ParseInt(prop.Default, 10, 64) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default int64 %q: %v", prop.Default, err) - } - sf.value = x - case reflect.String: - sf.value = prop.Default - case reflect.Uint8: - // []byte (not *uint8) - sf.value = []byte(prop.Default) - case reflect.Uint32: - x, err := strconv.ParseUint(prop.Default, 10, 32) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default uint32 %q: %v", prop.Default, err) - } - sf.value = uint32(x) - case reflect.Uint64: - x, err := strconv.ParseUint(prop.Default, 10, 64) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default uint64 %q: %v", prop.Default, err) - } - sf.value = x - default: - return nil, false, fmt.Errorf("proto: unhandled def kind %v", ft.Elem().Kind()) - } - - return sf, false, nil -} - -// Map fields may have key types of non-float scalars, strings and enums. -// The easiest way to sort them in some deterministic order is to use fmt. -// If this turns out to be inefficient we can always consider other options, -// such as doing a Schwartzian transform. - -func mapKeys(vs []reflect.Value) sort.Interface { - s := mapKeySorter{ - vs: vs, - // default Less function: textual comparison - less: func(a, b reflect.Value) bool { - return fmt.Sprint(a.Interface()) < fmt.Sprint(b.Interface()) - }, - } - - // Type specialization per https://developers.google.com/protocol-buffers/docs/proto#maps; - // numeric keys are sorted numerically. 
- if len(vs) == 0 { - return s - } - switch vs[0].Kind() { - case reflect.Int32, reflect.Int64: - s.less = func(a, b reflect.Value) bool { return a.Int() < b.Int() } - case reflect.Uint32, reflect.Uint64: - s.less = func(a, b reflect.Value) bool { return a.Uint() < b.Uint() } - } - - return s -} - -type mapKeySorter struct { - vs []reflect.Value - less func(a, b reflect.Value) bool -} - -func (s mapKeySorter) Len() int { return len(s.vs) } -func (s mapKeySorter) Swap(i, j int) { s.vs[i], s.vs[j] = s.vs[j], s.vs[i] } -func (s mapKeySorter) Less(i, j int) bool { - return s.less(s.vs[i], s.vs[j]) -} - -// isProto3Zero reports whether v is a zero proto3 value. -func isProto3Zero(v reflect.Value) bool { - switch v.Kind() { - case reflect.Bool: - return !v.Bool() - case reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint32, reflect.Uint64: - return v.Uint() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.String: - return v.String() == "" - } - return false -} - -// ProtoPackageIsVersion2 is referenced from generated protocol buffer files -// to assert that that code is compatible with this version of the proto package. -const ProtoPackageIsVersion2 = true - -// ProtoPackageIsVersion1 is referenced from generated protocol buffer files -// to assert that that code is compatible with this version of the proto package. -const ProtoPackageIsVersion1 = true diff --git a/vendor/github.com/golang/protobuf/proto/message_set.go b/vendor/github.com/golang/protobuf/proto/message_set.go deleted file mode 100644 index fd982de..0000000 --- a/vendor/github.com/golang/protobuf/proto/message_set.go +++ /dev/null @@ -1,311 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Support for message sets. 
- */ - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "reflect" - "sort" -) - -// errNoMessageTypeID occurs when a protocol buffer does not have a message type ID. -// A message type ID is required for storing a protocol buffer in a message set. -var errNoMessageTypeID = errors.New("proto does not have a message type ID") - -// The first two types (_MessageSet_Item and messageSet) -// model what the protocol compiler produces for the following protocol message: -// message MessageSet { -// repeated group Item = 1 { -// required int32 type_id = 2; -// required string message = 3; -// }; -// } -// That is the MessageSet wire format. We can't use a proto to generate these -// because that would introduce a circular dependency between it and this package. - -type _MessageSet_Item struct { - TypeId *int32 `protobuf:"varint,2,req,name=type_id"` - Message []byte `protobuf:"bytes,3,req,name=message"` -} - -type messageSet struct { - Item []*_MessageSet_Item `protobuf:"group,1,rep"` - XXX_unrecognized []byte - // TODO: caching? -} - -// Make sure messageSet is a Message. -var _ Message = (*messageSet)(nil) - -// messageTypeIder is an interface satisfied by a protocol buffer type -// that may be stored in a MessageSet. -type messageTypeIder interface { - MessageTypeId() int32 -} - -func (ms *messageSet) find(pb Message) *_MessageSet_Item { - mti, ok := pb.(messageTypeIder) - if !ok { - return nil - } - id := mti.MessageTypeId() - for _, item := range ms.Item { - if *item.TypeId == id { - return item - } - } - return nil -} - -func (ms *messageSet) Has(pb Message) bool { - if ms.find(pb) != nil { - return true - } - return false -} - -func (ms *messageSet) Unmarshal(pb Message) error { - if item := ms.find(pb); item != nil { - return Unmarshal(item.Message, pb) - } - if _, ok := pb.(messageTypeIder); !ok { - return errNoMessageTypeID - } - return nil // TODO: return error instead? -} - -func (ms *messageSet) Marshal(pb Message) error { - msg, err := Marshal(pb) - if err != nil { - return err - } - if item := ms.find(pb); item != nil { - // reuse existing item - item.Message = msg - return nil - } - - mti, ok := pb.(messageTypeIder) - if !ok { - return errNoMessageTypeID - } - - mtid := mti.MessageTypeId() - ms.Item = append(ms.Item, &_MessageSet_Item{ - TypeId: &mtid, - Message: msg, - }) - return nil -} - -func (ms *messageSet) Reset() { *ms = messageSet{} } -func (ms *messageSet) String() string { return CompactTextString(ms) } -func (*messageSet) ProtoMessage() {} - -// Support for the message_set_wire_format message option. - -func skipVarint(buf []byte) []byte { - i := 0 - for ; buf[i]&0x80 != 0; i++ { - } - return buf[i+1:] -} - -// MarshalMessageSet encodes the extension map represented by m in the message set wire format. -// It is called by generated Marshal methods on protocol buffer messages with the message_set_wire_format option. -func MarshalMessageSet(exts interface{}) ([]byte, error) { - var m map[int32]Extension - switch exts := exts.(type) { - case *XXX_InternalExtensions: - if err := encodeExtensions(exts); err != nil { - return nil, err - } - m, _ = exts.extensionsRead() - case map[int32]Extension: - if err := encodeExtensionsMap(exts); err != nil { - return nil, err - } - m = exts - default: - return nil, errors.New("proto: not an extension map") - } - - // Sort extension IDs to provide a deterministic encoding. - // See also enc_map in encode.go. 
- ids := make([]int, 0, len(m)) - for id := range m { - ids = append(ids, int(id)) - } - sort.Ints(ids) - - ms := &messageSet{Item: make([]*_MessageSet_Item, 0, len(m))} - for _, id := range ids { - e := m[int32(id)] - // Remove the wire type and field number varint, as well as the length varint. - msg := skipVarint(skipVarint(e.enc)) - - ms.Item = append(ms.Item, &_MessageSet_Item{ - TypeId: Int32(int32(id)), - Message: msg, - }) - } - return Marshal(ms) -} - -// UnmarshalMessageSet decodes the extension map encoded in buf in the message set wire format. -// It is called by generated Unmarshal methods on protocol buffer messages with the message_set_wire_format option. -func UnmarshalMessageSet(buf []byte, exts interface{}) error { - var m map[int32]Extension - switch exts := exts.(type) { - case *XXX_InternalExtensions: - m = exts.extensionsWrite() - case map[int32]Extension: - m = exts - default: - return errors.New("proto: not an extension map") - } - - ms := new(messageSet) - if err := Unmarshal(buf, ms); err != nil { - return err - } - for _, item := range ms.Item { - id := *item.TypeId - msg := item.Message - - // Restore wire type and field number varint, plus length varint. - // Be careful to preserve duplicate items. - b := EncodeVarint(uint64(id)<<3 | WireBytes) - if ext, ok := m[id]; ok { - // Existing data; rip off the tag and length varint - // so we join the new data correctly. - // We can assume that ext.enc is set because we are unmarshaling. - o := ext.enc[len(b):] // skip wire type and field number - _, n := DecodeVarint(o) // calculate length of length varint - o = o[n:] // skip length varint - msg = append(o, msg...) // join old data and new data - } - b = append(b, EncodeVarint(uint64(len(msg)))...) - b = append(b, msg...) - - m[id] = Extension{enc: b} - } - return nil -} - -// MarshalMessageSetJSON encodes the extension map represented by m in JSON format. -// It is called by generated MarshalJSON methods on protocol buffer messages with the message_set_wire_format option. -func MarshalMessageSetJSON(exts interface{}) ([]byte, error) { - var m map[int32]Extension - switch exts := exts.(type) { - case *XXX_InternalExtensions: - m, _ = exts.extensionsRead() - case map[int32]Extension: - m = exts - default: - return nil, errors.New("proto: not an extension map") - } - var b bytes.Buffer - b.WriteByte('{') - - // Process the map in key order for deterministic output. - ids := make([]int32, 0, len(m)) - for id := range m { - ids = append(ids, id) - } - sort.Sort(int32Slice(ids)) // int32Slice defined in text.go - - for i, id := range ids { - ext := m[id] - if i > 0 { - b.WriteByte(',') - } - - msd, ok := messageSetMap[id] - if !ok { - // Unknown type; we can't render it, so skip it. - continue - } - fmt.Fprintf(&b, `"[%s]":`, msd.name) - - x := ext.value - if x == nil { - x = reflect.New(msd.t.Elem()).Interface() - if err := Unmarshal(ext.enc, x.(Message)); err != nil { - return nil, err - } - } - d, err := json.Marshal(x) - if err != nil { - return nil, err - } - b.Write(d) - } - b.WriteByte('}') - return b.Bytes(), nil -} - -// UnmarshalMessageSetJSON decodes the extension map encoded in buf in JSON format. -// It is called by generated UnmarshalJSON methods on protocol buffer messages with the message_set_wire_format option. -func UnmarshalMessageSetJSON(buf []byte, exts interface{}) error { - // Common-case fast path. - if len(buf) == 0 || bytes.Equal(buf, []byte("{}")) { - return nil - } - - // This is fairly tricky, and it's not clear that it is needed. 
- return errors.New("TODO: UnmarshalMessageSetJSON not yet implemented") -} - -// A global registry of types that can be used in a MessageSet. - -var messageSetMap = make(map[int32]messageSetDesc) - -type messageSetDesc struct { - t reflect.Type // pointer to struct - name string -} - -// RegisterMessageSetType is called from the generated code. -func RegisterMessageSetType(m Message, fieldNum int32, name string) { - messageSetMap[fieldNum] = messageSetDesc{ - t: reflect.TypeOf(m), - name: name, - } -} diff --git a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go deleted file mode 100644 index fb512e2..0000000 --- a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go +++ /dev/null @@ -1,484 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2012 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build appengine js - -// This file contains an implementation of proto field accesses using package reflect. -// It is slower than the code in pointer_unsafe.go but it avoids package unsafe and can -// be used on App Engine. - -package proto - -import ( - "math" - "reflect" -) - -// A structPointer is a pointer to a struct. -type structPointer struct { - v reflect.Value -} - -// toStructPointer returns a structPointer equivalent to the given reflect value. -// The reflect value must itself be a pointer to a struct. -func toStructPointer(v reflect.Value) structPointer { - return structPointer{v} -} - -// IsNil reports whether p is nil. -func structPointer_IsNil(p structPointer) bool { - return p.v.IsNil() -} - -// Interface returns the struct pointer as an interface value. -func structPointer_Interface(p structPointer, _ reflect.Type) interface{} { - return p.v.Interface() -} - -// A field identifies a field in a struct, accessible from a structPointer. 
-// In this implementation, a field is identified by the sequence of field indices -// passed to reflect's FieldByIndex. -type field []int - -// toField returns a field equivalent to the given reflect field. -func toField(f *reflect.StructField) field { - return f.Index -} - -// invalidField is an invalid field identifier. -var invalidField = field(nil) - -// IsValid reports whether the field identifier is valid. -func (f field) IsValid() bool { return f != nil } - -// field returns the given field in the struct as a reflect value. -func structPointer_field(p structPointer, f field) reflect.Value { - // Special case: an extension map entry with a value of type T - // passes a *T to the struct-handling code with a zero field, - // expecting that it will be treated as equivalent to *struct{ X T }, - // which has the same memory layout. We have to handle that case - // specially, because reflect will panic if we call FieldByIndex on a - // non-struct. - if f == nil { - return p.v.Elem() - } - - return p.v.Elem().FieldByIndex(f) -} - -// ifield returns the given field in the struct as an interface value. -func structPointer_ifield(p structPointer, f field) interface{} { - return structPointer_field(p, f).Addr().Interface() -} - -// Bytes returns the address of a []byte field in the struct. -func structPointer_Bytes(p structPointer, f field) *[]byte { - return structPointer_ifield(p, f).(*[]byte) -} - -// BytesSlice returns the address of a [][]byte field in the struct. -func structPointer_BytesSlice(p structPointer, f field) *[][]byte { - return structPointer_ifield(p, f).(*[][]byte) -} - -// Bool returns the address of a *bool field in the struct. -func structPointer_Bool(p structPointer, f field) **bool { - return structPointer_ifield(p, f).(**bool) -} - -// BoolVal returns the address of a bool field in the struct. -func structPointer_BoolVal(p structPointer, f field) *bool { - return structPointer_ifield(p, f).(*bool) -} - -// BoolSlice returns the address of a []bool field in the struct. -func structPointer_BoolSlice(p structPointer, f field) *[]bool { - return structPointer_ifield(p, f).(*[]bool) -} - -// String returns the address of a *string field in the struct. -func structPointer_String(p structPointer, f field) **string { - return structPointer_ifield(p, f).(**string) -} - -// StringVal returns the address of a string field in the struct. -func structPointer_StringVal(p structPointer, f field) *string { - return structPointer_ifield(p, f).(*string) -} - -// StringSlice returns the address of a []string field in the struct. -func structPointer_StringSlice(p structPointer, f field) *[]string { - return structPointer_ifield(p, f).(*[]string) -} - -// Extensions returns the address of an extension map field in the struct. -func structPointer_Extensions(p structPointer, f field) *XXX_InternalExtensions { - return structPointer_ifield(p, f).(*XXX_InternalExtensions) -} - -// ExtMap returns the address of an extension map field in the struct. -func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension { - return structPointer_ifield(p, f).(*map[int32]Extension) -} - -// NewAt returns the reflect.Value for a pointer to a field in the struct. -func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value { - return structPointer_field(p, f).Addr() -} - -// SetStructPointer writes a *struct field in the struct. 
-func structPointer_SetStructPointer(p structPointer, f field, q structPointer) { - structPointer_field(p, f).Set(q.v) -} - -// GetStructPointer reads a *struct field in the struct. -func structPointer_GetStructPointer(p structPointer, f field) structPointer { - return structPointer{structPointer_field(p, f)} -} - -// StructPointerSlice the address of a []*struct field in the struct. -func structPointer_StructPointerSlice(p structPointer, f field) structPointerSlice { - return structPointerSlice{structPointer_field(p, f)} -} - -// A structPointerSlice represents the address of a slice of pointers to structs -// (themselves messages or groups). That is, v.Type() is *[]*struct{...}. -type structPointerSlice struct { - v reflect.Value -} - -func (p structPointerSlice) Len() int { return p.v.Len() } -func (p structPointerSlice) Index(i int) structPointer { return structPointer{p.v.Index(i)} } -func (p structPointerSlice) Append(q structPointer) { - p.v.Set(reflect.Append(p.v, q.v)) -} - -var ( - int32Type = reflect.TypeOf(int32(0)) - uint32Type = reflect.TypeOf(uint32(0)) - float32Type = reflect.TypeOf(float32(0)) - int64Type = reflect.TypeOf(int64(0)) - uint64Type = reflect.TypeOf(uint64(0)) - float64Type = reflect.TypeOf(float64(0)) -) - -// A word32 represents a field of type *int32, *uint32, *float32, or *enum. -// That is, v.Type() is *int32, *uint32, *float32, or *enum and v is assignable. -type word32 struct { - v reflect.Value -} - -// IsNil reports whether p is nil. -func word32_IsNil(p word32) bool { - return p.v.IsNil() -} - -// Set sets p to point at a newly allocated word with bits set to x. -func word32_Set(p word32, o *Buffer, x uint32) { - t := p.v.Type().Elem() - switch t { - case int32Type: - if len(o.int32s) == 0 { - o.int32s = make([]int32, uint32PoolSize) - } - o.int32s[0] = int32(x) - p.v.Set(reflect.ValueOf(&o.int32s[0])) - o.int32s = o.int32s[1:] - return - case uint32Type: - if len(o.uint32s) == 0 { - o.uint32s = make([]uint32, uint32PoolSize) - } - o.uint32s[0] = x - p.v.Set(reflect.ValueOf(&o.uint32s[0])) - o.uint32s = o.uint32s[1:] - return - case float32Type: - if len(o.float32s) == 0 { - o.float32s = make([]float32, uint32PoolSize) - } - o.float32s[0] = math.Float32frombits(x) - p.v.Set(reflect.ValueOf(&o.float32s[0])) - o.float32s = o.float32s[1:] - return - } - - // must be enum - p.v.Set(reflect.New(t)) - p.v.Elem().SetInt(int64(int32(x))) -} - -// Get gets the bits pointed at by p, as a uint32. -func word32_Get(p word32) uint32 { - elem := p.v.Elem() - switch elem.Kind() { - case reflect.Int32: - return uint32(elem.Int()) - case reflect.Uint32: - return uint32(elem.Uint()) - case reflect.Float32: - return math.Float32bits(float32(elem.Float())) - } - panic("unreachable") -} - -// Word32 returns a reference to a *int32, *uint32, *float32, or *enum field in the struct. -func structPointer_Word32(p structPointer, f field) word32 { - return word32{structPointer_field(p, f)} -} - -// A word32Val represents a field of type int32, uint32, float32, or enum. -// That is, v.Type() is int32, uint32, float32, or enum and v is assignable. -type word32Val struct { - v reflect.Value -} - -// Set sets *p to x. -func word32Val_Set(p word32Val, x uint32) { - switch p.v.Type() { - case int32Type: - p.v.SetInt(int64(x)) - return - case uint32Type: - p.v.SetUint(uint64(x)) - return - case float32Type: - p.v.SetFloat(float64(math.Float32frombits(x))) - return - } - - // must be enum - p.v.SetInt(int64(int32(x))) -} - -// Get gets the bits pointed at by p, as a uint32. 
-func word32Val_Get(p word32Val) uint32 { - elem := p.v - switch elem.Kind() { - case reflect.Int32: - return uint32(elem.Int()) - case reflect.Uint32: - return uint32(elem.Uint()) - case reflect.Float32: - return math.Float32bits(float32(elem.Float())) - } - panic("unreachable") -} - -// Word32Val returns a reference to a int32, uint32, float32, or enum field in the struct. -func structPointer_Word32Val(p structPointer, f field) word32Val { - return word32Val{structPointer_field(p, f)} -} - -// A word32Slice is a slice of 32-bit values. -// That is, v.Type() is []int32, []uint32, []float32, or []enum. -type word32Slice struct { - v reflect.Value -} - -func (p word32Slice) Append(x uint32) { - n, m := p.v.Len(), p.v.Cap() - if n < m { - p.v.SetLen(n + 1) - } else { - t := p.v.Type().Elem() - p.v.Set(reflect.Append(p.v, reflect.Zero(t))) - } - elem := p.v.Index(n) - switch elem.Kind() { - case reflect.Int32: - elem.SetInt(int64(int32(x))) - case reflect.Uint32: - elem.SetUint(uint64(x)) - case reflect.Float32: - elem.SetFloat(float64(math.Float32frombits(x))) - } -} - -func (p word32Slice) Len() int { - return p.v.Len() -} - -func (p word32Slice) Index(i int) uint32 { - elem := p.v.Index(i) - switch elem.Kind() { - case reflect.Int32: - return uint32(elem.Int()) - case reflect.Uint32: - return uint32(elem.Uint()) - case reflect.Float32: - return math.Float32bits(float32(elem.Float())) - } - panic("unreachable") -} - -// Word32Slice returns a reference to a []int32, []uint32, []float32, or []enum field in the struct. -func structPointer_Word32Slice(p structPointer, f field) word32Slice { - return word32Slice{structPointer_field(p, f)} -} - -// word64 is like word32 but for 64-bit values. -type word64 struct { - v reflect.Value -} - -func word64_Set(p word64, o *Buffer, x uint64) { - t := p.v.Type().Elem() - switch t { - case int64Type: - if len(o.int64s) == 0 { - o.int64s = make([]int64, uint64PoolSize) - } - o.int64s[0] = int64(x) - p.v.Set(reflect.ValueOf(&o.int64s[0])) - o.int64s = o.int64s[1:] - return - case uint64Type: - if len(o.uint64s) == 0 { - o.uint64s = make([]uint64, uint64PoolSize) - } - o.uint64s[0] = x - p.v.Set(reflect.ValueOf(&o.uint64s[0])) - o.uint64s = o.uint64s[1:] - return - case float64Type: - if len(o.float64s) == 0 { - o.float64s = make([]float64, uint64PoolSize) - } - o.float64s[0] = math.Float64frombits(x) - p.v.Set(reflect.ValueOf(&o.float64s[0])) - o.float64s = o.float64s[1:] - return - } - panic("unreachable") -} - -func word64_IsNil(p word64) bool { - return p.v.IsNil() -} - -func word64_Get(p word64) uint64 { - elem := p.v.Elem() - switch elem.Kind() { - case reflect.Int64: - return uint64(elem.Int()) - case reflect.Uint64: - return elem.Uint() - case reflect.Float64: - return math.Float64bits(elem.Float()) - } - panic("unreachable") -} - -func structPointer_Word64(p structPointer, f field) word64 { - return word64{structPointer_field(p, f)} -} - -// word64Val is like word32Val but for 64-bit values. 
-type word64Val struct { - v reflect.Value -} - -func word64Val_Set(p word64Val, o *Buffer, x uint64) { - switch p.v.Type() { - case int64Type: - p.v.SetInt(int64(x)) - return - case uint64Type: - p.v.SetUint(x) - return - case float64Type: - p.v.SetFloat(math.Float64frombits(x)) - return - } - panic("unreachable") -} - -func word64Val_Get(p word64Val) uint64 { - elem := p.v - switch elem.Kind() { - case reflect.Int64: - return uint64(elem.Int()) - case reflect.Uint64: - return elem.Uint() - case reflect.Float64: - return math.Float64bits(elem.Float()) - } - panic("unreachable") -} - -func structPointer_Word64Val(p structPointer, f field) word64Val { - return word64Val{structPointer_field(p, f)} -} - -type word64Slice struct { - v reflect.Value -} - -func (p word64Slice) Append(x uint64) { - n, m := p.v.Len(), p.v.Cap() - if n < m { - p.v.SetLen(n + 1) - } else { - t := p.v.Type().Elem() - p.v.Set(reflect.Append(p.v, reflect.Zero(t))) - } - elem := p.v.Index(n) - switch elem.Kind() { - case reflect.Int64: - elem.SetInt(int64(int64(x))) - case reflect.Uint64: - elem.SetUint(uint64(x)) - case reflect.Float64: - elem.SetFloat(float64(math.Float64frombits(x))) - } -} - -func (p word64Slice) Len() int { - return p.v.Len() -} - -func (p word64Slice) Index(i int) uint64 { - elem := p.v.Index(i) - switch elem.Kind() { - case reflect.Int64: - return uint64(elem.Int()) - case reflect.Uint64: - return uint64(elem.Uint()) - case reflect.Float64: - return math.Float64bits(float64(elem.Float())) - } - panic("unreachable") -} - -func structPointer_Word64Slice(p structPointer, f field) word64Slice { - return word64Slice{structPointer_field(p, f)} -} diff --git a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go deleted file mode 100644 index 6b5567d..0000000 --- a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go +++ /dev/null @@ -1,270 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2012 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build !appengine,!js - -// This file contains the implementation of the proto field accesses using package unsafe. - -package proto - -import ( - "reflect" - "unsafe" -) - -// NOTE: These type_Foo functions would more idiomatically be methods, -// but Go does not allow methods on pointer types, and we must preserve -// some pointer type for the garbage collector. We use these -// funcs with clunky names as our poor approximation to methods. -// -// An alternative would be -// type structPointer struct { p unsafe.Pointer } -// but that does not registerize as well. - -// A structPointer is a pointer to a struct. -type structPointer unsafe.Pointer - -// toStructPointer returns a structPointer equivalent to the given reflect value. -func toStructPointer(v reflect.Value) structPointer { - return structPointer(unsafe.Pointer(v.Pointer())) -} - -// IsNil reports whether p is nil. -func structPointer_IsNil(p structPointer) bool { - return p == nil -} - -// Interface returns the struct pointer, assumed to have element type t, -// as an interface value. -func structPointer_Interface(p structPointer, t reflect.Type) interface{} { - return reflect.NewAt(t, unsafe.Pointer(p)).Interface() -} - -// A field identifies a field in a struct, accessible from a structPointer. -// In this implementation, a field is identified by its byte offset from the start of the struct. -type field uintptr - -// toField returns a field equivalent to the given reflect field. -func toField(f *reflect.StructField) field { - return field(f.Offset) -} - -// invalidField is an invalid field identifier. -const invalidField = ^field(0) - -// IsValid reports whether the field identifier is valid. -func (f field) IsValid() bool { - return f != ^field(0) -} - -// Bytes returns the address of a []byte field in the struct. -func structPointer_Bytes(p structPointer, f field) *[]byte { - return (*[]byte)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// BytesSlice returns the address of a [][]byte field in the struct. -func structPointer_BytesSlice(p structPointer, f field) *[][]byte { - return (*[][]byte)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// Bool returns the address of a *bool field in the struct. -func structPointer_Bool(p structPointer, f field) **bool { - return (**bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// BoolVal returns the address of a bool field in the struct. -func structPointer_BoolVal(p structPointer, f field) *bool { - return (*bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// BoolSlice returns the address of a []bool field in the struct. -func structPointer_BoolSlice(p structPointer, f field) *[]bool { - return (*[]bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// String returns the address of a *string field in the struct. -func structPointer_String(p structPointer, f field) **string { - return (**string)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// StringVal returns the address of a string field in the struct. 
-func structPointer_StringVal(p structPointer, f field) *string { - return (*string)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// StringSlice returns the address of a []string field in the struct. -func structPointer_StringSlice(p structPointer, f field) *[]string { - return (*[]string)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// ExtMap returns the address of an extension map field in the struct. -func structPointer_Extensions(p structPointer, f field) *XXX_InternalExtensions { - return (*XXX_InternalExtensions)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension { - return (*map[int32]Extension)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// NewAt returns the reflect.Value for a pointer to a field in the struct. -func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value { - return reflect.NewAt(typ, unsafe.Pointer(uintptr(p)+uintptr(f))) -} - -// SetStructPointer writes a *struct field in the struct. -func structPointer_SetStructPointer(p structPointer, f field, q structPointer) { - *(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f))) = q -} - -// GetStructPointer reads a *struct field in the struct. -func structPointer_GetStructPointer(p structPointer, f field) structPointer { - return *(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// StructPointerSlice the address of a []*struct field in the struct. -func structPointer_StructPointerSlice(p structPointer, f field) *structPointerSlice { - return (*structPointerSlice)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// A structPointerSlice represents a slice of pointers to structs (themselves submessages or groups). -type structPointerSlice []structPointer - -func (v *structPointerSlice) Len() int { return len(*v) } -func (v *structPointerSlice) Index(i int) structPointer { return (*v)[i] } -func (v *structPointerSlice) Append(p structPointer) { *v = append(*v, p) } - -// A word32 is the address of a "pointer to 32-bit value" field. -type word32 **uint32 - -// IsNil reports whether *v is nil. -func word32_IsNil(p word32) bool { - return *p == nil -} - -// Set sets *v to point at a newly allocated word set to x. -func word32_Set(p word32, o *Buffer, x uint32) { - if len(o.uint32s) == 0 { - o.uint32s = make([]uint32, uint32PoolSize) - } - o.uint32s[0] = x - *p = &o.uint32s[0] - o.uint32s = o.uint32s[1:] -} - -// Get gets the value pointed at by *v. -func word32_Get(p word32) uint32 { - return **p -} - -// Word32 returns the address of a *int32, *uint32, *float32, or *enum field in the struct. -func structPointer_Word32(p structPointer, f field) word32 { - return word32((**uint32)(unsafe.Pointer(uintptr(p) + uintptr(f)))) -} - -// A word32Val is the address of a 32-bit value field. -type word32Val *uint32 - -// Set sets *p to x. -func word32Val_Set(p word32Val, x uint32) { - *p = x -} - -// Get gets the value pointed at by p. -func word32Val_Get(p word32Val) uint32 { - return *p -} - -// Word32Val returns the address of a *int32, *uint32, *float32, or *enum field in the struct. -func structPointer_Word32Val(p structPointer, f field) word32Val { - return word32Val((*uint32)(unsafe.Pointer(uintptr(p) + uintptr(f)))) -} - -// A word32Slice is a slice of 32-bit values. 
-type word32Slice []uint32 - -func (v *word32Slice) Append(x uint32) { *v = append(*v, x) } -func (v *word32Slice) Len() int { return len(*v) } -func (v *word32Slice) Index(i int) uint32 { return (*v)[i] } - -// Word32Slice returns the address of a []int32, []uint32, []float32, or []enum field in the struct. -func structPointer_Word32Slice(p structPointer, f field) *word32Slice { - return (*word32Slice)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} - -// word64 is like word32 but for 64-bit values. -type word64 **uint64 - -func word64_Set(p word64, o *Buffer, x uint64) { - if len(o.uint64s) == 0 { - o.uint64s = make([]uint64, uint64PoolSize) - } - o.uint64s[0] = x - *p = &o.uint64s[0] - o.uint64s = o.uint64s[1:] -} - -func word64_IsNil(p word64) bool { - return *p == nil -} - -func word64_Get(p word64) uint64 { - return **p -} - -func structPointer_Word64(p structPointer, f field) word64 { - return word64((**uint64)(unsafe.Pointer(uintptr(p) + uintptr(f)))) -} - -// word64Val is like word32Val but for 64-bit values. -type word64Val *uint64 - -func word64Val_Set(p word64Val, o *Buffer, x uint64) { - *p = x -} - -func word64Val_Get(p word64Val) uint64 { - return *p -} - -func structPointer_Word64Val(p structPointer, f field) word64Val { - return word64Val((*uint64)(unsafe.Pointer(uintptr(p) + uintptr(f)))) -} - -// word64Slice is like word32Slice but for 64-bit values. -type word64Slice []uint64 - -func (v *word64Slice) Append(x uint64) { *v = append(*v, x) } -func (v *word64Slice) Len() int { return len(*v) } -func (v *word64Slice) Index(i int) uint64 { return (*v)[i] } - -func structPointer_Word64Slice(p structPointer, f field) *word64Slice { - return (*word64Slice)(unsafe.Pointer(uintptr(p) + uintptr(f))) -} diff --git a/vendor/github.com/golang/protobuf/proto/properties.go b/vendor/github.com/golang/protobuf/proto/properties.go deleted file mode 100644 index ec2289c..0000000 --- a/vendor/github.com/golang/protobuf/proto/properties.go +++ /dev/null @@ -1,872 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Routines for encoding data into the wire format for protocol buffers. - */ - -import ( - "fmt" - "log" - "os" - "reflect" - "sort" - "strconv" - "strings" - "sync" -) - -const debug bool = false - -// Constants that identify the encoding of a value on the wire. -const ( - WireVarint = 0 - WireFixed64 = 1 - WireBytes = 2 - WireStartGroup = 3 - WireEndGroup = 4 - WireFixed32 = 5 -) - -const startSize = 10 // initial slice/string sizes - -// Encoders are defined in encode.go -// An encoder outputs the full representation of a field, including its -// tag and encoder type. -type encoder func(p *Buffer, prop *Properties, base structPointer) error - -// A valueEncoder encodes a single integer in a particular encoding. -type valueEncoder func(o *Buffer, x uint64) error - -// Sizers are defined in encode.go -// A sizer returns the encoded size of a field, including its tag and encoder -// type. -type sizer func(prop *Properties, base structPointer) int - -// A valueSizer returns the encoded size of a single integer in a particular -// encoding. -type valueSizer func(x uint64) int - -// Decoders are defined in decode.go -// A decoder creates a value from its wire representation. -// Unrecognized subelements are saved in unrec. -type decoder func(p *Buffer, prop *Properties, base structPointer) error - -// A valueDecoder decodes a single integer in a particular encoding. -type valueDecoder func(o *Buffer) (x uint64, err error) - -// A oneofMarshaler does the marshaling for all oneof fields in a message. -type oneofMarshaler func(Message, *Buffer) error - -// A oneofUnmarshaler does the unmarshaling for a oneof field in a message. -type oneofUnmarshaler func(Message, int, int, *Buffer) (bool, error) - -// A oneofSizer does the sizing for all oneof fields in a message. -type oneofSizer func(Message) int - -// tagMap is an optimization over map[int]int for typical protocol buffer -// use-cases. Encoded protocol buffers are often in tag order with small tag -// numbers. -type tagMap struct { - fastTags []int - slowTags map[int]int -} - -// tagMapFastLimit is the upper bound on the tag number that will be stored in -// the tagMap slice rather than its map. -const tagMapFastLimit = 1024 - -func (p *tagMap) get(t int) (int, bool) { - if t > 0 && t < tagMapFastLimit { - if t >= len(p.fastTags) { - return 0, false - } - fi := p.fastTags[t] - return fi, fi >= 0 - } - fi, ok := p.slowTags[t] - return fi, ok -} - -func (p *tagMap) put(t int, fi int) { - if t > 0 && t < tagMapFastLimit { - for len(p.fastTags) < t+1 { - p.fastTags = append(p.fastTags, -1) - } - p.fastTags[t] = fi - return - } - if p.slowTags == nil { - p.slowTags = make(map[int]int) - } - p.slowTags[t] = fi -} - -// StructProperties represents properties for all the fields of a struct. -// decoderTags and decoderOrigNames should only be used by the decoder. 
-type StructProperties struct { - Prop []*Properties // properties for each field - reqCount int // required count - decoderTags tagMap // map from proto tag to struct field number - decoderOrigNames map[string]int // map from original name to struct field number - order []int // list of struct field numbers in tag order - unrecField field // field id of the XXX_unrecognized []byte field - extendable bool // is this an extendable proto - - oneofMarshaler oneofMarshaler - oneofUnmarshaler oneofUnmarshaler - oneofSizer oneofSizer - stype reflect.Type - - // OneofTypes contains information about the oneof fields in this message. - // It is keyed by the original name of a field. - OneofTypes map[string]*OneofProperties -} - -// OneofProperties represents information about a specific field in a oneof. -type OneofProperties struct { - Type reflect.Type // pointer to generated struct type for this oneof field - Field int // struct field number of the containing oneof in the message - Prop *Properties -} - -// Implement the sorting interface so we can sort the fields in tag order, as recommended by the spec. -// See encode.go, (*Buffer).enc_struct. - -func (sp *StructProperties) Len() int { return len(sp.order) } -func (sp *StructProperties) Less(i, j int) bool { - return sp.Prop[sp.order[i]].Tag < sp.Prop[sp.order[j]].Tag -} -func (sp *StructProperties) Swap(i, j int) { sp.order[i], sp.order[j] = sp.order[j], sp.order[i] } - -// Properties represents the protocol-specific behavior of a single struct field. -type Properties struct { - Name string // name of the field, for error messages - OrigName string // original name before protocol compiler (always set) - JSONName string // name to use for JSON; determined by protoc - Wire string - WireType int - Tag int - Required bool - Optional bool - Repeated bool - Packed bool // relevant for repeated primitives only - Enum string // set for enum types only - proto3 bool // whether this is known to be a proto3 field; set for []byte only - oneof bool // whether this is a oneof field - - Default string // default value - HasDefault bool // whether an explicit default was provided - def_uint64 uint64 - - enc encoder - valEnc valueEncoder // set for bool and numeric types only - field field - tagcode []byte // encoding of EncodeVarint((Tag<<3)|WireType) - tagbuf [8]byte - stype reflect.Type // set for struct types only - sprop *StructProperties // set for struct types only - isMarshaler bool - isUnmarshaler bool - - mtype reflect.Type // set for map types only - mkeyprop *Properties // set for map types only - mvalprop *Properties // set for map types only - - size sizer - valSize valueSizer // set for bool and numeric types only - - dec decoder - valDec valueDecoder // set for bool and numeric types only - - // If this is a packable field, this will be the decoder for the packed version of the field. - packedDec decoder -} - -// String formats the properties in the protobuf struct field tag style. 
-func (p *Properties) String() string { - s := p.Wire - s = "," - s += strconv.Itoa(p.Tag) - if p.Required { - s += ",req" - } - if p.Optional { - s += ",opt" - } - if p.Repeated { - s += ",rep" - } - if p.Packed { - s += ",packed" - } - s += ",name=" + p.OrigName - if p.JSONName != p.OrigName { - s += ",json=" + p.JSONName - } - if p.proto3 { - s += ",proto3" - } - if p.oneof { - s += ",oneof" - } - if len(p.Enum) > 0 { - s += ",enum=" + p.Enum - } - if p.HasDefault { - s += ",def=" + p.Default - } - return s -} - -// Parse populates p by parsing a string in the protobuf struct field tag style. -func (p *Properties) Parse(s string) { - // "bytes,49,opt,name=foo,def=hello!" - fields := strings.Split(s, ",") // breaks def=, but handled below. - if len(fields) < 2 { - fmt.Fprintf(os.Stderr, "proto: tag has too few fields: %q\n", s) - return - } - - p.Wire = fields[0] - switch p.Wire { - case "varint": - p.WireType = WireVarint - p.valEnc = (*Buffer).EncodeVarint - p.valDec = (*Buffer).DecodeVarint - p.valSize = sizeVarint - case "fixed32": - p.WireType = WireFixed32 - p.valEnc = (*Buffer).EncodeFixed32 - p.valDec = (*Buffer).DecodeFixed32 - p.valSize = sizeFixed32 - case "fixed64": - p.WireType = WireFixed64 - p.valEnc = (*Buffer).EncodeFixed64 - p.valDec = (*Buffer).DecodeFixed64 - p.valSize = sizeFixed64 - case "zigzag32": - p.WireType = WireVarint - p.valEnc = (*Buffer).EncodeZigzag32 - p.valDec = (*Buffer).DecodeZigzag32 - p.valSize = sizeZigzag32 - case "zigzag64": - p.WireType = WireVarint - p.valEnc = (*Buffer).EncodeZigzag64 - p.valDec = (*Buffer).DecodeZigzag64 - p.valSize = sizeZigzag64 - case "bytes", "group": - p.WireType = WireBytes - // no numeric converter for non-numeric types - default: - fmt.Fprintf(os.Stderr, "proto: tag has unknown wire type: %q\n", s) - return - } - - var err error - p.Tag, err = strconv.Atoi(fields[1]) - if err != nil { - return - } - - for i := 2; i < len(fields); i++ { - f := fields[i] - switch { - case f == "req": - p.Required = true - case f == "opt": - p.Optional = true - case f == "rep": - p.Repeated = true - case f == "packed": - p.Packed = true - case strings.HasPrefix(f, "name="): - p.OrigName = f[5:] - case strings.HasPrefix(f, "json="): - p.JSONName = f[5:] - case strings.HasPrefix(f, "enum="): - p.Enum = f[5:] - case f == "proto3": - p.proto3 = true - case f == "oneof": - p.oneof = true - case strings.HasPrefix(f, "def="): - p.HasDefault = true - p.Default = f[4:] // rest of string - if i+1 < len(fields) { - // Commas aren't escaped, and def is always last. - p.Default += "," + strings.Join(fields[i+1:], ",") - break - } - } - } -} - -func logNoSliceEnc(t1, t2 reflect.Type) { - fmt.Fprintf(os.Stderr, "proto: no slice oenc for %T = []%T\n", t1, t2) -} - -var protoMessageType = reflect.TypeOf((*Message)(nil)).Elem() - -// Initialize the fields for encoding and decoding. 
-func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lockGetProp bool) { - p.enc = nil - p.dec = nil - p.size = nil - - switch t1 := typ; t1.Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no coders for %v\n", t1) - - // proto3 scalar types - - case reflect.Bool: - p.enc = (*Buffer).enc_proto3_bool - p.dec = (*Buffer).dec_proto3_bool - p.size = size_proto3_bool - case reflect.Int32: - p.enc = (*Buffer).enc_proto3_int32 - p.dec = (*Buffer).dec_proto3_int32 - p.size = size_proto3_int32 - case reflect.Uint32: - p.enc = (*Buffer).enc_proto3_uint32 - p.dec = (*Buffer).dec_proto3_int32 // can reuse - p.size = size_proto3_uint32 - case reflect.Int64, reflect.Uint64: - p.enc = (*Buffer).enc_proto3_int64 - p.dec = (*Buffer).dec_proto3_int64 - p.size = size_proto3_int64 - case reflect.Float32: - p.enc = (*Buffer).enc_proto3_uint32 // can just treat them as bits - p.dec = (*Buffer).dec_proto3_int32 - p.size = size_proto3_uint32 - case reflect.Float64: - p.enc = (*Buffer).enc_proto3_int64 // can just treat them as bits - p.dec = (*Buffer).dec_proto3_int64 - p.size = size_proto3_int64 - case reflect.String: - p.enc = (*Buffer).enc_proto3_string - p.dec = (*Buffer).dec_proto3_string - p.size = size_proto3_string - - case reflect.Ptr: - switch t2 := t1.Elem(); t2.Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no encoder function for %v -> %v\n", t1, t2) - break - case reflect.Bool: - p.enc = (*Buffer).enc_bool - p.dec = (*Buffer).dec_bool - p.size = size_bool - case reflect.Int32: - p.enc = (*Buffer).enc_int32 - p.dec = (*Buffer).dec_int32 - p.size = size_int32 - case reflect.Uint32: - p.enc = (*Buffer).enc_uint32 - p.dec = (*Buffer).dec_int32 // can reuse - p.size = size_uint32 - case reflect.Int64, reflect.Uint64: - p.enc = (*Buffer).enc_int64 - p.dec = (*Buffer).dec_int64 - p.size = size_int64 - case reflect.Float32: - p.enc = (*Buffer).enc_uint32 // can just treat them as bits - p.dec = (*Buffer).dec_int32 - p.size = size_uint32 - case reflect.Float64: - p.enc = (*Buffer).enc_int64 // can just treat them as bits - p.dec = (*Buffer).dec_int64 - p.size = size_int64 - case reflect.String: - p.enc = (*Buffer).enc_string - p.dec = (*Buffer).dec_string - p.size = size_string - case reflect.Struct: - p.stype = t1.Elem() - p.isMarshaler = isMarshaler(t1) - p.isUnmarshaler = isUnmarshaler(t1) - if p.Wire == "bytes" { - p.enc = (*Buffer).enc_struct_message - p.dec = (*Buffer).dec_struct_message - p.size = size_struct_message - } else { - p.enc = (*Buffer).enc_struct_group - p.dec = (*Buffer).dec_struct_group - p.size = size_struct_group - } - } - - case reflect.Slice: - switch t2 := t1.Elem(); t2.Kind() { - default: - logNoSliceEnc(t1, t2) - break - case reflect.Bool: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_bool - p.size = size_slice_packed_bool - } else { - p.enc = (*Buffer).enc_slice_bool - p.size = size_slice_bool - } - p.dec = (*Buffer).dec_slice_bool - p.packedDec = (*Buffer).dec_slice_packed_bool - case reflect.Int32: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_int32 - p.size = size_slice_packed_int32 - } else { - p.enc = (*Buffer).enc_slice_int32 - p.size = size_slice_int32 - } - p.dec = (*Buffer).dec_slice_int32 - p.packedDec = (*Buffer).dec_slice_packed_int32 - case reflect.Uint32: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_uint32 - p.size = size_slice_packed_uint32 - } else { - p.enc = (*Buffer).enc_slice_uint32 - p.size = size_slice_uint32 - } - p.dec = (*Buffer).dec_slice_int32 - p.packedDec = (*Buffer).dec_slice_packed_int32 - case 
reflect.Int64, reflect.Uint64: - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_int64 - p.size = size_slice_packed_int64 - } else { - p.enc = (*Buffer).enc_slice_int64 - p.size = size_slice_int64 - } - p.dec = (*Buffer).dec_slice_int64 - p.packedDec = (*Buffer).dec_slice_packed_int64 - case reflect.Uint8: - p.dec = (*Buffer).dec_slice_byte - if p.proto3 { - p.enc = (*Buffer).enc_proto3_slice_byte - p.size = size_proto3_slice_byte - } else { - p.enc = (*Buffer).enc_slice_byte - p.size = size_slice_byte - } - case reflect.Float32, reflect.Float64: - switch t2.Bits() { - case 32: - // can just treat them as bits - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_uint32 - p.size = size_slice_packed_uint32 - } else { - p.enc = (*Buffer).enc_slice_uint32 - p.size = size_slice_uint32 - } - p.dec = (*Buffer).dec_slice_int32 - p.packedDec = (*Buffer).dec_slice_packed_int32 - case 64: - // can just treat them as bits - if p.Packed { - p.enc = (*Buffer).enc_slice_packed_int64 - p.size = size_slice_packed_int64 - } else { - p.enc = (*Buffer).enc_slice_int64 - p.size = size_slice_int64 - } - p.dec = (*Buffer).dec_slice_int64 - p.packedDec = (*Buffer).dec_slice_packed_int64 - default: - logNoSliceEnc(t1, t2) - break - } - case reflect.String: - p.enc = (*Buffer).enc_slice_string - p.dec = (*Buffer).dec_slice_string - p.size = size_slice_string - case reflect.Ptr: - switch t3 := t2.Elem(); t3.Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no ptr oenc for %T -> %T -> %T\n", t1, t2, t3) - break - case reflect.Struct: - p.stype = t2.Elem() - p.isMarshaler = isMarshaler(t2) - p.isUnmarshaler = isUnmarshaler(t2) - if p.Wire == "bytes" { - p.enc = (*Buffer).enc_slice_struct_message - p.dec = (*Buffer).dec_slice_struct_message - p.size = size_slice_struct_message - } else { - p.enc = (*Buffer).enc_slice_struct_group - p.dec = (*Buffer).dec_slice_struct_group - p.size = size_slice_struct_group - } - } - case reflect.Slice: - switch t2.Elem().Kind() { - default: - fmt.Fprintf(os.Stderr, "proto: no slice elem oenc for %T -> %T -> %T\n", t1, t2, t2.Elem()) - break - case reflect.Uint8: - p.enc = (*Buffer).enc_slice_slice_byte - p.dec = (*Buffer).dec_slice_slice_byte - p.size = size_slice_slice_byte - } - } - - case reflect.Map: - p.enc = (*Buffer).enc_new_map - p.dec = (*Buffer).dec_new_map - p.size = size_new_map - - p.mtype = t1 - p.mkeyprop = &Properties{} - p.mkeyprop.init(reflect.PtrTo(p.mtype.Key()), "Key", f.Tag.Get("protobuf_key"), nil, lockGetProp) - p.mvalprop = &Properties{} - vtype := p.mtype.Elem() - if vtype.Kind() != reflect.Ptr && vtype.Kind() != reflect.Slice { - // The value type is not a message (*T) or bytes ([]byte), - // so we need encoders for the pointer to this type. - vtype = reflect.PtrTo(vtype) - } - p.mvalprop.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp) - } - - // precalculate tag code - wire := p.WireType - if p.Packed { - wire = WireBytes - } - x := uint32(p.Tag)<<3 | uint32(wire) - i := 0 - for i = 0; x > 127; i++ { - p.tagbuf[i] = 0x80 | uint8(x&0x7F) - x >>= 7 - } - p.tagbuf[i] = uint8(x) - p.tagcode = p.tagbuf[0 : i+1] - - if p.stype != nil { - if lockGetProp { - p.sprop = GetProperties(p.stype) - } else { - p.sprop = getPropertiesLocked(p.stype) - } - } -} - -var ( - marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem() - unmarshalerType = reflect.TypeOf((*Unmarshaler)(nil)).Elem() -) - -// isMarshaler reports whether type t implements Marshaler. 
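The isMarshaler and isUnmarshaler checks below let a type opt out of the reflection-based coders by supplying its own encoding. A minimal sketch of such a type, with an invented name:

// CustomBlob supplies its own wire representation, so the struct encoders
// detect it via isMarshaler/isUnmarshaler and delegate to these methods.
type CustomBlob struct {
	Data []byte
}

// Marshal satisfies proto.Marshaler.
func (b *CustomBlob) Marshal() ([]byte, error) {
	return b.Data, nil
}

// Unmarshal satisfies proto.Unmarshaler.
func (b *CustomBlob) Unmarshal(data []byte) error {
	b.Data = append(b.Data[:0], data...)
	return nil
}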
-func isMarshaler(t reflect.Type) bool { - // We're checking for (likely) pointer-receiver methods - // so if t is not a pointer, something is very wrong. - // The calls above only invoke isMarshaler on pointer types. - if t.Kind() != reflect.Ptr { - panic("proto: misuse of isMarshaler") - } - return t.Implements(marshalerType) -} - -// isUnmarshaler reports whether type t implements Unmarshaler. -func isUnmarshaler(t reflect.Type) bool { - // We're checking for (likely) pointer-receiver methods - // so if t is not a pointer, something is very wrong. - // The calls above only invoke isUnmarshaler on pointer types. - if t.Kind() != reflect.Ptr { - panic("proto: misuse of isUnmarshaler") - } - return t.Implements(unmarshalerType) -} - -// Init populates the properties from a protocol buffer struct tag. -func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) { - p.init(typ, name, tag, f, true) -} - -func (p *Properties) init(typ reflect.Type, name, tag string, f *reflect.StructField, lockGetProp bool) { - // "bytes,49,opt,def=hello!" - p.Name = name - p.OrigName = name - if f != nil { - p.field = toField(f) - } - if tag == "" { - return - } - p.Parse(tag) - p.setEncAndDec(typ, f, lockGetProp) -} - -var ( - propertiesMu sync.RWMutex - propertiesMap = make(map[reflect.Type]*StructProperties) -) - -// GetProperties returns the list of properties for the type represented by t. -// t must represent a generated struct type of a protocol message. -func GetProperties(t reflect.Type) *StructProperties { - if t.Kind() != reflect.Struct { - panic("proto: type must have kind struct") - } - - // Most calls to GetProperties in a long-running program will be - // retrieving details for types we have seen before. - propertiesMu.RLock() - sprop, ok := propertiesMap[t] - propertiesMu.RUnlock() - if ok { - if collectStats { - stats.Chit++ - } - return sprop - } - - propertiesMu.Lock() - sprop = getPropertiesLocked(t) - propertiesMu.Unlock() - return sprop -} - -// getPropertiesLocked requires that propertiesMu is held. -func getPropertiesLocked(t reflect.Type) *StructProperties { - if prop, ok := propertiesMap[t]; ok { - if collectStats { - stats.Chit++ - } - return prop - } - if collectStats { - stats.Cmiss++ - } - - prop := new(StructProperties) - // in case of recursive protos, fill this in now. - propertiesMap[t] = prop - - // build properties - prop.extendable = reflect.PtrTo(t).Implements(extendableProtoType) || - reflect.PtrTo(t).Implements(extendableProtoV1Type) - prop.unrecField = invalidField - prop.Prop = make([]*Properties, t.NumField()) - prop.order = make([]int, t.NumField()) - - for i := 0; i < t.NumField(); i++ { - f := t.Field(i) - p := new(Properties) - name := f.Name - p.init(f.Type, name, f.Tag.Get("protobuf"), &f, false) - - if f.Name == "XXX_InternalExtensions" { // special case - p.enc = (*Buffer).enc_exts - p.dec = nil // not needed - p.size = size_exts - } else if f.Name == "XXX_extensions" { // special case - p.enc = (*Buffer).enc_map - p.dec = nil // not needed - p.size = size_map - } else if f.Name == "XXX_unrecognized" { // special case - prop.unrecField = toField(&f) - } - oneof := f.Tag.Get("protobuf_oneof") // special case - if oneof != "" { - // Oneof fields don't use the traditional protobuf tag. 
- p.OrigName = oneof - } - prop.Prop[i] = p - prop.order[i] = i - if debug { - print(i, " ", f.Name, " ", t.String(), " ") - if p.Tag > 0 { - print(p.String()) - } - print("\n") - } - if p.enc == nil && !strings.HasPrefix(f.Name, "XXX_") && oneof == "" { - fmt.Fprintln(os.Stderr, "proto: no encoder for", f.Name, f.Type.String(), "[GetProperties]") - } - } - - // Re-order prop.order. - sort.Sort(prop) - - type oneofMessage interface { - XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{}) - } - if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok { - var oots []interface{} - prop.oneofMarshaler, prop.oneofUnmarshaler, prop.oneofSizer, oots = om.XXX_OneofFuncs() - prop.stype = t - - // Interpret oneof metadata. - prop.OneofTypes = make(map[string]*OneofProperties) - for _, oot := range oots { - oop := &OneofProperties{ - Type: reflect.ValueOf(oot).Type(), // *T - Prop: new(Properties), - } - sft := oop.Type.Elem().Field(0) - oop.Prop.Name = sft.Name - oop.Prop.Parse(sft.Tag.Get("protobuf")) - // There will be exactly one interface field that - // this new value is assignable to. - for i := 0; i < t.NumField(); i++ { - f := t.Field(i) - if f.Type.Kind() != reflect.Interface { - continue - } - if !oop.Type.AssignableTo(f.Type) { - continue - } - oop.Field = i - break - } - prop.OneofTypes[oop.Prop.OrigName] = oop - } - } - - // build required counts - // build tags - reqCount := 0 - prop.decoderOrigNames = make(map[string]int) - for i, p := range prop.Prop { - if strings.HasPrefix(p.Name, "XXX_") { - // Internal fields should not appear in tags/origNames maps. - // They are handled specially when encoding and decoding. - continue - } - if p.Required { - reqCount++ - } - prop.decoderTags.put(p.Tag, i) - prop.decoderOrigNames[p.OrigName] = i - } - prop.reqCount = reqCount - - return prop -} - -// Return the Properties object for the x[0]'th field of the structure. -func propByIndex(t reflect.Type, x []int) *Properties { - if len(x) != 1 { - fmt.Fprintf(os.Stderr, "proto: field index dimension %d (not 1) for type %s\n", len(x), t) - return nil - } - prop := GetProperties(t) - return prop.Prop[x[0]] -} - -// Get the address and type of a pointer to a struct from an interface. -func getbase(pb Message) (t reflect.Type, b structPointer, err error) { - if pb == nil { - err = ErrNil - return - } - // get the reflect type of the pointer to the struct. - t = reflect.TypeOf(pb) - // get the address of the struct. - value := reflect.ValueOf(pb) - b = toStructPointer(value) - return -} - -// A global registry of enum types. -// The generated code will register the generated maps by calling RegisterEnum. - -var enumValueMaps = make(map[string]map[string]int32) - -// RegisterEnum is called from the generated code to install the enum descriptor -// maps into the global table to aid parsing text format protocol buffers. -func RegisterEnum(typeName string, unusedNameMap map[int32]string, valueMap map[string]int32) { - if _, ok := enumValueMaps[typeName]; ok { - panic("proto: duplicate enum registered: " + typeName) - } - enumValueMaps[typeName] = valueMap -} - -// EnumValueMap returns the mapping from names to integers of the -// enum type enumType, or a nil if not found. -func EnumValueMap(enumType string) map[string]int32 { - return enumValueMaps[enumType] -} - -// A registry of all linked message types. -// The string is a fully-qualified proto name ("pkg.Message"). 
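RegisterEnum above and the type registry declared just below are populated from init functions in generated code. A hypothetical registration, reusing the invented Example type and an invented enum name:

func init() {
	// Make the Go type discoverable by its fully-qualified proto name,
	// which MessageType, MessageName and the text parser rely on.
	proto.RegisterType((*Example)(nil), "example.Example")

	// Enum value names feed EnumValueMap for text-format parsing.
	proto.RegisterEnum("example.Status", nil, map[string]int32{
		"UNKNOWN": 0,
		"ACTIVE":  1,
	})
}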
-var ( - protoTypes = make(map[string]reflect.Type) - revProtoTypes = make(map[reflect.Type]string) -) - -// RegisterType is called from generated code and maps from the fully qualified -// proto name to the type (pointer to struct) of the protocol buffer. -func RegisterType(x Message, name string) { - if _, ok := protoTypes[name]; ok { - // TODO: Some day, make this a panic. - log.Printf("proto: duplicate proto type registered: %s", name) - return - } - t := reflect.TypeOf(x) - protoTypes[name] = t - revProtoTypes[t] = name -} - -// MessageName returns the fully-qualified proto name for the given message type. -func MessageName(x Message) string { - type xname interface { - XXX_MessageName() string - } - if m, ok := x.(xname); ok { - return m.XXX_MessageName() - } - return revProtoTypes[reflect.TypeOf(x)] -} - -// MessageType returns the message type (pointer to struct) for a named message. -func MessageType(name string) reflect.Type { return protoTypes[name] } - -// A registry of all linked proto files. -var ( - protoFiles = make(map[string][]byte) // file name => fileDescriptor -) - -// RegisterFile is called from generated code and maps from the -// full file name of a .proto file to its compressed FileDescriptorProto. -func RegisterFile(filename string, fileDescriptor []byte) { - protoFiles[filename] = fileDescriptor -} - -// FileDescriptor returns the compressed FileDescriptorProto for a .proto file. -func FileDescriptor(filename string) []byte { return protoFiles[filename] } diff --git a/vendor/github.com/golang/protobuf/proto/text.go b/vendor/github.com/golang/protobuf/proto/text.go deleted file mode 100644 index 965876b..0000000 --- a/vendor/github.com/golang/protobuf/proto/text.go +++ /dev/null @@ -1,854 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -// Functions for writing the text protocol buffer format. 
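The writer below produces the human-readable text form exposed through MarshalText, MarshalTextString and CompactTextString at the end of this file. A rough usage sketch, reusing the invented Example message and assuming the usual "github.com/golang/protobuf/proto" import:

msg := &Example{Name: "hello", Count: 3}

// Multi-line text form, one "name: value" pair per line:
//   name: "hello"
//   count: 3
text := proto.MarshalTextString(msg)

// CompactTextString renders the same fields on a single line.
line := proto.CompactTextString(msg)
_, _ = text, line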
- -import ( - "bufio" - "bytes" - "encoding" - "errors" - "fmt" - "io" - "log" - "math" - "reflect" - "sort" - "strings" -) - -var ( - newline = []byte("\n") - spaces = []byte(" ") - gtNewline = []byte(">\n") - endBraceNewline = []byte("}\n") - backslashN = []byte{'\\', 'n'} - backslashR = []byte{'\\', 'r'} - backslashT = []byte{'\\', 't'} - backslashDQ = []byte{'\\', '"'} - backslashBS = []byte{'\\', '\\'} - posInf = []byte("inf") - negInf = []byte("-inf") - nan = []byte("nan") -) - -type writer interface { - io.Writer - WriteByte(byte) error -} - -// textWriter is an io.Writer that tracks its indentation level. -type textWriter struct { - ind int - complete bool // if the current position is a complete line - compact bool // whether to write out as a one-liner - w writer -} - -func (w *textWriter) WriteString(s string) (n int, err error) { - if !strings.Contains(s, "\n") { - if !w.compact && w.complete { - w.writeIndent() - } - w.complete = false - return io.WriteString(w.w, s) - } - // WriteString is typically called without newlines, so this - // codepath and its copy are rare. We copy to avoid - // duplicating all of Write's logic here. - return w.Write([]byte(s)) -} - -func (w *textWriter) Write(p []byte) (n int, err error) { - newlines := bytes.Count(p, newline) - if newlines == 0 { - if !w.compact && w.complete { - w.writeIndent() - } - n, err = w.w.Write(p) - w.complete = false - return n, err - } - - frags := bytes.SplitN(p, newline, newlines+1) - if w.compact { - for i, frag := range frags { - if i > 0 { - if err := w.w.WriteByte(' '); err != nil { - return n, err - } - n++ - } - nn, err := w.w.Write(frag) - n += nn - if err != nil { - return n, err - } - } - return n, nil - } - - for i, frag := range frags { - if w.complete { - w.writeIndent() - } - nn, err := w.w.Write(frag) - n += nn - if err != nil { - return n, err - } - if i+1 < len(frags) { - if err := w.w.WriteByte('\n'); err != nil { - return n, err - } - n++ - } - } - w.complete = len(frags[len(frags)-1]) == 0 - return n, nil -} - -func (w *textWriter) WriteByte(c byte) error { - if w.compact && c == '\n' { - c = ' ' - } - if !w.compact && w.complete { - w.writeIndent() - } - err := w.w.WriteByte(c) - w.complete = c == '\n' - return err -} - -func (w *textWriter) indent() { w.ind++ } - -func (w *textWriter) unindent() { - if w.ind == 0 { - log.Print("proto: textWriter unindented too far") - return - } - w.ind-- -} - -func writeName(w *textWriter, props *Properties) error { - if _, err := w.WriteString(props.OrigName); err != nil { - return err - } - if props.Wire != "group" { - return w.WriteByte(':') - } - return nil -} - -// raw is the interface satisfied by RawMessage. -type raw interface { - Bytes() []byte -} - -func requiresQuotes(u string) bool { - // When type URL contains any characters except [0-9A-Za-z./\-]*, it must be quoted. - for _, ch := range u { - switch { - case ch == '.' || ch == '/' || ch == '_': - continue - case '0' <= ch && ch <= '9': - continue - case 'A' <= ch && ch <= 'Z': - continue - case 'a' <= ch && ch <= 'z': - continue - default: - return true - } - } - return false -} - -// isAny reports whether sv is a google.protobuf.Any message -func isAny(sv reflect.Value) bool { - type wkt interface { - XXX_WellKnownType() string - } - t, ok := sv.Addr().Interface().(wkt) - return ok && t.XXX_WellKnownType() == "Any" -} - -// writeProto3Any writes an expanded google.protobuf.Any message. -// -// It returns (false, nil) if sv value can't be unmarshaled (e.g. 
because -// required messages are not linked in). -// -// It returns (true, error) when sv was written in expanded format or an error -// was encountered. -func (tm *TextMarshaler) writeProto3Any(w *textWriter, sv reflect.Value) (bool, error) { - turl := sv.FieldByName("TypeUrl") - val := sv.FieldByName("Value") - if !turl.IsValid() || !val.IsValid() { - return true, errors.New("proto: invalid google.protobuf.Any message") - } - - b, ok := val.Interface().([]byte) - if !ok { - return true, errors.New("proto: invalid google.protobuf.Any message") - } - - parts := strings.Split(turl.String(), "/") - mt := MessageType(parts[len(parts)-1]) - if mt == nil { - return false, nil - } - m := reflect.New(mt.Elem()) - if err := Unmarshal(b, m.Interface().(Message)); err != nil { - return false, nil - } - w.Write([]byte("[")) - u := turl.String() - if requiresQuotes(u) { - writeString(w, u) - } else { - w.Write([]byte(u)) - } - if w.compact { - w.Write([]byte("]:<")) - } else { - w.Write([]byte("]: <\n")) - w.ind++ - } - if err := tm.writeStruct(w, m.Elem()); err != nil { - return true, err - } - if w.compact { - w.Write([]byte("> ")) - } else { - w.ind-- - w.Write([]byte(">\n")) - } - return true, nil -} - -func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { - if tm.ExpandAny && isAny(sv) { - if canExpand, err := tm.writeProto3Any(w, sv); canExpand { - return err - } - } - st := sv.Type() - sprops := GetProperties(st) - for i := 0; i < sv.NumField(); i++ { - fv := sv.Field(i) - props := sprops.Prop[i] - name := st.Field(i).Name - - if strings.HasPrefix(name, "XXX_") { - // There are two XXX_ fields: - // XXX_unrecognized []byte - // XXX_extensions map[int32]proto.Extension - // The first is handled here; - // the second is handled at the bottom of this function. - if name == "XXX_unrecognized" && !fv.IsNil() { - if err := writeUnknownStruct(w, fv.Interface().([]byte)); err != nil { - return err - } - } - continue - } - if fv.Kind() == reflect.Ptr && fv.IsNil() { - // Field not filled in. This could be an optional field or - // a required field that wasn't filled in. Either way, there - // isn't anything we can show for it. - continue - } - if fv.Kind() == reflect.Slice && fv.IsNil() { - // Repeated field that is empty, or a bytes field that is unused. - continue - } - - if props.Repeated && fv.Kind() == reflect.Slice { - // Repeated field. - for j := 0; j < fv.Len(); j++ { - if err := writeName(w, props); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - v := fv.Index(j) - if v.Kind() == reflect.Ptr && v.IsNil() { - // A nil message in a repeated field is not valid, - // but we can handle that more gracefully than panicking. - if _, err := w.Write([]byte("\n")); err != nil { - return err - } - continue - } - if err := tm.writeAny(w, v, props); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - continue - } - if fv.Kind() == reflect.Map { - // Map fields are rendered as a repeated struct with key/value fields. 
- keys := fv.MapKeys() - sort.Sort(mapKeys(keys)) - for _, key := range keys { - val := fv.MapIndex(key) - if err := writeName(w, props); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - // open struct - if err := w.WriteByte('<'); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte('\n'); err != nil { - return err - } - } - w.indent() - // key - if _, err := w.WriteString("key:"); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if err := tm.writeAny(w, key, props.mkeyprop); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - // nil values aren't legal, but we can avoid panicking because of them. - if val.Kind() != reflect.Ptr || !val.IsNil() { - // value - if _, err := w.WriteString("value:"); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if err := tm.writeAny(w, val, props.mvalprop); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - // close struct - w.unindent() - if err := w.WriteByte('>'); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - continue - } - if props.proto3 && fv.Kind() == reflect.Slice && fv.Len() == 0 { - // empty bytes field - continue - } - if fv.Kind() != reflect.Ptr && fv.Kind() != reflect.Slice { - // proto3 non-repeated scalar field; skip if zero value - if isProto3Zero(fv) { - continue - } - } - - if fv.Kind() == reflect.Interface { - // Check if it is a oneof. - if st.Field(i).Tag.Get("protobuf_oneof") != "" { - // fv is nil, or holds a pointer to generated struct. - // That generated struct has exactly one field, - // which has a protobuf struct tag. - if fv.IsNil() { - continue - } - inner := fv.Elem().Elem() // interface -> *T -> T - tag := inner.Type().Field(0).Tag.Get("protobuf") - props = new(Properties) // Overwrite the outer props var, but not its pointee. - props.Parse(tag) - // Write the value in the oneof, not the oneof itself. - fv = inner.Field(0) - - // Special case to cope with malformed messages gracefully: - // If the value in the oneof is a nil pointer, don't panic - // in writeAny. - if fv.Kind() == reflect.Ptr && fv.IsNil() { - // Use errors.New so writeAny won't render quotes. - msg := errors.New("/* nil */") - fv = reflect.ValueOf(&msg).Elem() - } - } - } - - if err := writeName(w, props); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if b, ok := fv.Interface().(raw); ok { - if err := writeRaw(w, b.Bytes()); err != nil { - return err - } - continue - } - - // Enums have a String method, so writeAny will work fine. - if err := tm.writeAny(w, fv, props); err != nil { - return err - } - - if err := w.WriteByte('\n'); err != nil { - return err - } - } - - // Extensions (the XXX_extensions field). - pv := sv.Addr() - if _, ok := extendable(pv.Interface()); ok { - if err := tm.writeExtensions(w, pv); err != nil { - return err - } - } - - return nil -} - -// writeRaw writes an uninterpreted raw message. 
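The map handling just above writes each entry as its own nested block with key and value fields. With a hypothetical generated message carrying a map<string, int32> field named hits, the default (non-compact) writer produces roughly the output shown in the comment:

// Counters stands in for a generated type with a map<string, int32> field.
msg := &Counters{Hits: map[string]int32{"home": 3}}

// Each map entry becomes an angle-bracketed key/value block:
//
//   hits: <
//     key: "home"
//     value: 3
//   >
fmt.Print(proto.MarshalTextString(msg))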
-func writeRaw(w *textWriter, b []byte) error { - if err := w.WriteByte('<'); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte('\n'); err != nil { - return err - } - } - w.indent() - if err := writeUnknownStruct(w, b); err != nil { - return err - } - w.unindent() - if err := w.WriteByte('>'); err != nil { - return err - } - return nil -} - -// writeAny writes an arbitrary field. -func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error { - v = reflect.Indirect(v) - - // Floats have special cases. - if v.Kind() == reflect.Float32 || v.Kind() == reflect.Float64 { - x := v.Float() - var b []byte - switch { - case math.IsInf(x, 1): - b = posInf - case math.IsInf(x, -1): - b = negInf - case math.IsNaN(x): - b = nan - } - if b != nil { - _, err := w.Write(b) - return err - } - // Other values are handled below. - } - - // We don't attempt to serialise every possible value type; only those - // that can occur in protocol buffers. - switch v.Kind() { - case reflect.Slice: - // Should only be a []byte; repeated fields are handled in writeStruct. - if err := writeString(w, string(v.Bytes())); err != nil { - return err - } - case reflect.String: - if err := writeString(w, v.String()); err != nil { - return err - } - case reflect.Struct: - // Required/optional group/message. - var bra, ket byte = '<', '>' - if props != nil && props.Wire == "group" { - bra, ket = '{', '}' - } - if err := w.WriteByte(bra); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte('\n'); err != nil { - return err - } - } - w.indent() - if etm, ok := v.Interface().(encoding.TextMarshaler); ok { - text, err := etm.MarshalText() - if err != nil { - return err - } - if _, err = w.Write(text); err != nil { - return err - } - } else if err := tm.writeStruct(w, v); err != nil { - return err - } - w.unindent() - if err := w.WriteByte(ket); err != nil { - return err - } - default: - _, err := fmt.Fprint(w, v.Interface()) - return err - } - return nil -} - -// equivalent to C's isprint. -func isprint(c byte) bool { - return c >= 0x20 && c < 0x7f -} - -// writeString writes a string in the protocol buffer text format. -// It is similar to strconv.Quote except we don't use Go escape sequences, -// we treat the string as a byte sequence, and we use octal escapes. -// These differences are to maintain interoperability with the other -// languages' implementations of the text format. -func writeString(w *textWriter, s string) error { - // use WriteByte here to get any needed indent - if err := w.WriteByte('"'); err != nil { - return err - } - // Loop over the bytes, not the runes. - for i := 0; i < len(s); i++ { - var err error - // Divergence from C++: we don't escape apostrophes. - // There's no need to escape them, and the C++ parser - // copes with a naked apostrophe. 
- switch c := s[i]; c { - case '\n': - _, err = w.w.Write(backslashN) - case '\r': - _, err = w.w.Write(backslashR) - case '\t': - _, err = w.w.Write(backslashT) - case '"': - _, err = w.w.Write(backslashDQ) - case '\\': - _, err = w.w.Write(backslashBS) - default: - if isprint(c) { - err = w.w.WriteByte(c) - } else { - _, err = fmt.Fprintf(w.w, "\\%03o", c) - } - } - if err != nil { - return err - } - } - return w.WriteByte('"') -} - -func writeUnknownStruct(w *textWriter, data []byte) (err error) { - if !w.compact { - if _, err := fmt.Fprintf(w, "/* %d unknown bytes */\n", len(data)); err != nil { - return err - } - } - b := NewBuffer(data) - for b.index < len(b.buf) { - x, err := b.DecodeVarint() - if err != nil { - _, err := fmt.Fprintf(w, "/* %v */\n", err) - return err - } - wire, tag := x&7, x>>3 - if wire == WireEndGroup { - w.unindent() - if _, err := w.Write(endBraceNewline); err != nil { - return err - } - continue - } - if _, err := fmt.Fprint(w, tag); err != nil { - return err - } - if wire != WireStartGroup { - if err := w.WriteByte(':'); err != nil { - return err - } - } - if !w.compact || wire == WireStartGroup { - if err := w.WriteByte(' '); err != nil { - return err - } - } - switch wire { - case WireBytes: - buf, e := b.DecodeRawBytes(false) - if e == nil { - _, err = fmt.Fprintf(w, "%q", buf) - } else { - _, err = fmt.Fprintf(w, "/* %v */", e) - } - case WireFixed32: - x, err = b.DecodeFixed32() - err = writeUnknownInt(w, x, err) - case WireFixed64: - x, err = b.DecodeFixed64() - err = writeUnknownInt(w, x, err) - case WireStartGroup: - err = w.WriteByte('{') - w.indent() - case WireVarint: - x, err = b.DecodeVarint() - err = writeUnknownInt(w, x, err) - default: - _, err = fmt.Fprintf(w, "/* unknown wire type %d */", wire) - } - if err != nil { - return err - } - if err = w.WriteByte('\n'); err != nil { - return err - } - } - return nil -} - -func writeUnknownInt(w *textWriter, x uint64, err error) error { - if err == nil { - _, err = fmt.Fprint(w, x) - } else { - _, err = fmt.Fprintf(w, "/* %v */", err) - } - return err -} - -type int32Slice []int32 - -func (s int32Slice) Len() int { return len(s) } -func (s int32Slice) Less(i, j int) bool { return s[i] < s[j] } -func (s int32Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } - -// writeExtensions writes all the extensions in pv. -// pv is assumed to be a pointer to a protocol message struct that is extendable. -func (tm *TextMarshaler) writeExtensions(w *textWriter, pv reflect.Value) error { - emap := extensionMaps[pv.Type().Elem()] - ep, _ := extendable(pv.Interface()) - - // Order the extensions by ID. - // This isn't strictly necessary, but it will give us - // canonical output, which will also make testing easier. - m, mu := ep.extensionsRead() - if m == nil { - return nil - } - mu.Lock() - ids := make([]int32, 0, len(m)) - for id := range m { - ids = append(ids, id) - } - sort.Sort(int32Slice(ids)) - mu.Unlock() - - for _, extNum := range ids { - ext := m[extNum] - var desc *ExtensionDesc - if emap != nil { - desc = emap[extNum] - } - if desc == nil { - // Unknown extension. - if err := writeUnknownStruct(w, ext.enc); err != nil { - return err - } - continue - } - - pb, err := GetExtension(ep, desc) - if err != nil { - return fmt.Errorf("failed getting extension: %v", err) - } - - // Repeated extensions will appear as a slice. 
- if !desc.repeated() { - if err := tm.writeExtension(w, desc.Name, pb); err != nil { - return err - } - } else { - v := reflect.ValueOf(pb) - for i := 0; i < v.Len(); i++ { - if err := tm.writeExtension(w, desc.Name, v.Index(i).Interface()); err != nil { - return err - } - } - } - } - return nil -} - -func (tm *TextMarshaler) writeExtension(w *textWriter, name string, pb interface{}) error { - if _, err := fmt.Fprintf(w, "[%s]:", name); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if err := tm.writeAny(w, reflect.ValueOf(pb), nil); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - return nil -} - -func (w *textWriter) writeIndent() { - if !w.complete { - return - } - remain := w.ind * 2 - for remain > 0 { - n := remain - if n > len(spaces) { - n = len(spaces) - } - w.w.Write(spaces[:n]) - remain -= n - } - w.complete = false -} - -// TextMarshaler is a configurable text format marshaler. -type TextMarshaler struct { - Compact bool // use compact text format (one line). - ExpandAny bool // expand google.protobuf.Any messages of known types -} - -// Marshal writes a given protocol buffer in text format. -// The only errors returned are from w. -func (tm *TextMarshaler) Marshal(w io.Writer, pb Message) error { - val := reflect.ValueOf(pb) - if pb == nil || val.IsNil() { - w.Write([]byte("")) - return nil - } - var bw *bufio.Writer - ww, ok := w.(writer) - if !ok { - bw = bufio.NewWriter(w) - ww = bw - } - aw := &textWriter{ - w: ww, - complete: true, - compact: tm.Compact, - } - - if etm, ok := pb.(encoding.TextMarshaler); ok { - text, err := etm.MarshalText() - if err != nil { - return err - } - if _, err = aw.Write(text); err != nil { - return err - } - if bw != nil { - return bw.Flush() - } - return nil - } - // Dereference the received pointer so we don't have outer < and >. - v := reflect.Indirect(val) - if err := tm.writeStruct(aw, v); err != nil { - return err - } - if bw != nil { - return bw.Flush() - } - return nil -} - -// Text is the same as Marshal, but returns the string directly. -func (tm *TextMarshaler) Text(pb Message) string { - var buf bytes.Buffer - tm.Marshal(&buf, pb) - return buf.String() -} - -var ( - defaultTextMarshaler = TextMarshaler{} - compactTextMarshaler = TextMarshaler{Compact: true} -) - -// TODO: consider removing some of the Marshal functions below. - -// MarshalText writes a given protocol buffer in text format. -// The only errors returned are from w. -func MarshalText(w io.Writer, pb Message) error { return defaultTextMarshaler.Marshal(w, pb) } - -// MarshalTextString is the same as MarshalText, but returns the string directly. -func MarshalTextString(pb Message) string { return defaultTextMarshaler.Text(pb) } - -// CompactText writes a given protocol buffer in compact text format (one line). -func CompactText(w io.Writer, pb Message) error { return compactTextMarshaler.Marshal(w, pb) } - -// CompactTextString is the same as CompactText, but returns the string directly. -func CompactTextString(pb Message) string { return compactTextMarshaler.Text(pb) } diff --git a/vendor/github.com/golang/protobuf/proto/text_parser.go b/vendor/github.com/golang/protobuf/proto/text_parser.go deleted file mode 100644 index 61f83c1..0000000 --- a/vendor/github.com/golang/protobuf/proto/text_parser.go +++ /dev/null @@ -1,895 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. 
-// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -// Functions for parsing the Text protocol buffer format. -// TODO: message sets. - -import ( - "encoding" - "errors" - "fmt" - "reflect" - "strconv" - "strings" - "unicode/utf8" -) - -// Error string emitted when deserializing Any and fields are already set -const anyRepeatedlyUnpacked = "Any message unpacked multiple times, or %q already set" - -type ParseError struct { - Message string - Line int // 1-based line number - Offset int // 0-based byte offset from start of input -} - -func (p *ParseError) Error() string { - if p.Line == 1 { - // show offset only for first line - return fmt.Sprintf("line 1.%d: %v", p.Offset, p.Message) - } - return fmt.Sprintf("line %d: %v", p.Line, p.Message) -} - -type token struct { - value string - err *ParseError - line int // line number - offset int // byte number from start of input, not start of line - unquoted string // the unquoted version of value, if it was a quoted string -} - -func (t *token) String() string { - if t.err == nil { - return fmt.Sprintf("%q (line=%d, offset=%d)", t.value, t.line, t.offset) - } - return fmt.Sprintf("parse error: %v", t.err) -} - -type textParser struct { - s string // remaining input - done bool // whether the parsing is finished (success or error) - backed bool // whether back() was called - offset, line int - cur token -} - -func newTextParser(s string) *textParser { - p := new(textParser) - p.s = s - p.line = 1 - p.cur.line = 1 - return p -} - -func (p *textParser) errorf(format string, a ...interface{}) *ParseError { - pe := &ParseError{fmt.Sprintf(format, a...), p.cur.line, p.cur.offset} - p.cur.err = pe - p.done = true - return pe -} - -// Numbers and identifiers are matched by [-+._A-Za-z0-9] -func isIdentOrNumberChar(c byte) bool { - switch { - case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z': - return true - case '0' <= c && c <= '9': - return true - } - switch c { - case '-', '+', '.', '_': - return true - } - return false -} - -func isWhitespace(c byte) bool { - 
switch c { - case ' ', '\t', '\n', '\r': - return true - } - return false -} - -func isQuote(c byte) bool { - switch c { - case '"', '\'': - return true - } - return false -} - -func (p *textParser) skipWhitespace() { - i := 0 - for i < len(p.s) && (isWhitespace(p.s[i]) || p.s[i] == '#') { - if p.s[i] == '#' { - // comment; skip to end of line or input - for i < len(p.s) && p.s[i] != '\n' { - i++ - } - if i == len(p.s) { - break - } - } - if p.s[i] == '\n' { - p.line++ - } - i++ - } - p.offset += i - p.s = p.s[i:len(p.s)] - if len(p.s) == 0 { - p.done = true - } -} - -func (p *textParser) advance() { - // Skip whitespace - p.skipWhitespace() - if p.done { - return - } - - // Start of non-whitespace - p.cur.err = nil - p.cur.offset, p.cur.line = p.offset, p.line - p.cur.unquoted = "" - switch p.s[0] { - case '<', '>', '{', '}', ':', '[', ']', ';', ',', '/': - // Single symbol - p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)] - case '"', '\'': - // Quoted string - i := 1 - for i < len(p.s) && p.s[i] != p.s[0] && p.s[i] != '\n' { - if p.s[i] == '\\' && i+1 < len(p.s) { - // skip escaped char - i++ - } - i++ - } - if i >= len(p.s) || p.s[i] != p.s[0] { - p.errorf("unmatched quote") - return - } - unq, err := unquoteC(p.s[1:i], rune(p.s[0])) - if err != nil { - p.errorf("invalid quoted string %s: %v", p.s[0:i+1], err) - return - } - p.cur.value, p.s = p.s[0:i+1], p.s[i+1:len(p.s)] - p.cur.unquoted = unq - default: - i := 0 - for i < len(p.s) && isIdentOrNumberChar(p.s[i]) { - i++ - } - if i == 0 { - p.errorf("unexpected byte %#x", p.s[0]) - return - } - p.cur.value, p.s = p.s[0:i], p.s[i:len(p.s)] - } - p.offset += len(p.cur.value) -} - -var ( - errBadUTF8 = errors.New("proto: bad UTF-8") - errBadHex = errors.New("proto: bad hexadecimal") -) - -func unquoteC(s string, quote rune) (string, error) { - // This is based on C++'s tokenizer.cc. - // Despite its name, this is *not* parsing C syntax. - // For instance, "\0" is an invalid quoted string. - - // Avoid allocation in trivial cases. - simple := true - for _, r := range s { - if r == '\\' || r == quote { - simple = false - break - } - } - if simple { - return s, nil - } - - buf := make([]byte, 0, 3*len(s)/2) - for len(s) > 0 { - r, n := utf8.DecodeRuneInString(s) - if r == utf8.RuneError && n == 1 { - return "", errBadUTF8 - } - s = s[n:] - if r != '\\' { - if r < utf8.RuneSelf { - buf = append(buf, byte(r)) - } else { - buf = append(buf, string(r)...) - } - continue - } - - ch, tail, err := unescape(s) - if err != nil { - return "", err - } - buf = append(buf, ch...) 
- s = tail - } - return string(buf), nil -} - -func unescape(s string) (ch string, tail string, err error) { - r, n := utf8.DecodeRuneInString(s) - if r == utf8.RuneError && n == 1 { - return "", "", errBadUTF8 - } - s = s[n:] - switch r { - case 'a': - return "\a", s, nil - case 'b': - return "\b", s, nil - case 'f': - return "\f", s, nil - case 'n': - return "\n", s, nil - case 'r': - return "\r", s, nil - case 't': - return "\t", s, nil - case 'v': - return "\v", s, nil - case '?': - return "?", s, nil // trigraph workaround - case '\'', '"', '\\': - return string(r), s, nil - case '0', '1', '2', '3', '4', '5', '6', '7', 'x', 'X': - if len(s) < 2 { - return "", "", fmt.Errorf(`\%c requires 2 following digits`, r) - } - base := 8 - ss := s[:2] - s = s[2:] - if r == 'x' || r == 'X' { - base = 16 - } else { - ss = string(r) + ss - } - i, err := strconv.ParseUint(ss, base, 8) - if err != nil { - return "", "", err - } - return string([]byte{byte(i)}), s, nil - case 'u', 'U': - n := 4 - if r == 'U' { - n = 8 - } - if len(s) < n { - return "", "", fmt.Errorf(`\%c requires %d digits`, r, n) - } - - bs := make([]byte, n/2) - for i := 0; i < n; i += 2 { - a, ok1 := unhex(s[i]) - b, ok2 := unhex(s[i+1]) - if !ok1 || !ok2 { - return "", "", errBadHex - } - bs[i/2] = a<<4 | b - } - s = s[n:] - return string(bs), s, nil - } - return "", "", fmt.Errorf(`unknown escape \%c`, r) -} - -// Adapted from src/pkg/strconv/quote.go. -func unhex(b byte) (v byte, ok bool) { - switch { - case '0' <= b && b <= '9': - return b - '0', true - case 'a' <= b && b <= 'f': - return b - 'a' + 10, true - case 'A' <= b && b <= 'F': - return b - 'A' + 10, true - } - return 0, false -} - -// Back off the parser by one token. Can only be done between calls to next(). -// It makes the next advance() a no-op. -func (p *textParser) back() { p.backed = true } - -// Advances the parser and returns the new current token. -func (p *textParser) next() *token { - if p.backed || p.done { - p.backed = false - return &p.cur - } - p.advance() - if p.done { - p.cur.value = "" - } else if len(p.cur.value) > 0 && isQuote(p.cur.value[0]) { - // Look for multiple quoted strings separated by whitespace, - // and concatenate them. - cat := p.cur - for { - p.skipWhitespace() - if p.done || !isQuote(p.s[0]) { - break - } - p.advance() - if p.cur.err != nil { - return &p.cur - } - cat.value += " " + p.cur.value - cat.unquoted += p.cur.unquoted - } - p.done = false // parser may have seen EOF, but we want to return cat - p.cur = cat - } - return &p.cur -} - -func (p *textParser) consumeToken(s string) error { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value != s { - p.back() - return p.errorf("expected %q, found %q", s, tok.value) - } - return nil -} - -// Return a RequiredNotSetError indicating which required field was not set. -func (p *textParser) missingRequiredFieldError(sv reflect.Value) *RequiredNotSetError { - st := sv.Type() - sprops := GetProperties(st) - for i := 0; i < st.NumField(); i++ { - if !isNil(sv.Field(i)) { - continue - } - - props := sprops.Prop[i] - if props.Required { - return &RequiredNotSetError{fmt.Sprintf("%v.%v", st, props.OrigName)} - } - } - return &RequiredNotSetError{fmt.Sprintf("%v.", st)} // should not happen -} - -// Returns the index in the struct for the named field, as well as the parsed tag properties. 
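The tokenizer and the readStruct/readAny routines in the rest of this file back proto.UnmarshalText. A round-trip sketch, reusing the invented Example message and assuming the usual proto and log imports; the quoted string exercises the octal escapes handled by unquoteC above:

src := `name: "caf\303\251" count: 7`

var msg Example
if err := proto.UnmarshalText(src, &msg); err != nil {
	log.Fatal(err)
}
// msg.Name is now "café" (the two octal escapes decode to the UTF-8
// bytes of 'é') and msg.Count is 7.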
-func structFieldByName(sprops *StructProperties, name string) (int, *Properties, bool) { - i, ok := sprops.decoderOrigNames[name] - if ok { - return i, sprops.Prop[i], true - } - return -1, nil, false -} - -// Consume a ':' from the input stream (if the next token is a colon), -// returning an error if a colon is needed but not present. -func (p *textParser) checkForColon(props *Properties, typ reflect.Type) *ParseError { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value != ":" { - // Colon is optional when the field is a group or message. - needColon := true - switch props.Wire { - case "group": - needColon = false - case "bytes": - // A "bytes" field is either a message, a string, or a repeated field; - // those three become *T, *string and []T respectively, so we can check for - // this field being a pointer to a non-string. - if typ.Kind() == reflect.Ptr { - // *T or *string - if typ.Elem().Kind() == reflect.String { - break - } - } else if typ.Kind() == reflect.Slice { - // []T or []*T - if typ.Elem().Kind() != reflect.Ptr { - break - } - } else if typ.Kind() == reflect.String { - // The proto3 exception is for a string field, - // which requires a colon. - break - } - needColon = false - } - if needColon { - return p.errorf("expected ':', found %q", tok.value) - } - p.back() - } - return nil -} - -func (p *textParser) readStruct(sv reflect.Value, terminator string) error { - st := sv.Type() - sprops := GetProperties(st) - reqCount := sprops.reqCount - var reqFieldErr error - fieldSet := make(map[string]bool) - // A struct is a sequence of "name: value", terminated by one of - // '>' or '}', or the end of the input. A name may also be - // "[extension]" or "[type/url]". - // - // The whole struct can also be an expanded Any message, like: - // [type/url] < ... struct contents ... > - for { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == terminator { - break - } - if tok.value == "[" { - // Looks like an extension or an Any. - // - // TODO: Check whether we need to handle - // namespace rooted names (e.g. ".something.Foo"). - extName, err := p.consumeExtName() - if err != nil { - return err - } - - if s := strings.LastIndex(extName, "/"); s >= 0 { - // If it contains a slash, it's an Any type URL. - messageName := extName[s+1:] - mt := MessageType(messageName) - if mt == nil { - return p.errorf("unrecognized message %q in google.protobuf.Any", messageName) - } - tok = p.next() - if tok.err != nil { - return tok.err - } - // consume an optional colon - if tok.value == ":" { - tok = p.next() - if tok.err != nil { - return tok.err - } - } - var terminator string - switch tok.value { - case "<": - terminator = ">" - case "{": - terminator = "}" - default: - return p.errorf("expected '{' or '<', found %q", tok.value) - } - v := reflect.New(mt.Elem()) - if pe := p.readStruct(v.Elem(), terminator); pe != nil { - return pe - } - b, err := Marshal(v.Interface().(Message)) - if err != nil { - return p.errorf("failed to marshal message of type %q: %v", messageName, err) - } - if fieldSet["type_url"] { - return p.errorf(anyRepeatedlyUnpacked, "type_url") - } - if fieldSet["value"] { - return p.errorf(anyRepeatedlyUnpacked, "value") - } - sv.FieldByName("TypeUrl").SetString(extName) - sv.FieldByName("Value").SetBytes(b) - fieldSet["type_url"] = true - fieldSet["value"] = true - continue - } - - var desc *ExtensionDesc - // This could be faster, but it's functional. - // TODO: Do something smarter than a linear scan. 
- for _, d := range RegisteredExtensions(reflect.New(st).Interface().(Message)) { - if d.Name == extName { - desc = d - break - } - } - if desc == nil { - return p.errorf("unrecognized extension %q", extName) - } - - props := &Properties{} - props.Parse(desc.Tag) - - typ := reflect.TypeOf(desc.ExtensionType) - if err := p.checkForColon(props, typ); err != nil { - return err - } - - rep := desc.repeated() - - // Read the extension structure, and set it in - // the value we're constructing. - var ext reflect.Value - if !rep { - ext = reflect.New(typ).Elem() - } else { - ext = reflect.New(typ.Elem()).Elem() - } - if err := p.readAny(ext, props); err != nil { - if _, ok := err.(*RequiredNotSetError); !ok { - return err - } - reqFieldErr = err - } - ep := sv.Addr().Interface().(Message) - if !rep { - SetExtension(ep, desc, ext.Interface()) - } else { - old, err := GetExtension(ep, desc) - var sl reflect.Value - if err == nil { - sl = reflect.ValueOf(old) // existing slice - } else { - sl = reflect.MakeSlice(typ, 0, 1) - } - sl = reflect.Append(sl, ext) - SetExtension(ep, desc, sl.Interface()) - } - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - continue - } - - // This is a normal, non-extension field. - name := tok.value - var dst reflect.Value - fi, props, ok := structFieldByName(sprops, name) - if ok { - dst = sv.Field(fi) - } else if oop, ok := sprops.OneofTypes[name]; ok { - // It is a oneof. - props = oop.Prop - nv := reflect.New(oop.Type.Elem()) - dst = nv.Elem().Field(0) - field := sv.Field(oop.Field) - if !field.IsNil() { - return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, sv.Type().Field(oop.Field).Name) - } - field.Set(nv) - } - if !dst.IsValid() { - return p.errorf("unknown field name %q in %v", name, st) - } - - if dst.Kind() == reflect.Map { - // Consume any colon. - if err := p.checkForColon(props, dst.Type()); err != nil { - return err - } - - // Construct the map if it doesn't already exist. - if dst.IsNil() { - dst.Set(reflect.MakeMap(dst.Type())) - } - key := reflect.New(dst.Type().Key()).Elem() - val := reflect.New(dst.Type().Elem()).Elem() - - // The map entry should be this sequence of tokens: - // < key : KEY value : VALUE > - // However, implementations may omit key or value, and technically - // we should support them in any order. See b/28924776 for a time - // this went wrong. - - tok := p.next() - var terminator string - switch tok.value { - case "<": - terminator = ">" - case "{": - terminator = "}" - default: - return p.errorf("expected '{' or '<', found %q", tok.value) - } - for { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == terminator { - break - } - switch tok.value { - case "key": - if err := p.consumeToken(":"); err != nil { - return err - } - if err := p.readAny(key, props.mkeyprop); err != nil { - return err - } - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - case "value": - if err := p.checkForColon(props.mvalprop, dst.Type().Elem()); err != nil { - return err - } - if err := p.readAny(val, props.mvalprop); err != nil { - return err - } - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - default: - p.back() - return p.errorf(`expected "key", "value", or %q, found %q`, terminator, tok.value) - } - } - - dst.SetMapIndex(key, val) - continue - } - - // Check that it's not already set if it's not a repeated field. 
- if !props.Repeated && fieldSet[name] { - return p.errorf("non-repeated field %q was repeated", name) - } - - if err := p.checkForColon(props, dst.Type()); err != nil { - return err - } - - // Parse into the field. - fieldSet[name] = true - if err := p.readAny(dst, props); err != nil { - if _, ok := err.(*RequiredNotSetError); !ok { - return err - } - reqFieldErr = err - } - if props.Required { - reqCount-- - } - - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - - } - - if reqCount > 0 { - return p.missingRequiredFieldError(sv) - } - return reqFieldErr -} - -// consumeExtName consumes extension name or expanded Any type URL and the -// following ']'. It returns the name or URL consumed. -func (p *textParser) consumeExtName() (string, error) { - tok := p.next() - if tok.err != nil { - return "", tok.err - } - - // If extension name or type url is quoted, it's a single token. - if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] { - name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0])) - if err != nil { - return "", err - } - return name, p.consumeToken("]") - } - - // Consume everything up to "]" - var parts []string - for tok.value != "]" { - parts = append(parts, tok.value) - tok = p.next() - if tok.err != nil { - return "", p.errorf("unrecognized type_url or extension name: %s", tok.err) - } - } - return strings.Join(parts, ""), nil -} - -// consumeOptionalSeparator consumes an optional semicolon or comma. -// It is used in readStruct to provide backward compatibility. -func (p *textParser) consumeOptionalSeparator() error { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value != ";" && tok.value != "," { - p.back() - } - return nil -} - -func (p *textParser) readAny(v reflect.Value, props *Properties) error { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == "" { - return p.errorf("unexpected EOF") - } - - switch fv := v; fv.Kind() { - case reflect.Slice: - at := v.Type() - if at.Elem().Kind() == reflect.Uint8 { - // Special case for []byte - if tok.value[0] != '"' && tok.value[0] != '\'' { - // Deliberately written out here, as the error after - // this switch statement would write "invalid []byte: ...", - // which is not as user-friendly. - return p.errorf("invalid string: %v", tok.value) - } - bytes := []byte(tok.unquoted) - fv.Set(reflect.ValueOf(bytes)) - return nil - } - // Repeated field. - if tok.value == "[" { - // Repeated field with list notation, like [1,2,3]. - for { - fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem())) - err := p.readAny(fv.Index(fv.Len()-1), props) - if err != nil { - return err - } - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == "]" { - break - } - if tok.value != "," { - return p.errorf("Expected ']' or ',' found %q", tok.value) - } - } - return nil - } - // One value of the repeated field. - p.back() - fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem())) - return p.readAny(fv.Index(fv.Len()-1), props) - case reflect.Bool: - // true/1/t/True or false/f/0/False. - switch tok.value { - case "true", "1", "t", "True": - fv.SetBool(true) - return nil - case "false", "0", "f", "False": - fv.SetBool(false) - return nil - } - case reflect.Float32, reflect.Float64: - v := tok.value - // Ignore 'f' for compatibility with output generated by C++, but don't - // remove 'f' when the value is "-inf" or "inf". 
- if strings.HasSuffix(v, "f") && tok.value != "-inf" && tok.value != "inf" { - v = v[:len(v)-1] - } - if f, err := strconv.ParseFloat(v, fv.Type().Bits()); err == nil { - fv.SetFloat(f) - return nil - } - case reflect.Int32: - if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil { - fv.SetInt(x) - return nil - } - - if len(props.Enum) == 0 { - break - } - m, ok := enumValueMaps[props.Enum] - if !ok { - break - } - x, ok := m[tok.value] - if !ok { - break - } - fv.SetInt(int64(x)) - return nil - case reflect.Int64: - if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil { - fv.SetInt(x) - return nil - } - - case reflect.Ptr: - // A basic field (indirected through pointer), or a repeated message/group - p.back() - fv.Set(reflect.New(fv.Type().Elem())) - return p.readAny(fv.Elem(), props) - case reflect.String: - if tok.value[0] == '"' || tok.value[0] == '\'' { - fv.SetString(tok.unquoted) - return nil - } - case reflect.Struct: - var terminator string - switch tok.value { - case "{": - terminator = "}" - case "<": - terminator = ">" - default: - return p.errorf("expected '{' or '<', found %q", tok.value) - } - // TODO: Handle nested messages which implement encoding.TextUnmarshaler. - return p.readStruct(fv, terminator) - case reflect.Uint32: - if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil { - fv.SetUint(uint64(x)) - return nil - } - case reflect.Uint64: - if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil { - fv.SetUint(x) - return nil - } - } - return p.errorf("invalid %v: %v", v.Type(), tok.value) -} - -// UnmarshalText reads a protocol buffer in Text format. UnmarshalText resets pb -// before starting to unmarshal, so any existing data in pb is always removed. -// If a required field is not set and no other error occurs, -// UnmarshalText returns *RequiredNotSetError. -func UnmarshalText(s string, pb Message) error { - if um, ok := pb.(encoding.TextUnmarshaler); ok { - err := um.UnmarshalText([]byte(s)) - return err - } - pb.Reset() - v := reflect.ValueOf(pb) - if pe := newTextParser(s).readStruct(v.Elem(), ""); pe != nil { - return pe - } - return nil -} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile deleted file mode 100644 index 4942418..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile +++ /dev/null @@ -1,39 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# Not stored here, but descriptor.proto is in https://github.com/google/protobuf/ -# at src/google/protobuf/descriptor.proto -regenerate: - echo WARNING! THIS RULE IS PROBABLY NOT RIGHT FOR YOUR INSTALLATION - protoc --go_out=. -I$(HOME)/src/protobuf/src $(HOME)/src/protobuf/src/google/protobuf/descriptor.proto && \ - sed 's,^package google_protobuf,package descriptor,' google/protobuf/descriptor.pb.go > \ - $(GOPATH)/src/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go && \ - rm -f google/protobuf/descriptor.pb.go diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go deleted file mode 100644 index a1d8a76..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go +++ /dev/null @@ -1,2065 +0,0 @@ -// Code generated by protoc-gen-go. -// source: google/protobuf/descriptor.proto -// DO NOT EDIT! - -/* -Package descriptor is a generated protocol buffer package. - -It is generated from these files: - google/protobuf/descriptor.proto - -It has these top-level messages: - FileDescriptorSet - FileDescriptorProto - DescriptorProto - FieldDescriptorProto - OneofDescriptorProto - EnumDescriptorProto - EnumValueDescriptorProto - ServiceDescriptorProto - MethodDescriptorProto - FileOptions - MessageOptions - FieldOptions - OneofOptions - EnumOptions - EnumValueOptions - ServiceOptions - MethodOptions - UninterpretedOption - SourceCodeInfo - GeneratedCodeInfo -*/ -package descriptor - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type FieldDescriptorProto_Type int32 - -const ( - // 0 is reserved for errors. - // Order is weird for historical reasons. - FieldDescriptorProto_TYPE_DOUBLE FieldDescriptorProto_Type = 1 - FieldDescriptorProto_TYPE_FLOAT FieldDescriptorProto_Type = 2 - // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if - // negative values are likely. - FieldDescriptorProto_TYPE_INT64 FieldDescriptorProto_Type = 3 - FieldDescriptorProto_TYPE_UINT64 FieldDescriptorProto_Type = 4 - // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if - // negative values are likely. 
- FieldDescriptorProto_TYPE_INT32 FieldDescriptorProto_Type = 5 - FieldDescriptorProto_TYPE_FIXED64 FieldDescriptorProto_Type = 6 - FieldDescriptorProto_TYPE_FIXED32 FieldDescriptorProto_Type = 7 - FieldDescriptorProto_TYPE_BOOL FieldDescriptorProto_Type = 8 - FieldDescriptorProto_TYPE_STRING FieldDescriptorProto_Type = 9 - FieldDescriptorProto_TYPE_GROUP FieldDescriptorProto_Type = 10 - FieldDescriptorProto_TYPE_MESSAGE FieldDescriptorProto_Type = 11 - // New in version 2. - FieldDescriptorProto_TYPE_BYTES FieldDescriptorProto_Type = 12 - FieldDescriptorProto_TYPE_UINT32 FieldDescriptorProto_Type = 13 - FieldDescriptorProto_TYPE_ENUM FieldDescriptorProto_Type = 14 - FieldDescriptorProto_TYPE_SFIXED32 FieldDescriptorProto_Type = 15 - FieldDescriptorProto_TYPE_SFIXED64 FieldDescriptorProto_Type = 16 - FieldDescriptorProto_TYPE_SINT32 FieldDescriptorProto_Type = 17 - FieldDescriptorProto_TYPE_SINT64 FieldDescriptorProto_Type = 18 -) - -var FieldDescriptorProto_Type_name = map[int32]string{ - 1: "TYPE_DOUBLE", - 2: "TYPE_FLOAT", - 3: "TYPE_INT64", - 4: "TYPE_UINT64", - 5: "TYPE_INT32", - 6: "TYPE_FIXED64", - 7: "TYPE_FIXED32", - 8: "TYPE_BOOL", - 9: "TYPE_STRING", - 10: "TYPE_GROUP", - 11: "TYPE_MESSAGE", - 12: "TYPE_BYTES", - 13: "TYPE_UINT32", - 14: "TYPE_ENUM", - 15: "TYPE_SFIXED32", - 16: "TYPE_SFIXED64", - 17: "TYPE_SINT32", - 18: "TYPE_SINT64", -} -var FieldDescriptorProto_Type_value = map[string]int32{ - "TYPE_DOUBLE": 1, - "TYPE_FLOAT": 2, - "TYPE_INT64": 3, - "TYPE_UINT64": 4, - "TYPE_INT32": 5, - "TYPE_FIXED64": 6, - "TYPE_FIXED32": 7, - "TYPE_BOOL": 8, - "TYPE_STRING": 9, - "TYPE_GROUP": 10, - "TYPE_MESSAGE": 11, - "TYPE_BYTES": 12, - "TYPE_UINT32": 13, - "TYPE_ENUM": 14, - "TYPE_SFIXED32": 15, - "TYPE_SFIXED64": 16, - "TYPE_SINT32": 17, - "TYPE_SINT64": 18, -} - -func (x FieldDescriptorProto_Type) Enum() *FieldDescriptorProto_Type { - p := new(FieldDescriptorProto_Type) - *p = x - return p -} -func (x FieldDescriptorProto_Type) String() string { - return proto.EnumName(FieldDescriptorProto_Type_name, int32(x)) -} -func (x *FieldDescriptorProto_Type) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Type_value, data, "FieldDescriptorProto_Type") - if err != nil { - return err - } - *x = FieldDescriptorProto_Type(value) - return nil -} -func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{3, 0} } - -type FieldDescriptorProto_Label int32 - -const ( - // 0 is reserved for errors - FieldDescriptorProto_LABEL_OPTIONAL FieldDescriptorProto_Label = 1 - FieldDescriptorProto_LABEL_REQUIRED FieldDescriptorProto_Label = 2 - FieldDescriptorProto_LABEL_REPEATED FieldDescriptorProto_Label = 3 -) - -var FieldDescriptorProto_Label_name = map[int32]string{ - 1: "LABEL_OPTIONAL", - 2: "LABEL_REQUIRED", - 3: "LABEL_REPEATED", -} -var FieldDescriptorProto_Label_value = map[string]int32{ - "LABEL_OPTIONAL": 1, - "LABEL_REQUIRED": 2, - "LABEL_REPEATED": 3, -} - -func (x FieldDescriptorProto_Label) Enum() *FieldDescriptorProto_Label { - p := new(FieldDescriptorProto_Label) - *p = x - return p -} -func (x FieldDescriptorProto_Label) String() string { - return proto.EnumName(FieldDescriptorProto_Label_name, int32(x)) -} -func (x *FieldDescriptorProto_Label) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Label_value, data, "FieldDescriptorProto_Label") - if err != nil { - return err - } - *x = FieldDescriptorProto_Label(value) - return nil -} -func 
(FieldDescriptorProto_Label) EnumDescriptor() ([]byte, []int) { - return fileDescriptor0, []int{3, 1} -} - -// Generated classes can be optimized for speed or code size. -type FileOptions_OptimizeMode int32 - -const ( - FileOptions_SPEED FileOptions_OptimizeMode = 1 - // etc. - FileOptions_CODE_SIZE FileOptions_OptimizeMode = 2 - FileOptions_LITE_RUNTIME FileOptions_OptimizeMode = 3 -) - -var FileOptions_OptimizeMode_name = map[int32]string{ - 1: "SPEED", - 2: "CODE_SIZE", - 3: "LITE_RUNTIME", -} -var FileOptions_OptimizeMode_value = map[string]int32{ - "SPEED": 1, - "CODE_SIZE": 2, - "LITE_RUNTIME": 3, -} - -func (x FileOptions_OptimizeMode) Enum() *FileOptions_OptimizeMode { - p := new(FileOptions_OptimizeMode) - *p = x - return p -} -func (x FileOptions_OptimizeMode) String() string { - return proto.EnumName(FileOptions_OptimizeMode_name, int32(x)) -} -func (x *FileOptions_OptimizeMode) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FileOptions_OptimizeMode_value, data, "FileOptions_OptimizeMode") - if err != nil { - return err - } - *x = FileOptions_OptimizeMode(value) - return nil -} -func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{9, 0} } - -type FieldOptions_CType int32 - -const ( - // Default mode. - FieldOptions_STRING FieldOptions_CType = 0 - FieldOptions_CORD FieldOptions_CType = 1 - FieldOptions_STRING_PIECE FieldOptions_CType = 2 -) - -var FieldOptions_CType_name = map[int32]string{ - 0: "STRING", - 1: "CORD", - 2: "STRING_PIECE", -} -var FieldOptions_CType_value = map[string]int32{ - "STRING": 0, - "CORD": 1, - "STRING_PIECE": 2, -} - -func (x FieldOptions_CType) Enum() *FieldOptions_CType { - p := new(FieldOptions_CType) - *p = x - return p -} -func (x FieldOptions_CType) String() string { - return proto.EnumName(FieldOptions_CType_name, int32(x)) -} -func (x *FieldOptions_CType) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FieldOptions_CType_value, data, "FieldOptions_CType") - if err != nil { - return err - } - *x = FieldOptions_CType(value) - return nil -} -func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{11, 0} } - -type FieldOptions_JSType int32 - -const ( - // Use the default type. - FieldOptions_JS_NORMAL FieldOptions_JSType = 0 - // Use JavaScript strings. - FieldOptions_JS_STRING FieldOptions_JSType = 1 - // Use JavaScript numbers. - FieldOptions_JS_NUMBER FieldOptions_JSType = 2 -) - -var FieldOptions_JSType_name = map[int32]string{ - 0: "JS_NORMAL", - 1: "JS_STRING", - 2: "JS_NUMBER", -} -var FieldOptions_JSType_value = map[string]int32{ - "JS_NORMAL": 0, - "JS_STRING": 1, - "JS_NUMBER": 2, -} - -func (x FieldOptions_JSType) Enum() *FieldOptions_JSType { - p := new(FieldOptions_JSType) - *p = x - return p -} -func (x FieldOptions_JSType) String() string { - return proto.EnumName(FieldOptions_JSType_name, int32(x)) -} -func (x *FieldOptions_JSType) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FieldOptions_JSType_value, data, "FieldOptions_JSType") - if err != nil { - return err - } - *x = FieldOptions_JSType(value) - return nil -} -func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{11, 1} } - -// The protocol compiler can output a FileDescriptorSet containing the .proto -// files it parses. 
-type FileDescriptorSet struct { - File []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *FileDescriptorSet) Reset() { *m = FileDescriptorSet{} } -func (m *FileDescriptorSet) String() string { return proto.CompactTextString(m) } -func (*FileDescriptorSet) ProtoMessage() {} -func (*FileDescriptorSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *FileDescriptorSet) GetFile() []*FileDescriptorProto { - if m != nil { - return m.File - } - return nil -} - -// Describes a complete .proto file. -type FileDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Package *string `protobuf:"bytes,2,opt,name=package" json:"package,omitempty"` - // Names of files imported by this file. - Dependency []string `protobuf:"bytes,3,rep,name=dependency" json:"dependency,omitempty"` - // Indexes of the public imported files in the dependency list above. - PublicDependency []int32 `protobuf:"varint,10,rep,name=public_dependency,json=publicDependency" json:"public_dependency,omitempty"` - // Indexes of the weak imported files in the dependency list. - // For Google-internal migration only. Do not use. - WeakDependency []int32 `protobuf:"varint,11,rep,name=weak_dependency,json=weakDependency" json:"weak_dependency,omitempty"` - // All top-level definitions in this file. - MessageType []*DescriptorProto `protobuf:"bytes,4,rep,name=message_type,json=messageType" json:"message_type,omitempty"` - EnumType []*EnumDescriptorProto `protobuf:"bytes,5,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` - Service []*ServiceDescriptorProto `protobuf:"bytes,6,rep,name=service" json:"service,omitempty"` - Extension []*FieldDescriptorProto `protobuf:"bytes,7,rep,name=extension" json:"extension,omitempty"` - Options *FileOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` - // This field contains optional information about the original source code. - // You may safely remove this entire field without harming runtime - // functionality of the descriptors -- the information is needed only by - // development tools. - SourceCodeInfo *SourceCodeInfo `protobuf:"bytes,9,opt,name=source_code_info,json=sourceCodeInfo" json:"source_code_info,omitempty"` - // The syntax of the proto file. - // The supported values are "proto2" and "proto3". 
- Syntax *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *FileDescriptorProto) Reset() { *m = FileDescriptorProto{} } -func (m *FileDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*FileDescriptorProto) ProtoMessage() {} -func (*FileDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *FileDescriptorProto) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *FileDescriptorProto) GetPackage() string { - if m != nil && m.Package != nil { - return *m.Package - } - return "" -} - -func (m *FileDescriptorProto) GetDependency() []string { - if m != nil { - return m.Dependency - } - return nil -} - -func (m *FileDescriptorProto) GetPublicDependency() []int32 { - if m != nil { - return m.PublicDependency - } - return nil -} - -func (m *FileDescriptorProto) GetWeakDependency() []int32 { - if m != nil { - return m.WeakDependency - } - return nil -} - -func (m *FileDescriptorProto) GetMessageType() []*DescriptorProto { - if m != nil { - return m.MessageType - } - return nil -} - -func (m *FileDescriptorProto) GetEnumType() []*EnumDescriptorProto { - if m != nil { - return m.EnumType - } - return nil -} - -func (m *FileDescriptorProto) GetService() []*ServiceDescriptorProto { - if m != nil { - return m.Service - } - return nil -} - -func (m *FileDescriptorProto) GetExtension() []*FieldDescriptorProto { - if m != nil { - return m.Extension - } - return nil -} - -func (m *FileDescriptorProto) GetOptions() *FileOptions { - if m != nil { - return m.Options - } - return nil -} - -func (m *FileDescriptorProto) GetSourceCodeInfo() *SourceCodeInfo { - if m != nil { - return m.SourceCodeInfo - } - return nil -} - -func (m *FileDescriptorProto) GetSyntax() string { - if m != nil && m.Syntax != nil { - return *m.Syntax - } - return "" -} - -// Describes a message type. -type DescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Field []*FieldDescriptorProto `protobuf:"bytes,2,rep,name=field" json:"field,omitempty"` - Extension []*FieldDescriptorProto `protobuf:"bytes,6,rep,name=extension" json:"extension,omitempty"` - NestedType []*DescriptorProto `protobuf:"bytes,3,rep,name=nested_type,json=nestedType" json:"nested_type,omitempty"` - EnumType []*EnumDescriptorProto `protobuf:"bytes,4,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` - ExtensionRange []*DescriptorProto_ExtensionRange `protobuf:"bytes,5,rep,name=extension_range,json=extensionRange" json:"extension_range,omitempty"` - OneofDecl []*OneofDescriptorProto `protobuf:"bytes,8,rep,name=oneof_decl,json=oneofDecl" json:"oneof_decl,omitempty"` - Options *MessageOptions `protobuf:"bytes,7,opt,name=options" json:"options,omitempty"` - ReservedRange []*DescriptorProto_ReservedRange `protobuf:"bytes,9,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` - // Reserved field names, which may not be used by fields in the same message. - // A given name may only be reserved once. 
- ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *DescriptorProto) Reset() { *m = DescriptorProto{} } -func (m *DescriptorProto) String() string { return proto.CompactTextString(m) } -func (*DescriptorProto) ProtoMessage() {} -func (*DescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } - -func (m *DescriptorProto) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *DescriptorProto) GetField() []*FieldDescriptorProto { - if m != nil { - return m.Field - } - return nil -} - -func (m *DescriptorProto) GetExtension() []*FieldDescriptorProto { - if m != nil { - return m.Extension - } - return nil -} - -func (m *DescriptorProto) GetNestedType() []*DescriptorProto { - if m != nil { - return m.NestedType - } - return nil -} - -func (m *DescriptorProto) GetEnumType() []*EnumDescriptorProto { - if m != nil { - return m.EnumType - } - return nil -} - -func (m *DescriptorProto) GetExtensionRange() []*DescriptorProto_ExtensionRange { - if m != nil { - return m.ExtensionRange - } - return nil -} - -func (m *DescriptorProto) GetOneofDecl() []*OneofDescriptorProto { - if m != nil { - return m.OneofDecl - } - return nil -} - -func (m *DescriptorProto) GetOptions() *MessageOptions { - if m != nil { - return m.Options - } - return nil -} - -func (m *DescriptorProto) GetReservedRange() []*DescriptorProto_ReservedRange { - if m != nil { - return m.ReservedRange - } - return nil -} - -func (m *DescriptorProto) GetReservedName() []string { - if m != nil { - return m.ReservedName - } - return nil -} - -type DescriptorProto_ExtensionRange struct { - Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` - End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *DescriptorProto_ExtensionRange) Reset() { *m = DescriptorProto_ExtensionRange{} } -func (m *DescriptorProto_ExtensionRange) String() string { return proto.CompactTextString(m) } -func (*DescriptorProto_ExtensionRange) ProtoMessage() {} -func (*DescriptorProto_ExtensionRange) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{2, 0} -} - -func (m *DescriptorProto_ExtensionRange) GetStart() int32 { - if m != nil && m.Start != nil { - return *m.Start - } - return 0 -} - -func (m *DescriptorProto_ExtensionRange) GetEnd() int32 { - if m != nil && m.End != nil { - return *m.End - } - return 0 -} - -// Range of reserved tag numbers. Reserved tag numbers may not be used by -// fields or extension ranges in the same message. Reserved ranges may -// not overlap. 
-type DescriptorProto_ReservedRange struct { - Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` - End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *DescriptorProto_ReservedRange) Reset() { *m = DescriptorProto_ReservedRange{} } -func (m *DescriptorProto_ReservedRange) String() string { return proto.CompactTextString(m) } -func (*DescriptorProto_ReservedRange) ProtoMessage() {} -func (*DescriptorProto_ReservedRange) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{2, 1} -} - -func (m *DescriptorProto_ReservedRange) GetStart() int32 { - if m != nil && m.Start != nil { - return *m.Start - } - return 0 -} - -func (m *DescriptorProto_ReservedRange) GetEnd() int32 { - if m != nil && m.End != nil { - return *m.End - } - return 0 -} - -// Describes a field within a message. -type FieldDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Number *int32 `protobuf:"varint,3,opt,name=number" json:"number,omitempty"` - Label *FieldDescriptorProto_Label `protobuf:"varint,4,opt,name=label,enum=google.protobuf.FieldDescriptorProto_Label" json:"label,omitempty"` - // If type_name is set, this need not be set. If both this and type_name - // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. - Type *FieldDescriptorProto_Type `protobuf:"varint,5,opt,name=type,enum=google.protobuf.FieldDescriptorProto_Type" json:"type,omitempty"` - // For message and enum types, this is the name of the type. If the name - // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping - // rules are used to find the type (i.e. first the nested types within this - // message are searched, then within the parent, on up to the root - // namespace). - TypeName *string `protobuf:"bytes,6,opt,name=type_name,json=typeName" json:"type_name,omitempty"` - // For extensions, this is the name of the type being extended. It is - // resolved in the same manner as type_name. - Extendee *string `protobuf:"bytes,2,opt,name=extendee" json:"extendee,omitempty"` - // For numeric types, contains the original text representation of the value. - // For booleans, "true" or "false". - // For strings, contains the default text contents (not escaped in any way). - // For bytes, contains the C escaped value. All bytes >= 128 are escaped. - // TODO(kenton): Base-64 encode? - DefaultValue *string `protobuf:"bytes,7,opt,name=default_value,json=defaultValue" json:"default_value,omitempty"` - // If set, gives the index of a oneof in the containing type's oneof_decl - // list. This field is a member of that oneof. - OneofIndex *int32 `protobuf:"varint,9,opt,name=oneof_index,json=oneofIndex" json:"oneof_index,omitempty"` - // JSON name of this field. The value is set by protocol compiler. If the - // user has set a "json_name" option on this field, that option's value - // will be used. Otherwise, it's deduced from the field's name by converting - // it to camelCase. 
- JsonName *string `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"` - Options *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *FieldDescriptorProto) Reset() { *m = FieldDescriptorProto{} } -func (m *FieldDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*FieldDescriptorProto) ProtoMessage() {} -func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } - -func (m *FieldDescriptorProto) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *FieldDescriptorProto) GetNumber() int32 { - if m != nil && m.Number != nil { - return *m.Number - } - return 0 -} - -func (m *FieldDescriptorProto) GetLabel() FieldDescriptorProto_Label { - if m != nil && m.Label != nil { - return *m.Label - } - return FieldDescriptorProto_LABEL_OPTIONAL -} - -func (m *FieldDescriptorProto) GetType() FieldDescriptorProto_Type { - if m != nil && m.Type != nil { - return *m.Type - } - return FieldDescriptorProto_TYPE_DOUBLE -} - -func (m *FieldDescriptorProto) GetTypeName() string { - if m != nil && m.TypeName != nil { - return *m.TypeName - } - return "" -} - -func (m *FieldDescriptorProto) GetExtendee() string { - if m != nil && m.Extendee != nil { - return *m.Extendee - } - return "" -} - -func (m *FieldDescriptorProto) GetDefaultValue() string { - if m != nil && m.DefaultValue != nil { - return *m.DefaultValue - } - return "" -} - -func (m *FieldDescriptorProto) GetOneofIndex() int32 { - if m != nil && m.OneofIndex != nil { - return *m.OneofIndex - } - return 0 -} - -func (m *FieldDescriptorProto) GetJsonName() string { - if m != nil && m.JsonName != nil { - return *m.JsonName - } - return "" -} - -func (m *FieldDescriptorProto) GetOptions() *FieldOptions { - if m != nil { - return m.Options - } - return nil -} - -// Describes a oneof. -type OneofDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OneofDescriptorProto) Reset() { *m = OneofDescriptorProto{} } -func (m *OneofDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*OneofDescriptorProto) ProtoMessage() {} -func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } - -func (m *OneofDescriptorProto) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *OneofDescriptorProto) GetOptions() *OneofOptions { - if m != nil { - return m.Options - } - return nil -} - -// Describes an enum type. 
-type EnumDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Value []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` - Options *EnumOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *EnumDescriptorProto) Reset() { *m = EnumDescriptorProto{} } -func (m *EnumDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*EnumDescriptorProto) ProtoMessage() {} -func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } - -func (m *EnumDescriptorProto) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *EnumDescriptorProto) GetValue() []*EnumValueDescriptorProto { - if m != nil { - return m.Value - } - return nil -} - -func (m *EnumDescriptorProto) GetOptions() *EnumOptions { - if m != nil { - return m.Options - } - return nil -} - -// Describes a value within an enum. -type EnumValueDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Number *int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"` - Options *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *EnumValueDescriptorProto) Reset() { *m = EnumValueDescriptorProto{} } -func (m *EnumValueDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*EnumValueDescriptorProto) ProtoMessage() {} -func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } - -func (m *EnumValueDescriptorProto) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *EnumValueDescriptorProto) GetNumber() int32 { - if m != nil && m.Number != nil { - return *m.Number - } - return 0 -} - -func (m *EnumValueDescriptorProto) GetOptions() *EnumValueOptions { - if m != nil { - return m.Options - } - return nil -} - -// Describes a service. -type ServiceDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Method []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"` - Options *ServiceOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *ServiceDescriptorProto) Reset() { *m = ServiceDescriptorProto{} } -func (m *ServiceDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*ServiceDescriptorProto) ProtoMessage() {} -func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } - -func (m *ServiceDescriptorProto) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *ServiceDescriptorProto) GetMethod() []*MethodDescriptorProto { - if m != nil { - return m.Method - } - return nil -} - -func (m *ServiceDescriptorProto) GetOptions() *ServiceOptions { - if m != nil { - return m.Options - } - return nil -} - -// Describes a method of a service. -type MethodDescriptorProto struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - // Input and output type names. These are resolved in the same way as - // FieldDescriptorProto.type_name, but must refer to a message type. 
- InputType *string `protobuf:"bytes,2,opt,name=input_type,json=inputType" json:"input_type,omitempty"` - OutputType *string `protobuf:"bytes,3,opt,name=output_type,json=outputType" json:"output_type,omitempty"` - Options *MethodOptions `protobuf:"bytes,4,opt,name=options" json:"options,omitempty"` - // Identifies if client streams multiple client messages - ClientStreaming *bool `protobuf:"varint,5,opt,name=client_streaming,json=clientStreaming,def=0" json:"client_streaming,omitempty"` - // Identifies if server streams multiple server messages - ServerStreaming *bool `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MethodDescriptorProto) Reset() { *m = MethodDescriptorProto{} } -func (m *MethodDescriptorProto) String() string { return proto.CompactTextString(m) } -func (*MethodDescriptorProto) ProtoMessage() {} -func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } - -const Default_MethodDescriptorProto_ClientStreaming bool = false -const Default_MethodDescriptorProto_ServerStreaming bool = false - -func (m *MethodDescriptorProto) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *MethodDescriptorProto) GetInputType() string { - if m != nil && m.InputType != nil { - return *m.InputType - } - return "" -} - -func (m *MethodDescriptorProto) GetOutputType() string { - if m != nil && m.OutputType != nil { - return *m.OutputType - } - return "" -} - -func (m *MethodDescriptorProto) GetOptions() *MethodOptions { - if m != nil { - return m.Options - } - return nil -} - -func (m *MethodDescriptorProto) GetClientStreaming() bool { - if m != nil && m.ClientStreaming != nil { - return *m.ClientStreaming - } - return Default_MethodDescriptorProto_ClientStreaming -} - -func (m *MethodDescriptorProto) GetServerStreaming() bool { - if m != nil && m.ServerStreaming != nil { - return *m.ServerStreaming - } - return Default_MethodDescriptorProto_ServerStreaming -} - -type FileOptions struct { - // Sets the Java package where classes generated from this .proto will be - // placed. By default, the proto package is used, but this is often - // inappropriate because proto packages do not normally start with backwards - // domain names. - JavaPackage *string `protobuf:"bytes,1,opt,name=java_package,json=javaPackage" json:"java_package,omitempty"` - // If set, all the classes from the .proto file are wrapped in a single - // outer class with the given name. This applies to both Proto1 - // (equivalent to the old "--one_java_file" option) and Proto2 (where - // a .proto always translates to a single class, but you may want to - // explicitly choose the class name). - JavaOuterClassname *string `protobuf:"bytes,8,opt,name=java_outer_classname,json=javaOuterClassname" json:"java_outer_classname,omitempty"` - // If set true, then the Java code generator will generate a separate .java - // file for each top-level message, enum, and service defined in the .proto - // file. Thus, these types will *not* be nested inside the outer class - // named by java_outer_classname. However, the outer class will still be - // generated to contain the file's getDescriptor() method as well as any - // top-level extensions defined in the file. - JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"` - // This option does nothing. 
- JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"` - // If set true, then the Java2 code generator will generate code that - // throws an exception whenever an attempt is made to assign a non-UTF-8 - // byte sequence to a string field. - // Message reflection will do the same. - // However, an extension field still accepts non-UTF-8 byte sequences. - // This option has no effect on when used with the lite runtime. - JavaStringCheckUtf8 *bool `protobuf:"varint,27,opt,name=java_string_check_utf8,json=javaStringCheckUtf8,def=0" json:"java_string_check_utf8,omitempty"` - OptimizeFor *FileOptions_OptimizeMode `protobuf:"varint,9,opt,name=optimize_for,json=optimizeFor,enum=google.protobuf.FileOptions_OptimizeMode,def=1" json:"optimize_for,omitempty"` - // Sets the Go package where structs generated from this .proto will be - // placed. If omitted, the Go package will be derived from the following: - // - The basename of the package import path, if provided. - // - Otherwise, the package statement in the .proto file, if present. - // - Otherwise, the basename of the .proto file, without extension. - GoPackage *string `protobuf:"bytes,11,opt,name=go_package,json=goPackage" json:"go_package,omitempty"` - // Should generic services be generated in each language? "Generic" services - // are not specific to any particular RPC system. They are generated by the - // main code generators in each language (without additional plugins). - // Generic services were the only kind of service generation supported by - // early versions of google.protobuf. - // - // Generic services are now considered deprecated in favor of using plugins - // that generate code specific to your particular RPC system. Therefore, - // these default to false. Old code which depends on generic services should - // explicitly set them to true. - CcGenericServices *bool `protobuf:"varint,16,opt,name=cc_generic_services,json=ccGenericServices,def=0" json:"cc_generic_services,omitempty"` - JavaGenericServices *bool `protobuf:"varint,17,opt,name=java_generic_services,json=javaGenericServices,def=0" json:"java_generic_services,omitempty"` - PyGenericServices *bool `protobuf:"varint,18,opt,name=py_generic_services,json=pyGenericServices,def=0" json:"py_generic_services,omitempty"` - // Is this file deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for everything in the file, or it will be completely ignored; in the very - // least, this is a formalization for deprecating files. - Deprecated *bool `protobuf:"varint,23,opt,name=deprecated,def=0" json:"deprecated,omitempty"` - // Enables the use of arenas for the proto messages in this file. This applies - // only to generated classes for C++. - CcEnableArenas *bool `protobuf:"varint,31,opt,name=cc_enable_arenas,json=ccEnableArenas,def=0" json:"cc_enable_arenas,omitempty"` - // Sets the objective c class prefix which is prepended to all objective c - // generated classes from this .proto. There is no default. - ObjcClassPrefix *string `protobuf:"bytes,36,opt,name=objc_class_prefix,json=objcClassPrefix" json:"objc_class_prefix,omitempty"` - // Namespace for generated classes; defaults to the package. - CsharpNamespace *string `protobuf:"bytes,37,opt,name=csharp_namespace,json=csharpNamespace" json:"csharp_namespace,omitempty"` - // The parser stores options it doesn't recognize here. See above. 
- UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *FileOptions) Reset() { *m = FileOptions{} } -func (m *FileOptions) String() string { return proto.CompactTextString(m) } -func (*FileOptions) ProtoMessage() {} -func (*FileOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } - -var extRange_FileOptions = []proto.ExtensionRange{ - {1000, 536870911}, -} - -func (*FileOptions) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_FileOptions -} - -const Default_FileOptions_JavaMultipleFiles bool = false -const Default_FileOptions_JavaStringCheckUtf8 bool = false -const Default_FileOptions_OptimizeFor FileOptions_OptimizeMode = FileOptions_SPEED -const Default_FileOptions_CcGenericServices bool = false -const Default_FileOptions_JavaGenericServices bool = false -const Default_FileOptions_PyGenericServices bool = false -const Default_FileOptions_Deprecated bool = false -const Default_FileOptions_CcEnableArenas bool = false - -func (m *FileOptions) GetJavaPackage() string { - if m != nil && m.JavaPackage != nil { - return *m.JavaPackage - } - return "" -} - -func (m *FileOptions) GetJavaOuterClassname() string { - if m != nil && m.JavaOuterClassname != nil { - return *m.JavaOuterClassname - } - return "" -} - -func (m *FileOptions) GetJavaMultipleFiles() bool { - if m != nil && m.JavaMultipleFiles != nil { - return *m.JavaMultipleFiles - } - return Default_FileOptions_JavaMultipleFiles -} - -func (m *FileOptions) GetJavaGenerateEqualsAndHash() bool { - if m != nil && m.JavaGenerateEqualsAndHash != nil { - return *m.JavaGenerateEqualsAndHash - } - return false -} - -func (m *FileOptions) GetJavaStringCheckUtf8() bool { - if m != nil && m.JavaStringCheckUtf8 != nil { - return *m.JavaStringCheckUtf8 - } - return Default_FileOptions_JavaStringCheckUtf8 -} - -func (m *FileOptions) GetOptimizeFor() FileOptions_OptimizeMode { - if m != nil && m.OptimizeFor != nil { - return *m.OptimizeFor - } - return Default_FileOptions_OptimizeFor -} - -func (m *FileOptions) GetGoPackage() string { - if m != nil && m.GoPackage != nil { - return *m.GoPackage - } - return "" -} - -func (m *FileOptions) GetCcGenericServices() bool { - if m != nil && m.CcGenericServices != nil { - return *m.CcGenericServices - } - return Default_FileOptions_CcGenericServices -} - -func (m *FileOptions) GetJavaGenericServices() bool { - if m != nil && m.JavaGenericServices != nil { - return *m.JavaGenericServices - } - return Default_FileOptions_JavaGenericServices -} - -func (m *FileOptions) GetPyGenericServices() bool { - if m != nil && m.PyGenericServices != nil { - return *m.PyGenericServices - } - return Default_FileOptions_PyGenericServices -} - -func (m *FileOptions) GetDeprecated() bool { - if m != nil && m.Deprecated != nil { - return *m.Deprecated - } - return Default_FileOptions_Deprecated -} - -func (m *FileOptions) GetCcEnableArenas() bool { - if m != nil && m.CcEnableArenas != nil { - return *m.CcEnableArenas - } - return Default_FileOptions_CcEnableArenas -} - -func (m *FileOptions) GetObjcClassPrefix() string { - if m != nil && m.ObjcClassPrefix != nil { - return *m.ObjcClassPrefix - } - return "" -} - -func (m *FileOptions) GetCsharpNamespace() string { - if m != nil && m.CsharpNamespace != nil { - return *m.CsharpNamespace - } - return "" -} - -func (m *FileOptions) 
GetUninterpretedOption() []*UninterpretedOption { - if m != nil { - return m.UninterpretedOption - } - return nil -} - -type MessageOptions struct { - // Set true to use the old proto1 MessageSet wire format for extensions. - // This is provided for backwards-compatibility with the MessageSet wire - // format. You should not use this for any other reason: It's less - // efficient, has fewer features, and is more complicated. - // - // The message must be defined exactly as follows: - // message Foo { - // option message_set_wire_format = true; - // extensions 4 to max; - // } - // Note that the message cannot have any defined fields; MessageSets only - // have extensions. - // - // All extensions of your type must be singular messages; e.g. they cannot - // be int32s, enums, or repeated messages. - // - // Because this is an option, the above two restrictions are not enforced by - // the protocol compiler. - MessageSetWireFormat *bool `protobuf:"varint,1,opt,name=message_set_wire_format,json=messageSetWireFormat,def=0" json:"message_set_wire_format,omitempty"` - // Disables the generation of the standard "descriptor()" accessor, which can - // conflict with a field of the same name. This is meant to make migration - // from proto1 easier; new code should avoid fields named "descriptor". - NoStandardDescriptorAccessor *bool `protobuf:"varint,2,opt,name=no_standard_descriptor_accessor,json=noStandardDescriptorAccessor,def=0" json:"no_standard_descriptor_accessor,omitempty"` - // Is this message deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for the message, or it will be completely ignored; in the very least, - // this is a formalization for deprecating messages. - Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` - // Whether the message is an automatically generated map entry type for the - // maps field. - // - // For maps fields: - // map<KeyType, ValueType> map_field = 1; - // The parsed descriptor looks like: - // message MapFieldEntry { - // option map_entry = true; - // optional KeyType key = 1; - // optional ValueType value = 2; - // } - // repeated MapFieldEntry map_field = 1; - // - // Implementations may choose not to generate the map_entry=true message, but - // use a native map in the target language to hold the keys and values. - // The reflection APIs in such implementions still need to work as - // if the field is a repeated message field. - // - // NOTE: Do not set the option in .proto files. Always use the maps syntax - // instead. The option should only be implicitly set by the proto compiler - // parser. - MapEntry *bool `protobuf:"varint,7,opt,name=map_entry,json=mapEntry" json:"map_entry,omitempty"` - // The parser stores options it doesn't recognize here. See above.
- UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MessageOptions) Reset() { *m = MessageOptions{} } -func (m *MessageOptions) String() string { return proto.CompactTextString(m) } -func (*MessageOptions) ProtoMessage() {} -func (*MessageOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } - -var extRange_MessageOptions = []proto.ExtensionRange{ - {1000, 536870911}, -} - -func (*MessageOptions) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_MessageOptions -} - -const Default_MessageOptions_MessageSetWireFormat bool = false -const Default_MessageOptions_NoStandardDescriptorAccessor bool = false -const Default_MessageOptions_Deprecated bool = false - -func (m *MessageOptions) GetMessageSetWireFormat() bool { - if m != nil && m.MessageSetWireFormat != nil { - return *m.MessageSetWireFormat - } - return Default_MessageOptions_MessageSetWireFormat -} - -func (m *MessageOptions) GetNoStandardDescriptorAccessor() bool { - if m != nil && m.NoStandardDescriptorAccessor != nil { - return *m.NoStandardDescriptorAccessor - } - return Default_MessageOptions_NoStandardDescriptorAccessor -} - -func (m *MessageOptions) GetDeprecated() bool { - if m != nil && m.Deprecated != nil { - return *m.Deprecated - } - return Default_MessageOptions_Deprecated -} - -func (m *MessageOptions) GetMapEntry() bool { - if m != nil && m.MapEntry != nil { - return *m.MapEntry - } - return false -} - -func (m *MessageOptions) GetUninterpretedOption() []*UninterpretedOption { - if m != nil { - return m.UninterpretedOption - } - return nil -} - -type FieldOptions struct { - // The ctype option instructs the C++ code generator to use a different - // representation of the field than it normally would. See the specific - // options below. This option is not yet implemented in the open source - // release -- sorry, we'll try to include it in a future version! - Ctype *FieldOptions_CType `protobuf:"varint,1,opt,name=ctype,enum=google.protobuf.FieldOptions_CType,def=0" json:"ctype,omitempty"` - // The packed option can be enabled for repeated primitive fields to enable - // a more efficient representation on the wire. Rather than repeatedly - // writing the tag and type for each element, the entire array is encoded as - // a single length-delimited blob. In proto3, only explicit setting it to - // false will avoid using packed encoding. - Packed *bool `protobuf:"varint,2,opt,name=packed" json:"packed,omitempty"` - // The jstype option determines the JavaScript type used for values of the - // field. The option is permitted only for 64 bit integral and fixed types - // (int64, uint64, sint64, fixed64, sfixed64). By default these types are - // represented as JavaScript strings. This avoids loss of precision that can - // happen when a large value is converted to a floating point JavaScript - // numbers. Specifying JS_NUMBER for the jstype causes the generated - // JavaScript code to use the JavaScript "number" type instead of strings. - // This option is an enum to permit additional types to be added, - // e.g. goog.math.Integer. - Jstype *FieldOptions_JSType `protobuf:"varint,6,opt,name=jstype,enum=google.protobuf.FieldOptions_JSType,def=0" json:"jstype,omitempty"` - // Should this field be parsed lazily? Lazy applies only to message-type - // fields. 
It means that when the outer message is initially parsed, the - // inner message's contents will not be parsed but instead stored in encoded - // form. The inner message will actually be parsed when it is first accessed. - // - // This is only a hint. Implementations are free to choose whether to use - // eager or lazy parsing regardless of the value of this option. However, - // setting this option true suggests that the protocol author believes that - // using lazy parsing on this field is worth the additional bookkeeping - // overhead typically needed to implement it. - // - // This option does not affect the public interface of any generated code; - // all method signatures remain the same. Furthermore, thread-safety of the - // interface is not affected by this option; const methods remain safe to - // call from multiple threads concurrently, while non-const methods continue - // to require exclusive access. - // - // - // Note that implementations may choose not to check required fields within - // a lazy sub-message. That is, calling IsInitialized() on the outer message - // may return true even if the inner message has missing required fields. - // This is necessary because otherwise the inner message would have to be - // parsed in order to perform the check, defeating the purpose of lazy - // parsing. An implementation which chooses not to check required fields - // must be consistent about it. That is, for any particular sub-message, the - // implementation must either *always* check its required fields, or *never* - // check its required fields, regardless of whether or not the message has - // been parsed. - Lazy *bool `protobuf:"varint,5,opt,name=lazy,def=0" json:"lazy,omitempty"` - // Is this field deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for accessors, or it will be completely ignored; in the very least, this - // is a formalization for deprecating fields. - Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` - // For Google-internal migration only. Do not use. - Weak *bool `protobuf:"varint,10,opt,name=weak,def=0" json:"weak,omitempty"` - // The parser stores options it doesn't recognize here. See above. 
- UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *FieldOptions) Reset() { *m = FieldOptions{} } -func (m *FieldOptions) String() string { return proto.CompactTextString(m) } -func (*FieldOptions) ProtoMessage() {} -func (*FieldOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } - -var extRange_FieldOptions = []proto.ExtensionRange{ - {1000, 536870911}, -} - -func (*FieldOptions) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_FieldOptions -} - -const Default_FieldOptions_Ctype FieldOptions_CType = FieldOptions_STRING -const Default_FieldOptions_Jstype FieldOptions_JSType = FieldOptions_JS_NORMAL -const Default_FieldOptions_Lazy bool = false -const Default_FieldOptions_Deprecated bool = false -const Default_FieldOptions_Weak bool = false - -func (m *FieldOptions) GetCtype() FieldOptions_CType { - if m != nil && m.Ctype != nil { - return *m.Ctype - } - return Default_FieldOptions_Ctype -} - -func (m *FieldOptions) GetPacked() bool { - if m != nil && m.Packed != nil { - return *m.Packed - } - return false -} - -func (m *FieldOptions) GetJstype() FieldOptions_JSType { - if m != nil && m.Jstype != nil { - return *m.Jstype - } - return Default_FieldOptions_Jstype -} - -func (m *FieldOptions) GetLazy() bool { - if m != nil && m.Lazy != nil { - return *m.Lazy - } - return Default_FieldOptions_Lazy -} - -func (m *FieldOptions) GetDeprecated() bool { - if m != nil && m.Deprecated != nil { - return *m.Deprecated - } - return Default_FieldOptions_Deprecated -} - -func (m *FieldOptions) GetWeak() bool { - if m != nil && m.Weak != nil { - return *m.Weak - } - return Default_FieldOptions_Weak -} - -func (m *FieldOptions) GetUninterpretedOption() []*UninterpretedOption { - if m != nil { - return m.UninterpretedOption - } - return nil -} - -type OneofOptions struct { - // The parser stores options it doesn't recognize here. See above. - UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *OneofOptions) Reset() { *m = OneofOptions{} } -func (m *OneofOptions) String() string { return proto.CompactTextString(m) } -func (*OneofOptions) ProtoMessage() {} -func (*OneofOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } - -var extRange_OneofOptions = []proto.ExtensionRange{ - {1000, 536870911}, -} - -func (*OneofOptions) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_OneofOptions -} - -func (m *OneofOptions) GetUninterpretedOption() []*UninterpretedOption { - if m != nil { - return m.UninterpretedOption - } - return nil -} - -type EnumOptions struct { - // Set this option to true to allow mapping different tag names to the same - // value. - AllowAlias *bool `protobuf:"varint,2,opt,name=allow_alias,json=allowAlias" json:"allow_alias,omitempty"` - // Is this enum deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for the enum, or it will be completely ignored; in the very least, this - // is a formalization for deprecating enums. - Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` - // The parser stores options it doesn't recognize here. See above. 
- UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *EnumOptions) Reset() { *m = EnumOptions{} } -func (m *EnumOptions) String() string { return proto.CompactTextString(m) } -func (*EnumOptions) ProtoMessage() {} -func (*EnumOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } - -var extRange_EnumOptions = []proto.ExtensionRange{ - {1000, 536870911}, -} - -func (*EnumOptions) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_EnumOptions -} - -const Default_EnumOptions_Deprecated bool = false - -func (m *EnumOptions) GetAllowAlias() bool { - if m != nil && m.AllowAlias != nil { - return *m.AllowAlias - } - return false -} - -func (m *EnumOptions) GetDeprecated() bool { - if m != nil && m.Deprecated != nil { - return *m.Deprecated - } - return Default_EnumOptions_Deprecated -} - -func (m *EnumOptions) GetUninterpretedOption() []*UninterpretedOption { - if m != nil { - return m.UninterpretedOption - } - return nil -} - -type EnumValueOptions struct { - // Is this enum value deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for the enum value, or it will be completely ignored; in the very least, - // this is a formalization for deprecating enum values. - Deprecated *bool `protobuf:"varint,1,opt,name=deprecated,def=0" json:"deprecated,omitempty"` - // The parser stores options it doesn't recognize here. See above. - UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *EnumValueOptions) Reset() { *m = EnumValueOptions{} } -func (m *EnumValueOptions) String() string { return proto.CompactTextString(m) } -func (*EnumValueOptions) ProtoMessage() {} -func (*EnumValueOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } - -var extRange_EnumValueOptions = []proto.ExtensionRange{ - {1000, 536870911}, -} - -func (*EnumValueOptions) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_EnumValueOptions -} - -const Default_EnumValueOptions_Deprecated bool = false - -func (m *EnumValueOptions) GetDeprecated() bool { - if m != nil && m.Deprecated != nil { - return *m.Deprecated - } - return Default_EnumValueOptions_Deprecated -} - -func (m *EnumValueOptions) GetUninterpretedOption() []*UninterpretedOption { - if m != nil { - return m.UninterpretedOption - } - return nil -} - -type ServiceOptions struct { - // Is this service deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for the service, or it will be completely ignored; in the very least, - // this is a formalization for deprecating services. - Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` - // The parser stores options it doesn't recognize here. See above. 
- UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *ServiceOptions) Reset() { *m = ServiceOptions{} } -func (m *ServiceOptions) String() string { return proto.CompactTextString(m) } -func (*ServiceOptions) ProtoMessage() {} -func (*ServiceOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } - -var extRange_ServiceOptions = []proto.ExtensionRange{ - {1000, 536870911}, -} - -func (*ServiceOptions) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_ServiceOptions -} - -const Default_ServiceOptions_Deprecated bool = false - -func (m *ServiceOptions) GetDeprecated() bool { - if m != nil && m.Deprecated != nil { - return *m.Deprecated - } - return Default_ServiceOptions_Deprecated -} - -func (m *ServiceOptions) GetUninterpretedOption() []*UninterpretedOption { - if m != nil { - return m.UninterpretedOption - } - return nil -} - -type MethodOptions struct { - // Is this method deprecated? - // Depending on the target platform, this can emit Deprecated annotations - // for the method, or it will be completely ignored; in the very least, - // this is a formalization for deprecating methods. - Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` - // The parser stores options it doesn't recognize here. See above. - UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` - proto.XXX_InternalExtensions `json:"-"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *MethodOptions) Reset() { *m = MethodOptions{} } -func (m *MethodOptions) String() string { return proto.CompactTextString(m) } -func (*MethodOptions) ProtoMessage() {} -func (*MethodOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } - -var extRange_MethodOptions = []proto.ExtensionRange{ - {1000, 536870911}, -} - -func (*MethodOptions) ExtensionRangeArray() []proto.ExtensionRange { - return extRange_MethodOptions -} - -const Default_MethodOptions_Deprecated bool = false - -func (m *MethodOptions) GetDeprecated() bool { - if m != nil && m.Deprecated != nil { - return *m.Deprecated - } - return Default_MethodOptions_Deprecated -} - -func (m *MethodOptions) GetUninterpretedOption() []*UninterpretedOption { - if m != nil { - return m.UninterpretedOption - } - return nil -} - -// A message representing a option the parser does not recognize. This only -// appears in options protos created by the compiler::Parser class. -// DescriptorPool resolves these when building Descriptor objects. Therefore, -// options protos in descriptor objects (e.g. returned by Descriptor::options(), -// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions -// in them. -type UninterpretedOption struct { - Name []*UninterpretedOption_NamePart `protobuf:"bytes,2,rep,name=name" json:"name,omitempty"` - // The value of the uninterpreted option, in whatever type the tokenizer - // identified it as during parsing. Exactly one of these should be set. 
- IdentifierValue *string `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"` - PositiveIntValue *uint64 `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"` - NegativeIntValue *int64 `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"` - DoubleValue *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` - StringValue []byte `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` - AggregateValue *string `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *UninterpretedOption) Reset() { *m = UninterpretedOption{} } -func (m *UninterpretedOption) String() string { return proto.CompactTextString(m) } -func (*UninterpretedOption) ProtoMessage() {} -func (*UninterpretedOption) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } - -func (m *UninterpretedOption) GetName() []*UninterpretedOption_NamePart { - if m != nil { - return m.Name - } - return nil -} - -func (m *UninterpretedOption) GetIdentifierValue() string { - if m != nil && m.IdentifierValue != nil { - return *m.IdentifierValue - } - return "" -} - -func (m *UninterpretedOption) GetPositiveIntValue() uint64 { - if m != nil && m.PositiveIntValue != nil { - return *m.PositiveIntValue - } - return 0 -} - -func (m *UninterpretedOption) GetNegativeIntValue() int64 { - if m != nil && m.NegativeIntValue != nil { - return *m.NegativeIntValue - } - return 0 -} - -func (m *UninterpretedOption) GetDoubleValue() float64 { - if m != nil && m.DoubleValue != nil { - return *m.DoubleValue - } - return 0 -} - -func (m *UninterpretedOption) GetStringValue() []byte { - if m != nil { - return m.StringValue - } - return nil -} - -func (m *UninterpretedOption) GetAggregateValue() string { - if m != nil && m.AggregateValue != nil { - return *m.AggregateValue - } - return "" -} - -// The name of the uninterpreted option. Each string represents a segment in -// a dot-separated name. is_extension is true iff a segment represents an -// extension (denoted with parentheses in options specs in .proto files). -// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents -// "foo.(bar.baz).qux". -type UninterpretedOption_NamePart struct { - NamePart *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"` - IsExtension *bool `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *UninterpretedOption_NamePart) Reset() { *m = UninterpretedOption_NamePart{} } -func (m *UninterpretedOption_NamePart) String() string { return proto.CompactTextString(m) } -func (*UninterpretedOption_NamePart) ProtoMessage() {} -func (*UninterpretedOption_NamePart) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{17, 0} -} - -func (m *UninterpretedOption_NamePart) GetNamePart() string { - if m != nil && m.NamePart != nil { - return *m.NamePart - } - return "" -} - -func (m *UninterpretedOption_NamePart) GetIsExtension() bool { - if m != nil && m.IsExtension != nil { - return *m.IsExtension - } - return false -} - -// Encapsulates information about the original source file from which a -// FileDescriptorProto was generated. 
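As an aside on the NamePart convention documented in the deleted comment above: a name such as { ["foo", false], ["bar.baz", true], ["qux", false] } renders as "foo.(bar.baz).qux", with extension segments wrapped in parentheses. A minimal standalone sketch of that rendering rule follows; the namePart struct and dottedName helper are illustrative stand-ins, not symbols from the deleted file.

package main

import (
	"fmt"
	"strings"
)

// namePart mirrors the shape of UninterpretedOption_NamePart for illustration:
// one dot-separated segment plus a flag saying whether it names an extension.
type namePart struct {
	namePart    string
	isExtension bool
}

// dottedName joins segments the way the deleted comment describes:
// extension segments are wrapped in parentheses.
func dottedName(parts []namePart) string {
	segs := make([]string, 0, len(parts))
	for _, p := range parts {
		if p.isExtension {
			segs = append(segs, "("+p.namePart+")")
		} else {
			segs = append(segs, p.namePart)
		}
	}
	return strings.Join(segs, ".")
}

func main() {
	parts := []namePart{{"foo", false}, {"bar.baz", true}, {"qux", false}}
	fmt.Println(dottedName(parts)) // foo.(bar.baz).qux
}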
-type SourceCodeInfo struct { - // A Location identifies a piece of source code in a .proto file which - // corresponds to a particular definition. This information is intended - // to be useful to IDEs, code indexers, documentation generators, and similar - // tools. - // - // For example, say we have a file like: - // message Foo { - // optional string foo = 1; - // } - // Let's look at just the field definition: - // optional string foo = 1; - // ^ ^^ ^^ ^ ^^^ - // a bc de f ghi - // We have the following locations: - // span path represents - // [a,i) [ 4, 0, 2, 0 ] The whole field definition. - // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). - // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). - // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). - // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). - // - // Notes: - // - A location may refer to a repeated field itself (i.e. not to any - // particular index within it). This is used whenever a set of elements are - // logically enclosed in a single code segment. For example, an entire - // extend block (possibly containing multiple extension definitions) will - // have an outer location whose path refers to the "extensions" repeated - // field without an index. - // - Multiple locations may have the same path. This happens when a single - // logical declaration is spread out across multiple places. The most - // obvious example is the "extend" block again -- there may be multiple - // extend blocks in the same scope, each of which will have the same path. - // - A location's span is not always a subset of its parent's span. For - // example, the "extendee" of an extension declaration appears at the - // beginning of the "extend" block and is shared by all extensions within - // the block. - // - Just because a location's span is a subset of some other location's span - // does not mean that it is a descendent. For example, a "group" defines - // both a type and a field in a single declaration. Thus, the locations - // corresponding to the type and field and their components will overlap. - // - Code which tries to interpret locations should probably be designed to - // ignore those that it doesn't understand, as more types of locations could - // be recorded in the future. - Location []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *SourceCodeInfo) Reset() { *m = SourceCodeInfo{} } -func (m *SourceCodeInfo) String() string { return proto.CompactTextString(m) } -func (*SourceCodeInfo) ProtoMessage() {} -func (*SourceCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } - -func (m *SourceCodeInfo) GetLocation() []*SourceCodeInfo_Location { - if m != nil { - return m.Location - } - return nil -} - -type SourceCodeInfo_Location struct { - // Identifies which part of the FileDescriptorProto was defined at this - // location. - // - // Each element is a field number or an index. They form a path from - // the root FileDescriptorProto to the place where the definition. 
For - // example, this path: - // [ 4, 3, 2, 7, 1 ] - // refers to: - // file.message_type(3) // 4, 3 - // .field(7) // 2, 7 - // .name() // 1 - // This is because FileDescriptorProto.message_type has field number 4: - // repeated DescriptorProto message_type = 4; - // and DescriptorProto.field has field number 2: - // repeated FieldDescriptorProto field = 2; - // and FieldDescriptorProto.name has field number 1: - // optional string name = 1; - // - // Thus, the above path gives the location of a field name. If we removed - // the last element: - // [ 4, 3, 2, 7 ] - // this path refers to the whole field declaration (from the beginning - // of the label to the terminating semicolon). - Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` - // Always has exactly three or four elements: start line, start column, - // end line (optional, otherwise assumed same as start line), end column. - // These are packed into a single field for efficiency. Note that line - // and column numbers are zero-based -- typically you will want to add - // 1 to each before displaying to a user. - Span []int32 `protobuf:"varint,2,rep,packed,name=span" json:"span,omitempty"` - // If this SourceCodeInfo represents a complete declaration, these are any - // comments appearing before and after the declaration which appear to be - // attached to the declaration. - // - // A series of line comments appearing on consecutive lines, with no other - // tokens appearing on those lines, will be treated as a single comment. - // - // leading_detached_comments will keep paragraphs of comments that appear - // before (but not connected to) the current element. Each paragraph, - // separated by empty lines, will be one comment element in the repeated - // field. - // - // Only the comment content is provided; comment markers (e.g. //) are - // stripped out. For block comments, leading whitespace and an asterisk - // will be stripped from the beginning of each line other than the first. - // Newlines are included in the output. - // - // Examples: - // - // optional int32 foo = 1; // Comment attached to foo. - // // Comment attached to bar. - // optional int32 bar = 2; - // - // optional string baz = 3; - // // Comment attached to baz. - // // Another line attached to baz. - // - // // Comment attached to qux. - // // - // // Another line attached to qux. - // optional double qux = 4; - // - // // Detached comment for corge. This is not leading or trailing comments - // // to qux or corge because there are blank lines separating it from - // // both. - // - // // Detached comment for corge paragraph 2. - // - // optional string corge = 5; - // /* Block comment attached - // * to corge. Leading asterisks - // * will be removed. */ - // /* Block comment attached to - // * grault. */ - // optional int32 grault = 6; - // - // // ignored detached comments. 
- LeadingComments *string `protobuf:"bytes,3,opt,name=leading_comments,json=leadingComments" json:"leading_comments,omitempty"` - TrailingComments *string `protobuf:"bytes,4,opt,name=trailing_comments,json=trailingComments" json:"trailing_comments,omitempty"` - LeadingDetachedComments []string `protobuf:"bytes,6,rep,name=leading_detached_comments,json=leadingDetachedComments" json:"leading_detached_comments,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *SourceCodeInfo_Location) Reset() { *m = SourceCodeInfo_Location{} } -func (m *SourceCodeInfo_Location) String() string { return proto.CompactTextString(m) } -func (*SourceCodeInfo_Location) ProtoMessage() {} -func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18, 0} } - -func (m *SourceCodeInfo_Location) GetPath() []int32 { - if m != nil { - return m.Path - } - return nil -} - -func (m *SourceCodeInfo_Location) GetSpan() []int32 { - if m != nil { - return m.Span - } - return nil -} - -func (m *SourceCodeInfo_Location) GetLeadingComments() string { - if m != nil && m.LeadingComments != nil { - return *m.LeadingComments - } - return "" -} - -func (m *SourceCodeInfo_Location) GetTrailingComments() string { - if m != nil && m.TrailingComments != nil { - return *m.TrailingComments - } - return "" -} - -func (m *SourceCodeInfo_Location) GetLeadingDetachedComments() []string { - if m != nil { - return m.LeadingDetachedComments - } - return nil -} - -// Describes the relationship between generated code and its original source -// file. A GeneratedCodeInfo message is associated with only one generated -// source file, but may contain references to different source .proto files. -type GeneratedCodeInfo struct { - // An Annotation connects some span of text in generated code to an element - // of its generating .proto file. - Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GeneratedCodeInfo) Reset() { *m = GeneratedCodeInfo{} } -func (m *GeneratedCodeInfo) String() string { return proto.CompactTextString(m) } -func (*GeneratedCodeInfo) ProtoMessage() {} -func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } - -func (m *GeneratedCodeInfo) GetAnnotation() []*GeneratedCodeInfo_Annotation { - if m != nil { - return m.Annotation - } - return nil -} - -type GeneratedCodeInfo_Annotation struct { - // Identifies the element in the original source .proto file. This field - // is formatted the same as SourceCodeInfo.Location.path. - Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` - // Identifies the filesystem path to the original source .proto. - SourceFile *string `protobuf:"bytes,2,opt,name=source_file,json=sourceFile" json:"source_file,omitempty"` - // Identifies the starting offset in bytes in the generated code - // that relates to the identified object. - Begin *int32 `protobuf:"varint,3,opt,name=begin" json:"begin,omitempty"` - // Identifies the ending offset in bytes in the generated code that - // relates to the identified offset. The end offset should be one past - // the last relevant byte (so the length of the text = end - begin). 
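For reference, the span encoding documented above (always three or four zero-based integers: start line, start column, optional end line, end column) can be converted to 1-based line/column pairs for display with a small helper. This is a self-contained sketch under those stated assumptions; spanToHuman is an illustrative name, not part of the generated package.

package main

import "fmt"

// spanToHuman converts a Location.Span slice into 1-based values suitable
// for showing to a user. With three elements, the end line is assumed equal
// to the start line, as the deleted comment explains.
func spanToHuman(span []int32) (startLine, startCol, endLine, endCol int32, ok bool) {
	switch len(span) {
	case 3: // end line same as start line
		return span[0] + 1, span[1] + 1, span[0] + 1, span[2] + 1, true
	case 4:
		return span[0] + 1, span[1] + 1, span[2] + 1, span[3] + 1, true
	default:
		return 0, 0, 0, 0, false
	}
}

func main() {
	if sl, sc, el, ec, ok := spanToHuman([]int32{4, 2, 21}); ok {
		fmt.Printf("definition spans %d:%d to %d:%d\n", sl, sc, el, ec) // 5:3 to 5:22
	}
}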
- End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *GeneratedCodeInfo_Annotation) Reset() { *m = GeneratedCodeInfo_Annotation{} } -func (m *GeneratedCodeInfo_Annotation) String() string { return proto.CompactTextString(m) } -func (*GeneratedCodeInfo_Annotation) ProtoMessage() {} -func (*GeneratedCodeInfo_Annotation) Descriptor() ([]byte, []int) { - return fileDescriptor0, []int{19, 0} -} - -func (m *GeneratedCodeInfo_Annotation) GetPath() []int32 { - if m != nil { - return m.Path - } - return nil -} - -func (m *GeneratedCodeInfo_Annotation) GetSourceFile() string { - if m != nil && m.SourceFile != nil { - return *m.SourceFile - } - return "" -} - -func (m *GeneratedCodeInfo_Annotation) GetBegin() int32 { - if m != nil && m.Begin != nil { - return *m.Begin - } - return 0 -} - -func (m *GeneratedCodeInfo_Annotation) GetEnd() int32 { - if m != nil && m.End != nil { - return *m.End - } - return 0 -} - -func init() { - proto.RegisterType((*FileDescriptorSet)(nil), "google.protobuf.FileDescriptorSet") - proto.RegisterType((*FileDescriptorProto)(nil), "google.protobuf.FileDescriptorProto") - proto.RegisterType((*DescriptorProto)(nil), "google.protobuf.DescriptorProto") - proto.RegisterType((*DescriptorProto_ExtensionRange)(nil), "google.protobuf.DescriptorProto.ExtensionRange") - proto.RegisterType((*DescriptorProto_ReservedRange)(nil), "google.protobuf.DescriptorProto.ReservedRange") - proto.RegisterType((*FieldDescriptorProto)(nil), "google.protobuf.FieldDescriptorProto") - proto.RegisterType((*OneofDescriptorProto)(nil), "google.protobuf.OneofDescriptorProto") - proto.RegisterType((*EnumDescriptorProto)(nil), "google.protobuf.EnumDescriptorProto") - proto.RegisterType((*EnumValueDescriptorProto)(nil), "google.protobuf.EnumValueDescriptorProto") - proto.RegisterType((*ServiceDescriptorProto)(nil), "google.protobuf.ServiceDescriptorProto") - proto.RegisterType((*MethodDescriptorProto)(nil), "google.protobuf.MethodDescriptorProto") - proto.RegisterType((*FileOptions)(nil), "google.protobuf.FileOptions") - proto.RegisterType((*MessageOptions)(nil), "google.protobuf.MessageOptions") - proto.RegisterType((*FieldOptions)(nil), "google.protobuf.FieldOptions") - proto.RegisterType((*OneofOptions)(nil), "google.protobuf.OneofOptions") - proto.RegisterType((*EnumOptions)(nil), "google.protobuf.EnumOptions") - proto.RegisterType((*EnumValueOptions)(nil), "google.protobuf.EnumValueOptions") - proto.RegisterType((*ServiceOptions)(nil), "google.protobuf.ServiceOptions") - proto.RegisterType((*MethodOptions)(nil), "google.protobuf.MethodOptions") - proto.RegisterType((*UninterpretedOption)(nil), "google.protobuf.UninterpretedOption") - proto.RegisterType((*UninterpretedOption_NamePart)(nil), "google.protobuf.UninterpretedOption.NamePart") - proto.RegisterType((*SourceCodeInfo)(nil), "google.protobuf.SourceCodeInfo") - proto.RegisterType((*SourceCodeInfo_Location)(nil), "google.protobuf.SourceCodeInfo.Location") - proto.RegisterType((*GeneratedCodeInfo)(nil), "google.protobuf.GeneratedCodeInfo") - proto.RegisterType((*GeneratedCodeInfo_Annotation)(nil), "google.protobuf.GeneratedCodeInfo.Annotation") - proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Type", FieldDescriptorProto_Type_name, FieldDescriptorProto_Type_value) - proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Label", FieldDescriptorProto_Label_name, FieldDescriptorProto_Label_value) - proto.RegisterEnum("google.protobuf.FileOptions_OptimizeMode", 
FileOptions_OptimizeMode_name, FileOptions_OptimizeMode_value) - proto.RegisterEnum("google.protobuf.FieldOptions_CType", FieldOptions_CType_name, FieldOptions_CType_value) - proto.RegisterEnum("google.protobuf.FieldOptions_JSType", FieldOptions_JSType_name, FieldOptions_JSType_value) -} - -func init() { proto.RegisterFile("google/protobuf/descriptor.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 2295 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xcc, 0x59, 0x4f, 0x6f, 0x1b, 0xc7, - 0x15, 0xcf, 0xf2, 0x9f, 0xc8, 0x47, 0x8a, 0x1a, 0x8d, 0x14, 0x67, 0xad, 0xfc, 0xb1, 0xcc, 0xd8, - 0xb1, 0x6c, 0xb7, 0x74, 0x20, 0xff, 0x89, 0xa3, 0x14, 0x29, 0x28, 0x71, 0xad, 0xd0, 0x90, 0x44, - 0x76, 0x29, 0xb5, 0x4e, 0x2e, 0x8b, 0xd1, 0xee, 0x90, 0x5a, 0x7b, 0x39, 0xbb, 0xdd, 0x5d, 0xda, - 0x56, 0x4e, 0x06, 0x7a, 0xea, 0xa5, 0xe7, 0xa2, 0x2d, 0x7a, 0xc8, 0x25, 0x40, 0x3f, 0x40, 0x0f, - 0xfd, 0x0a, 0x05, 0x0a, 0xf4, 0x2b, 0x14, 0x05, 0xda, 0x6f, 0xd0, 0x6b, 0x31, 0x33, 0xbb, 0xcb, - 0x5d, 0xfe, 0x89, 0xd5, 0x00, 0x49, 0x7a, 0x12, 0xe7, 0xf7, 0x7e, 0xef, 0xcd, 0x9b, 0x37, 0x6f, - 0xde, 0xbc, 0x1d, 0xc1, 0xe6, 0xd0, 0x75, 0x87, 0x0e, 0xbd, 0xe3, 0xf9, 0x6e, 0xe8, 0x9e, 0x8e, - 0x07, 0x77, 0x2c, 0x1a, 0x98, 0xbe, 0xed, 0x85, 0xae, 0xdf, 0x14, 0x18, 0x5e, 0x91, 0x8c, 0x66, - 0xcc, 0x68, 0x1c, 0xc2, 0xea, 0x23, 0xdb, 0xa1, 0xed, 0x84, 0xd8, 0xa7, 0x21, 0x7e, 0x08, 0x85, - 0x81, 0xed, 0x50, 0x55, 0xd9, 0xcc, 0x6f, 0x55, 0xb7, 0xaf, 0x35, 0xa7, 0x94, 0x9a, 0x59, 0x8d, - 0x1e, 0x87, 0x75, 0xa1, 0xd1, 0xf8, 0x67, 0x01, 0xd6, 0xe6, 0x48, 0x31, 0x86, 0x02, 0x23, 0x23, - 0x6e, 0x51, 0xd9, 0xaa, 0xe8, 0xe2, 0x37, 0x56, 0x61, 0xc9, 0x23, 0xe6, 0x33, 0x32, 0xa4, 0x6a, - 0x4e, 0xc0, 0xf1, 0x10, 0xbf, 0x07, 0x60, 0x51, 0x8f, 0x32, 0x8b, 0x32, 0xf3, 0x5c, 0xcd, 0x6f, - 0xe6, 0xb7, 0x2a, 0x7a, 0x0a, 0xc1, 0xb7, 0x61, 0xd5, 0x1b, 0x9f, 0x3a, 0xb6, 0x69, 0xa4, 0x68, - 0xb0, 0x99, 0xdf, 0x2a, 0xea, 0x48, 0x0a, 0xda, 0x13, 0xf2, 0x0d, 0x58, 0x79, 0x41, 0xc9, 0xb3, - 0x34, 0xb5, 0x2a, 0xa8, 0x75, 0x0e, 0xa7, 0x88, 0x7b, 0x50, 0x1b, 0xd1, 0x20, 0x20, 0x43, 0x6a, - 0x84, 0xe7, 0x1e, 0x55, 0x0b, 0x62, 0xf5, 0x9b, 0x33, 0xab, 0x9f, 0x5e, 0x79, 0x35, 0xd2, 0x3a, - 0x3e, 0xf7, 0x28, 0x6e, 0x41, 0x85, 0xb2, 0xf1, 0x48, 0x5a, 0x28, 0x2e, 0x88, 0x9f, 0xc6, 0xc6, - 0xa3, 0x69, 0x2b, 0x65, 0xae, 0x16, 0x99, 0x58, 0x0a, 0xa8, 0xff, 0xdc, 0x36, 0xa9, 0x5a, 0x12, - 0x06, 0x6e, 0xcc, 0x18, 0xe8, 0x4b, 0xf9, 0xb4, 0x8d, 0x58, 0x0f, 0xef, 0x41, 0x85, 0xbe, 0x0c, - 0x29, 0x0b, 0x6c, 0x97, 0xa9, 0x4b, 0xc2, 0xc8, 0xf5, 0x39, 0xbb, 0x48, 0x1d, 0x6b, 0xda, 0xc4, - 0x44, 0x0f, 0x3f, 0x80, 0x25, 0xd7, 0x0b, 0x6d, 0x97, 0x05, 0x6a, 0x79, 0x53, 0xd9, 0xaa, 0x6e, - 0xbf, 0x33, 0x37, 0x11, 0xba, 0x92, 0xa3, 0xc7, 0x64, 0xdc, 0x01, 0x14, 0xb8, 0x63, 0xdf, 0xa4, - 0x86, 0xe9, 0x5a, 0xd4, 0xb0, 0xd9, 0xc0, 0x55, 0x2b, 0xc2, 0xc0, 0x95, 0xd9, 0x85, 0x08, 0xe2, - 0x9e, 0x6b, 0xd1, 0x0e, 0x1b, 0xb8, 0x7a, 0x3d, 0xc8, 0x8c, 0xf1, 0x25, 0x28, 0x05, 0xe7, 0x2c, - 0x24, 0x2f, 0xd5, 0x9a, 0xc8, 0x90, 0x68, 0xd4, 0xf8, 0x4f, 0x11, 0x56, 0x2e, 0x92, 0x62, 0x9f, - 0x40, 0x71, 0xc0, 0x57, 0xa9, 0xe6, 0xfe, 0x97, 0x18, 0x48, 0x9d, 0x6c, 0x10, 0x4b, 0xdf, 0x32, - 0x88, 0x2d, 0xa8, 0x32, 0x1a, 0x84, 0xd4, 0x92, 0x19, 0x91, 0xbf, 0x60, 0x4e, 0x81, 0x54, 0x9a, - 0x4d, 0xa9, 0xc2, 0xb7, 0x4a, 0xa9, 0x27, 0xb0, 0x92, 0xb8, 0x64, 0xf8, 0x84, 0x0d, 0xe3, 0xdc, - 0xbc, 0xf3, 0x3a, 0x4f, 0x9a, 0x5a, 0xac, 0xa7, 0x73, 0x35, 0xbd, 0x4e, 0x33, 0x63, 0xdc, 0x06, - 0x70, 0x19, 0x75, 0x07, 0x86, 0x45, 0x4d, 0x47, 
0x2d, 0x2f, 0x88, 0x52, 0x97, 0x53, 0x66, 0xa2, - 0xe4, 0x4a, 0xd4, 0x74, 0xf0, 0xc7, 0x93, 0x54, 0x5b, 0x5a, 0x90, 0x29, 0x87, 0xf2, 0x90, 0xcd, - 0x64, 0xdb, 0x09, 0xd4, 0x7d, 0xca, 0xf3, 0x9e, 0x5a, 0xd1, 0xca, 0x2a, 0xc2, 0x89, 0xe6, 0x6b, - 0x57, 0xa6, 0x47, 0x6a, 0x72, 0x61, 0xcb, 0x7e, 0x7a, 0x88, 0xdf, 0x87, 0x04, 0x30, 0x44, 0x5a, - 0x81, 0xa8, 0x42, 0xb5, 0x18, 0x3c, 0x22, 0x23, 0xba, 0xf1, 0x10, 0xea, 0xd9, 0xf0, 0xe0, 0x75, - 0x28, 0x06, 0x21, 0xf1, 0x43, 0x91, 0x85, 0x45, 0x5d, 0x0e, 0x30, 0x82, 0x3c, 0x65, 0x96, 0xa8, - 0x72, 0x45, 0x9d, 0xff, 0xdc, 0xf8, 0x08, 0x96, 0x33, 0xd3, 0x5f, 0x54, 0xb1, 0xf1, 0xdb, 0x12, - 0xac, 0xcf, 0xcb, 0xb9, 0xb9, 0xe9, 0x7f, 0x09, 0x4a, 0x6c, 0x3c, 0x3a, 0xa5, 0xbe, 0x9a, 0x17, - 0x16, 0xa2, 0x11, 0x6e, 0x41, 0xd1, 0x21, 0xa7, 0xd4, 0x51, 0x0b, 0x9b, 0xca, 0x56, 0x7d, 0xfb, - 0xf6, 0x85, 0xb2, 0xba, 0x79, 0xc0, 0x55, 0x74, 0xa9, 0x89, 0x3f, 0x85, 0x42, 0x54, 0xe2, 0xb8, - 0x85, 0x5b, 0x17, 0xb3, 0xc0, 0x73, 0x51, 0x17, 0x7a, 0xf8, 0x6d, 0xa8, 0xf0, 0xbf, 0x32, 0xb6, - 0x25, 0xe1, 0x73, 0x99, 0x03, 0x3c, 0xae, 0x78, 0x03, 0xca, 0x22, 0xcd, 0x2c, 0x1a, 0x5f, 0x0d, - 0xc9, 0x98, 0x6f, 0x8c, 0x45, 0x07, 0x64, 0xec, 0x84, 0xc6, 0x73, 0xe2, 0x8c, 0xa9, 0x48, 0x98, - 0x8a, 0x5e, 0x8b, 0xc0, 0x9f, 0x73, 0x0c, 0x5f, 0x81, 0xaa, 0xcc, 0x4a, 0x9b, 0x59, 0xf4, 0xa5, - 0xa8, 0x3e, 0x45, 0x5d, 0x26, 0x6a, 0x87, 0x23, 0x7c, 0xfa, 0xa7, 0x81, 0xcb, 0xe2, 0xad, 0x15, - 0x53, 0x70, 0x40, 0x4c, 0xff, 0xd1, 0x74, 0xe1, 0x7b, 0x77, 0xfe, 0xf2, 0xa6, 0x73, 0xb1, 0xf1, - 0xe7, 0x1c, 0x14, 0xc4, 0x79, 0x5b, 0x81, 0xea, 0xf1, 0xe7, 0x3d, 0xcd, 0x68, 0x77, 0x4f, 0x76, - 0x0f, 0x34, 0xa4, 0xe0, 0x3a, 0x80, 0x00, 0x1e, 0x1d, 0x74, 0x5b, 0xc7, 0x28, 0x97, 0x8c, 0x3b, - 0x47, 0xc7, 0x0f, 0xee, 0xa1, 0x7c, 0xa2, 0x70, 0x22, 0x81, 0x42, 0x9a, 0x70, 0x77, 0x1b, 0x15, - 0x31, 0x82, 0x9a, 0x34, 0xd0, 0x79, 0xa2, 0xb5, 0x1f, 0xdc, 0x43, 0xa5, 0x2c, 0x72, 0x77, 0x1b, - 0x2d, 0xe1, 0x65, 0xa8, 0x08, 0x64, 0xb7, 0xdb, 0x3d, 0x40, 0xe5, 0xc4, 0x66, 0xff, 0x58, 0xef, - 0x1c, 0xed, 0xa3, 0x4a, 0x62, 0x73, 0x5f, 0xef, 0x9e, 0xf4, 0x10, 0x24, 0x16, 0x0e, 0xb5, 0x7e, - 0xbf, 0xb5, 0xaf, 0xa1, 0x6a, 0xc2, 0xd8, 0xfd, 0xfc, 0x58, 0xeb, 0xa3, 0x5a, 0xc6, 0xad, 0xbb, - 0xdb, 0x68, 0x39, 0x99, 0x42, 0x3b, 0x3a, 0x39, 0x44, 0x75, 0xbc, 0x0a, 0xcb, 0x72, 0x8a, 0xd8, - 0x89, 0x95, 0x29, 0xe8, 0xc1, 0x3d, 0x84, 0x26, 0x8e, 0x48, 0x2b, 0xab, 0x19, 0xe0, 0xc1, 0x3d, - 0x84, 0x1b, 0x7b, 0x50, 0x14, 0xd9, 0x85, 0x31, 0xd4, 0x0f, 0x5a, 0xbb, 0xda, 0x81, 0xd1, 0xed, - 0x1d, 0x77, 0xba, 0x47, 0xad, 0x03, 0xa4, 0x4c, 0x30, 0x5d, 0xfb, 0xd9, 0x49, 0x47, 0xd7, 0xda, - 0x28, 0x97, 0xc6, 0x7a, 0x5a, 0xeb, 0x58, 0x6b, 0xa3, 0x7c, 0xc3, 0x84, 0xf5, 0x79, 0x75, 0x66, - 0xee, 0xc9, 0x48, 0x6d, 0x71, 0x6e, 0xc1, 0x16, 0x0b, 0x5b, 0x33, 0x5b, 0xfc, 0x95, 0x02, 0x6b, - 0x73, 0x6a, 0xed, 0xdc, 0x49, 0x7e, 0x0a, 0x45, 0x99, 0xa2, 0xf2, 0xf6, 0xb9, 0x39, 0xb7, 0x68, - 0x8b, 0x84, 0x9d, 0xb9, 0x81, 0x84, 0x5e, 0xfa, 0x06, 0xce, 0x2f, 0xb8, 0x81, 0xb9, 0x89, 0x19, - 0x27, 0x7f, 0xa5, 0x80, 0xba, 0xc8, 0xf6, 0x6b, 0x0a, 0x45, 0x2e, 0x53, 0x28, 0x3e, 0x99, 0x76, - 0xe0, 0xea, 0xe2, 0x35, 0xcc, 0x78, 0xf1, 0xb5, 0x02, 0x97, 0xe6, 0x37, 0x2a, 0x73, 0x7d, 0xf8, - 0x14, 0x4a, 0x23, 0x1a, 0x9e, 0xb9, 0xf1, 0x65, 0xfd, 0xc1, 0x9c, 0x2b, 0x80, 0x8b, 0xa7, 0x63, - 0x15, 0x69, 0xa5, 0xef, 0x90, 0xfc, 0xa2, 0x6e, 0x43, 0x7a, 0x33, 0xe3, 0xe9, 0xaf, 0x73, 0xf0, - 0xe6, 0x5c, 0xe3, 0x73, 0x1d, 0x7d, 0x17, 0xc0, 0x66, 0xde, 0x38, 0x94, 0x17, 0xb2, 0xac, 0x4f, - 0x15, 0x81, 0x88, 0xb3, 0xcf, 0x6b, 0xcf, 0x38, 0x4c, 0xe4, 0x79, 0x21, 
0x07, 0x09, 0x09, 0xc2, - 0xc3, 0x89, 0xa3, 0x05, 0xe1, 0xe8, 0x7b, 0x0b, 0x56, 0x3a, 0x73, 0xd7, 0x7d, 0x08, 0xc8, 0x74, - 0x6c, 0xca, 0x42, 0x23, 0x08, 0x7d, 0x4a, 0x46, 0x36, 0x1b, 0x8a, 0x02, 0x5c, 0xde, 0x29, 0x0e, - 0x88, 0x13, 0x50, 0x7d, 0x45, 0x8a, 0xfb, 0xb1, 0x94, 0x6b, 0x88, 0x5b, 0xc6, 0x4f, 0x69, 0x94, - 0x32, 0x1a, 0x52, 0x9c, 0x68, 0x34, 0x7e, 0xb3, 0x04, 0xd5, 0x54, 0x5b, 0x87, 0xaf, 0x42, 0xed, - 0x29, 0x79, 0x4e, 0x8c, 0xb8, 0x55, 0x97, 0x91, 0xa8, 0x72, 0xac, 0x17, 0xb5, 0xeb, 0x1f, 0xc2, - 0xba, 0xa0, 0xb8, 0xe3, 0x90, 0xfa, 0x86, 0xe9, 0x90, 0x20, 0x10, 0x41, 0x2b, 0x0b, 0x2a, 0xe6, - 0xb2, 0x2e, 0x17, 0xed, 0xc5, 0x12, 0x7c, 0x1f, 0xd6, 0x84, 0xc6, 0x68, 0xec, 0x84, 0xb6, 0xe7, - 0x50, 0x83, 0x7f, 0x3c, 0x04, 0xa2, 0x10, 0x27, 0x9e, 0xad, 0x72, 0xc6, 0x61, 0x44, 0xe0, 0x1e, - 0x05, 0xb8, 0x0d, 0xef, 0x0a, 0xb5, 0x21, 0x65, 0xd4, 0x27, 0x21, 0x35, 0xe8, 0x2f, 0xc7, 0xc4, - 0x09, 0x0c, 0xc2, 0x2c, 0xe3, 0x8c, 0x04, 0x67, 0xea, 0x3a, 0x37, 0xb0, 0x9b, 0x53, 0x15, 0xfd, - 0x32, 0x27, 0xee, 0x47, 0x3c, 0x4d, 0xd0, 0x5a, 0xcc, 0xfa, 0x8c, 0x04, 0x67, 0x78, 0x07, 0x2e, - 0x09, 0x2b, 0x41, 0xe8, 0xdb, 0x6c, 0x68, 0x98, 0x67, 0xd4, 0x7c, 0x66, 0x8c, 0xc3, 0xc1, 0x43, - 0xf5, 0xed, 0xf4, 0xfc, 0xc2, 0xc3, 0xbe, 0xe0, 0xec, 0x71, 0xca, 0x49, 0x38, 0x78, 0x88, 0xfb, - 0x50, 0xe3, 0x9b, 0x31, 0xb2, 0xbf, 0xa4, 0xc6, 0xc0, 0xf5, 0xc5, 0xcd, 0x52, 0x9f, 0x73, 0xb2, - 0x53, 0x11, 0x6c, 0x76, 0x23, 0x85, 0x43, 0xd7, 0xa2, 0x3b, 0xc5, 0x7e, 0x4f, 0xd3, 0xda, 0x7a, - 0x35, 0xb6, 0xf2, 0xc8, 0xf5, 0x79, 0x42, 0x0d, 0xdd, 0x24, 0xc0, 0x55, 0x99, 0x50, 0x43, 0x37, - 0x0e, 0xef, 0x7d, 0x58, 0x33, 0x4d, 0xb9, 0x66, 0xdb, 0x34, 0xa2, 0x16, 0x3f, 0x50, 0x51, 0x26, - 0x58, 0xa6, 0xb9, 0x2f, 0x09, 0x51, 0x8e, 0x07, 0xf8, 0x63, 0x78, 0x73, 0x12, 0xac, 0xb4, 0xe2, - 0xea, 0xcc, 0x2a, 0xa7, 0x55, 0xef, 0xc3, 0x9a, 0x77, 0x3e, 0xab, 0x88, 0x33, 0x33, 0x7a, 0xe7, - 0xd3, 0x6a, 0xd7, 0xc5, 0x67, 0x9b, 0x4f, 0x4d, 0x12, 0x52, 0x4b, 0x7d, 0x2b, 0xcd, 0x4e, 0x09, - 0xf0, 0x1d, 0x40, 0xa6, 0x69, 0x50, 0x46, 0x4e, 0x1d, 0x6a, 0x10, 0x9f, 0x32, 0x12, 0xa8, 0x57, - 0xd2, 0xe4, 0xba, 0x69, 0x6a, 0x42, 0xda, 0x12, 0x42, 0x7c, 0x0b, 0x56, 0xdd, 0xd3, 0xa7, 0xa6, - 0xcc, 0x2c, 0xc3, 0xf3, 0xe9, 0xc0, 0x7e, 0xa9, 0x5e, 0x13, 0x61, 0x5a, 0xe1, 0x02, 0x91, 0x57, - 0x3d, 0x01, 0xe3, 0x9b, 0x80, 0xcc, 0xe0, 0x8c, 0xf8, 0x9e, 0xb8, 0xda, 0x03, 0x8f, 0x98, 0x54, - 0xbd, 0x2e, 0xa9, 0x12, 0x3f, 0x8a, 0x61, 0xfc, 0x04, 0xd6, 0xc7, 0xcc, 0x66, 0x21, 0xf5, 0x3d, - 0x9f, 0xf2, 0x0e, 0x5d, 0x1e, 0x33, 0xf5, 0x5f, 0x4b, 0x0b, 0x7a, 0xec, 0x93, 0x34, 0x5b, 0xee, - 0xae, 0xbe, 0x36, 0x9e, 0x05, 0x1b, 0x3b, 0x50, 0x4b, 0x6f, 0x3a, 0xae, 0x80, 0xdc, 0x76, 0xa4, - 0xf0, 0x0b, 0x74, 0xaf, 0xdb, 0xe6, 0x57, 0xdf, 0x17, 0x1a, 0xca, 0xf1, 0x2b, 0xf8, 0xa0, 0x73, - 0xac, 0x19, 0xfa, 0xc9, 0xd1, 0x71, 0xe7, 0x50, 0x43, 0xf9, 0x5b, 0x95, 0xf2, 0xbf, 0x97, 0xd0, - 0xab, 0x57, 0xaf, 0x5e, 0xe5, 0x1e, 0x17, 0xca, 0x1f, 0xa0, 0x1b, 0x8d, 0xbf, 0xe6, 0xa0, 0x9e, - 0x6d, 0x7e, 0xf1, 0x4f, 0xe0, 0xad, 0xf8, 0x4b, 0x35, 0xa0, 0xa1, 0xf1, 0xc2, 0xf6, 0x45, 0x36, - 0x8e, 0x88, 0x6c, 0x1f, 0x93, 0x40, 0xae, 0x47, 0xac, 0x3e, 0x0d, 0x7f, 0x61, 0xfb, 0x3c, 0xd7, - 0x46, 0x24, 0xc4, 0x07, 0x70, 0x85, 0xb9, 0x46, 0x10, 0x12, 0x66, 0x11, 0xdf, 0x32, 0x26, 0x6f, - 0x04, 0x06, 0x31, 0x4d, 0x1a, 0x04, 0xae, 0xbc, 0x05, 0x12, 0x2b, 0xef, 0x30, 0xb7, 0x1f, 0x91, - 0x27, 0xe5, 0xb1, 0x15, 0x51, 0xa7, 0x36, 0x3d, 0xbf, 0x68, 0xd3, 0xdf, 0x86, 0xca, 0x88, 0x78, - 0x06, 0x65, 0xa1, 0x7f, 0x2e, 0x5a, 0xb6, 0xb2, 0x5e, 0x1e, 0x11, 0x4f, 0xe3, 0xe3, 0xef, 0x6e, - 
0x27, 0xb2, 0xd1, 0x2c, 0xa3, 0x4a, 0xe3, 0x1f, 0x79, 0xa8, 0xa5, 0x9b, 0x37, 0xde, 0x0b, 0x9b, - 0xa2, 0x50, 0x2b, 0xe2, 0x28, 0xbf, 0xff, 0x8d, 0xad, 0x5e, 0x73, 0x8f, 0x57, 0xf0, 0x9d, 0x92, - 0x6c, 0xa9, 0x74, 0xa9, 0xc9, 0x6f, 0x4f, 0x7e, 0x78, 0xa9, 0x6c, 0xd4, 0xcb, 0x7a, 0x34, 0xc2, - 0xfb, 0x50, 0x7a, 0x1a, 0x08, 0xdb, 0x25, 0x61, 0xfb, 0xda, 0x37, 0xdb, 0x7e, 0xdc, 0x17, 0xc6, - 0x2b, 0x8f, 0xfb, 0xc6, 0x51, 0x57, 0x3f, 0x6c, 0x1d, 0xe8, 0x91, 0x3a, 0xbe, 0x0c, 0x05, 0x87, - 0x7c, 0x79, 0x9e, 0xad, 0xf5, 0x02, 0xba, 0x68, 0xf8, 0x2f, 0x43, 0xe1, 0x05, 0x25, 0xcf, 0xb2, - 0x15, 0x56, 0x40, 0xdf, 0xe1, 0x31, 0xb8, 0x03, 0x45, 0x11, 0x2f, 0x0c, 0x10, 0x45, 0x0c, 0xbd, - 0x81, 0xcb, 0x50, 0xd8, 0xeb, 0xea, 0xfc, 0x28, 0x20, 0xa8, 0x49, 0xd4, 0xe8, 0x75, 0xb4, 0x3d, - 0x0d, 0xe5, 0x1a, 0xf7, 0xa1, 0x24, 0x83, 0xc0, 0x8f, 0x49, 0x12, 0x06, 0xf4, 0x46, 0x34, 0x8c, - 0x6c, 0x28, 0xb1, 0xf4, 0xe4, 0x70, 0x57, 0xd3, 0x51, 0x2e, 0xbb, 0xc9, 0x05, 0x54, 0x6c, 0x04, - 0x50, 0x4b, 0x77, 0x6f, 0xdf, 0x4b, 0x7e, 0x35, 0xfe, 0xa2, 0x40, 0x35, 0xd5, 0x8d, 0xf1, 0x3e, - 0x80, 0x38, 0x8e, 0xfb, 0xc2, 0x20, 0x8e, 0x4d, 0x82, 0x28, 0x35, 0x40, 0x40, 0x2d, 0x8e, 0x5c, - 0x74, 0xeb, 0xbe, 0x17, 0xe7, 0xff, 0xa8, 0x00, 0x9a, 0xee, 0xe4, 0xa6, 0x1c, 0x54, 0x7e, 0x50, - 0x07, 0xff, 0xa0, 0x40, 0x3d, 0xdb, 0xbe, 0x4d, 0xb9, 0x77, 0xf5, 0x07, 0x75, 0xef, 0xf7, 0x0a, - 0x2c, 0x67, 0x9a, 0xb6, 0xff, 0x2b, 0xef, 0x7e, 0x97, 0x87, 0xb5, 0x39, 0x7a, 0xb8, 0x15, 0x75, - 0xb7, 0xb2, 0xe1, 0xfe, 0xf1, 0x45, 0xe6, 0x6a, 0xf2, 0xfb, 0xb3, 0x47, 0xfc, 0x30, 0x6a, 0x86, - 0x6f, 0x02, 0xb2, 0x2d, 0xca, 0x42, 0x7b, 0x60, 0x53, 0x3f, 0xfa, 0x22, 0x97, 0x2d, 0xef, 0xca, - 0x04, 0x97, 0x1f, 0xe5, 0x3f, 0x02, 0xec, 0xb9, 0x81, 0x1d, 0xda, 0xcf, 0xa9, 0x61, 0xb3, 0xf8, - 0xf3, 0x9d, 0xb7, 0xc0, 0x05, 0x1d, 0xc5, 0x92, 0x0e, 0x0b, 0x13, 0x36, 0xa3, 0x43, 0x32, 0xc5, - 0xe6, 0x15, 0x30, 0xaf, 0xa3, 0x58, 0x92, 0xb0, 0xaf, 0x42, 0xcd, 0x72, 0xc7, 0xbc, 0xa1, 0x90, - 0x3c, 0x5e, 0x70, 0x15, 0xbd, 0x2a, 0xb1, 0x84, 0x12, 0x75, 0x7c, 0x93, 0x77, 0x83, 0x9a, 0x5e, - 0x95, 0x98, 0xa4, 0xdc, 0x80, 0x15, 0x32, 0x1c, 0xfa, 0xdc, 0x78, 0x6c, 0x48, 0xf6, 0xb0, 0xf5, - 0x04, 0x16, 0xc4, 0x8d, 0xc7, 0x50, 0x8e, 0xe3, 0xc0, 0x6f, 0x36, 0x1e, 0x09, 0xc3, 0x93, 0xaf, - 0x37, 0xb9, 0xad, 0x8a, 0x5e, 0x66, 0xb1, 0xf0, 0x2a, 0xd4, 0xec, 0xc0, 0x98, 0x3c, 0x23, 0xe6, - 0x36, 0x73, 0x5b, 0x65, 0xbd, 0x6a, 0x07, 0xc9, 0xbb, 0x51, 0xe3, 0xeb, 0x1c, 0xd4, 0xb3, 0xcf, - 0xa0, 0xb8, 0x0d, 0x65, 0xc7, 0x35, 0x89, 0x48, 0x04, 0xf9, 0x06, 0xbf, 0xf5, 0x9a, 0x97, 0xd3, - 0xe6, 0x41, 0xc4, 0xd7, 0x13, 0xcd, 0x8d, 0xbf, 0x29, 0x50, 0x8e, 0x61, 0x7c, 0x09, 0x0a, 0x1e, - 0x09, 0xcf, 0x84, 0xb9, 0xe2, 0x6e, 0x0e, 0x29, 0xba, 0x18, 0x73, 0x3c, 0xf0, 0x08, 0x13, 0x29, - 0x10, 0xe1, 0x7c, 0xcc, 0xf7, 0xd5, 0xa1, 0xc4, 0x12, 0x0d, 0xb2, 0x3b, 0x1a, 0x51, 0x16, 0x06, - 0xf1, 0xbe, 0x46, 0xf8, 0x5e, 0x04, 0xe3, 0xdb, 0xb0, 0x1a, 0xfa, 0xc4, 0x76, 0x32, 0xdc, 0x82, - 0xe0, 0xa2, 0x58, 0x90, 0x90, 0x77, 0xe0, 0x72, 0x6c, 0xd7, 0xa2, 0x21, 0x31, 0xcf, 0xa8, 0x35, - 0x51, 0x2a, 0x89, 0x37, 0xb6, 0xb7, 0x22, 0x42, 0x3b, 0x92, 0xc7, 0xba, 0x8d, 0xbf, 0x2b, 0xb0, - 0x1a, 0xb7, 0xf4, 0x56, 0x12, 0xac, 0x43, 0x00, 0xc2, 0x98, 0x1b, 0xa6, 0xc3, 0x35, 0x9b, 0xca, - 0x33, 0x7a, 0xcd, 0x56, 0xa2, 0xa4, 0xa7, 0x0c, 0x6c, 0x8c, 0x00, 0x26, 0x92, 0x85, 0x61, 0xbb, - 0x02, 0xd5, 0xe8, 0x8d, 0x5b, 0xfc, 0xa3, 0x44, 0x7e, 0x04, 0x82, 0x84, 0x78, 0xef, 0x8f, 0xd7, - 0xa1, 0x78, 0x4a, 0x87, 0x36, 0x8b, 0x5e, 0xde, 0xe4, 0x20, 0x7e, 0xcf, 0x2b, 0x24, 0xef, 0x79, - 0xbb, 0x4f, 0x60, 0xcd, 
0x74, 0x47, 0xd3, 0xee, 0xee, 0xa2, 0xa9, 0x0f, 0xd1, 0xe0, 0x33, 0xe5, - 0x0b, 0x98, 0x74, 0x6a, 0x5f, 0xe5, 0xf2, 0xfb, 0xbd, 0xdd, 0x3f, 0xe5, 0x36, 0xf6, 0xa5, 0x5e, - 0x2f, 0x5e, 0xa6, 0x4e, 0x07, 0x0e, 0x35, 0xb9, 0xeb, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x5f, - 0x1c, 0x48, 0x4f, 0x0d, 0x1a, 0x00, 0x00, -} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile deleted file mode 100644 index b5715c3..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile +++ /dev/null @@ -1,40 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. -# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -include $(GOROOT)/src/Make.inc - -TARG=github.com/golang/protobuf/compiler/generator -GOFILES=\ - generator.go\ - -DEPS=../descriptor ../plugin ../../proto - -include $(GOROOT)/src/Make.pkg diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go deleted file mode 100644 index a5879fe..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go +++ /dev/null @@ -1,2806 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* - The code generator for the plugin for the Google protocol buffer compiler. - It generates Go code from the protocol buffer description files read by the - main routine. -*/ -package generator - -import ( - "bufio" - "bytes" - "compress/gzip" - "fmt" - "go/parser" - "go/printer" - "go/token" - "log" - "os" - "path" - "strconv" - "strings" - "unicode" - "unicode/utf8" - - "github.com/golang/protobuf/proto" - - "github.com/golang/protobuf/protoc-gen-go/descriptor" - plugin "github.com/golang/protobuf/protoc-gen-go/plugin" -) - -// generatedCodeVersion indicates a version of the generated code. -// It is incremented whenever an incompatibility between the generated code and -// proto package is introduced; the generated code references -// a constant, proto.ProtoPackageIsVersionN (where N is generatedCodeVersion). -const generatedCodeVersion = 2 - -// A Plugin provides functionality to add to the output during Go code generation, -// such as to produce RPC stubs. -type Plugin interface { - // Name identifies the plugin. - Name() string - // Init is called once after data structures are built but before - // code generation begins. - Init(g *Generator) - // Generate produces the code generated by the plugin for this file, - // except for the imports, by calling the generator's methods P, In, and Out. - Generate(file *FileDescriptor) - // GenerateImports produces the import declarations for this file. - // It is called after Generate. - GenerateImports(file *FileDescriptor) -} - -var plugins []Plugin - -// RegisterPlugin installs a (second-order) plugin to be run when the Go output is generated. -// It is typically called during initialization. -func RegisterPlugin(p Plugin) { - plugins = append(plugins, p) -} - -// Each type we import as a protocol buffer (other than FileDescriptorProto) needs -// a pointer to the FileDescriptorProto that represents it. These types achieve that -// wrapping by placing each Proto inside a struct with the pointer to its File. The -// structs have the same names as their contents, with "Proto" removed. -// FileDescriptor is used to store the things that it points to. - -// The file and package name method are common to messages and enums. -type common struct { - file *descriptor.FileDescriptorProto // File this object comes from. -} - -// PackageName is name in the package clause in the generated file. 
-func (c *common) PackageName() string { return uniquePackageOf(c.file) } - -func (c *common) File() *descriptor.FileDescriptorProto { return c.file } - -func fileIsProto3(file *descriptor.FileDescriptorProto) bool { - return file.GetSyntax() == "proto3" -} - -func (c *common) proto3() bool { return fileIsProto3(c.file) } - -// Descriptor represents a protocol buffer message. -type Descriptor struct { - common - *descriptor.DescriptorProto - parent *Descriptor // The containing message, if any. - nested []*Descriptor // Inner messages, if any. - enums []*EnumDescriptor // Inner enums, if any. - ext []*ExtensionDescriptor // Extensions, if any. - typename []string // Cached typename vector. - index int // The index into the container, whether the file or another message. - path string // The SourceCodeInfo path as comma-separated integers. - group bool -} - -// TypeName returns the elements of the dotted type name. -// The package name is not part of this name. -func (d *Descriptor) TypeName() []string { - if d.typename != nil { - return d.typename - } - n := 0 - for parent := d; parent != nil; parent = parent.parent { - n++ - } - s := make([]string, n, n) - for parent := d; parent != nil; parent = parent.parent { - n-- - s[n] = parent.GetName() - } - d.typename = s - return s -} - -// EnumDescriptor describes an enum. If it's at top level, its parent will be nil. -// Otherwise it will be the descriptor of the message in which it is defined. -type EnumDescriptor struct { - common - *descriptor.EnumDescriptorProto - parent *Descriptor // The containing message, if any. - typename []string // Cached typename vector. - index int // The index into the container, whether the file or a message. - path string // The SourceCodeInfo path as comma-separated integers. -} - -// TypeName returns the elements of the dotted type name. -// The package name is not part of this name. -func (e *EnumDescriptor) TypeName() (s []string) { - if e.typename != nil { - return e.typename - } - name := e.GetName() - if e.parent == nil { - s = make([]string, 1) - } else { - pname := e.parent.TypeName() - s = make([]string, len(pname)+1) - copy(s, pname) - } - s[len(s)-1] = name - e.typename = s - return s -} - -// Everything but the last element of the full type name, CamelCased. -// The values of type Foo.Bar are call Foo_value1... not Foo_Bar_value1... . -func (e *EnumDescriptor) prefix() string { - if e.parent == nil { - // If the enum is not part of a message, the prefix is just the type name. - return CamelCase(*e.Name) + "_" - } - typeName := e.TypeName() - return CamelCaseSlice(typeName[0:len(typeName)-1]) + "_" -} - -// The integer value of the named constant in this enumerated type. -func (e *EnumDescriptor) integerValueAsString(name string) string { - for _, c := range e.Value { - if c.GetName() == name { - return fmt.Sprint(c.GetNumber()) - } - } - log.Fatal("cannot find value for enum constant") - return "" -} - -// ExtensionDescriptor describes an extension. If it's at top level, its parent will be nil. -// Otherwise it will be the descriptor of the message in which it is defined. -type ExtensionDescriptor struct { - common - *descriptor.FieldDescriptorProto - parent *Descriptor // The containing message, if any. -} - -// TypeName returns the elements of the dotted type name. -// The package name is not part of this name. 
-func (e *ExtensionDescriptor) TypeName() (s []string) { - name := e.GetName() - if e.parent == nil { - // top-level extension - s = make([]string, 1) - } else { - pname := e.parent.TypeName() - s = make([]string, len(pname)+1) - copy(s, pname) - } - s[len(s)-1] = name - return s -} - -// DescName returns the variable name used for the generated descriptor. -func (e *ExtensionDescriptor) DescName() string { - // The full type name. - typeName := e.TypeName() - // Each scope of the extension is individually CamelCased, and all are joined with "_" with an "E_" prefix. - for i, s := range typeName { - typeName[i] = CamelCase(s) - } - return "E_" + strings.Join(typeName, "_") -} - -// ImportedDescriptor describes a type that has been publicly imported from another file. -type ImportedDescriptor struct { - common - o Object -} - -func (id *ImportedDescriptor) TypeName() []string { return id.o.TypeName() } - -// FileDescriptor describes an protocol buffer descriptor file (.proto). -// It includes slices of all the messages and enums defined within it. -// Those slices are constructed by WrapTypes. -type FileDescriptor struct { - *descriptor.FileDescriptorProto - desc []*Descriptor // All the messages defined in this file. - enum []*EnumDescriptor // All the enums defined in this file. - ext []*ExtensionDescriptor // All the top-level extensions defined in this file. - imp []*ImportedDescriptor // All types defined in files publicly imported by this file. - - // Comments, stored as a map of path (comma-separated integers) to the comment. - comments map[string]*descriptor.SourceCodeInfo_Location - - // The full list of symbols that are exported, - // as a map from the exported object to its symbols. - // This is used for supporting public imports. - exported map[Object][]symbol - - index int // The index of this file in the list of files to generate code for - - proto3 bool // whether to generate proto3 code for this file -} - -// PackageName is the package name we'll use in the generated code to refer to this file. -func (d *FileDescriptor) PackageName() string { return uniquePackageOf(d.FileDescriptorProto) } - -// VarName is the variable name we'll use in the generated code to refer -// to the compressed bytes of this descriptor. It is not exported, so -// it is only valid inside the generated package. -func (d *FileDescriptor) VarName() string { return fmt.Sprintf("fileDescriptor%d", d.index) } - -// goPackageOption interprets the file's go_package option. -// If there is no go_package, it returns ("", "", false). -// If there's a simple name, it returns ("", pkg, true). -// If the option implies an import path, it returns (impPath, pkg, true). -func (d *FileDescriptor) goPackageOption() (impPath, pkg string, ok bool) { - pkg = d.GetOptions().GetGoPackage() - if pkg == "" { - return - } - ok = true - // The presence of a slash implies there's an import path. - slash := strings.LastIndex(pkg, "/") - if slash < 0 { - return - } - impPath, pkg = pkg, pkg[slash+1:] - // A semicolon-delimited suffix overrides the package name. - sc := strings.IndexByte(impPath, ';') - if sc < 0 { - return - } - impPath, pkg = impPath[:sc], impPath[sc+1:] - return -} - -// goPackageName returns the Go package name to use in the -// generated Go file. The result explicit reports whether the name -// came from an option go_package statement. If explicit is false, -// the name was derived from the protocol buffer's package statement -// or the input file name. 
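The go_package parsing rules spelled out in goPackageOption above boil down to: a slash implies an import path, and a trailing ";name" overrides the package name. A self-contained sketch of the same splitting behavior follows; splitGoPackage is an illustrative stand-in, not the generator's own API.

package main

import (
	"fmt"
	"strings"
)

// splitGoPackage mimics the rules of the deleted goPackageOption method:
// no slash means a bare package name, otherwise everything up to the last
// slash is the import path, and a ";pkg" suffix overrides the package name.
func splitGoPackage(opt string) (impPath, pkg string, ok bool) {
	if opt == "" {
		return "", "", false
	}
	ok = true
	pkg = opt
	if slash := strings.LastIndex(pkg, "/"); slash >= 0 {
		impPath, pkg = pkg, pkg[slash+1:]
	}
	if sc := strings.IndexByte(impPath, ';'); sc >= 0 {
		impPath, pkg = impPath[:sc], impPath[sc+1:]
	}
	return impPath, pkg, ok
}

func main() {
	fmt.Println(splitGoPackage("quux"))                    //  quux true
	fmt.Println(splitGoPackage("example.com/foo/bar"))     // example.com/foo/bar bar true
	fmt.Println(splitGoPackage("example.com/foo/bar;baz")) // example.com/foo/bar baz true
}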
-func (d *FileDescriptor) goPackageName() (name string, explicit bool) { - // Does the file have a "go_package" option? - if _, pkg, ok := d.goPackageOption(); ok { - return pkg, true - } - - // Does the file have a package clause? - if pkg := d.GetPackage(); pkg != "" { - return pkg, false - } - // Use the file base name. - return baseName(d.GetName()), false -} - -// goFileName returns the output name for the generated Go file. -func (d *FileDescriptor) goFileName() string { - name := *d.Name - if ext := path.Ext(name); ext == ".proto" || ext == ".protodevel" { - name = name[:len(name)-len(ext)] - } - name += ".pb.go" - - // Does the file have a "go_package" option? - // If it does, it may override the filename. - if impPath, _, ok := d.goPackageOption(); ok && impPath != "" { - // Replace the existing dirname with the declared import path. - _, name = path.Split(name) - name = path.Join(impPath, name) - return name - } - - return name -} - -func (d *FileDescriptor) addExport(obj Object, sym symbol) { - d.exported[obj] = append(d.exported[obj], sym) -} - -// symbol is an interface representing an exported Go symbol. -type symbol interface { - // GenerateAlias should generate an appropriate alias - // for the symbol from the named package. - GenerateAlias(g *Generator, pkg string) -} - -type messageSymbol struct { - sym string - hasExtensions, isMessageSet bool - hasOneof bool - getters []getterSymbol -} - -type getterSymbol struct { - name string - typ string - typeName string // canonical name in proto world; empty for proto.Message and similar - genType bool // whether typ contains a generated type (message/group/enum) -} - -func (ms *messageSymbol) GenerateAlias(g *Generator, pkg string) { - remoteSym := pkg + "." + ms.sym - - g.P("type ", ms.sym, " ", remoteSym) - g.P("func (m *", ms.sym, ") Reset() { (*", remoteSym, ")(m).Reset() }") - g.P("func (m *", ms.sym, ") String() string { return (*", remoteSym, ")(m).String() }") - g.P("func (*", ms.sym, ") ProtoMessage() {}") - if ms.hasExtensions { - g.P("func (*", ms.sym, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange ", - "{ return (*", remoteSym, ")(nil).ExtensionRangeArray() }") - if ms.isMessageSet { - g.P("func (m *", ms.sym, ") Marshal() ([]byte, error) ", - "{ return (*", remoteSym, ")(m).Marshal() }") - g.P("func (m *", ms.sym, ") Unmarshal(buf []byte) error ", - "{ return (*", remoteSym, ")(m).Unmarshal(buf) }") - } - } - if ms.hasOneof { - // Oneofs and public imports do not mix well. - // We can make them work okay for the binary format, - // but they're going to break weirdly for text/JSON. 
- enc := "_" + ms.sym + "_OneofMarshaler" - dec := "_" + ms.sym + "_OneofUnmarshaler" - size := "_" + ms.sym + "_OneofSizer" - encSig := "(msg " + g.Pkg["proto"] + ".Message, b *" + g.Pkg["proto"] + ".Buffer) error" - decSig := "(msg " + g.Pkg["proto"] + ".Message, tag, wire int, b *" + g.Pkg["proto"] + ".Buffer) (bool, error)" - sizeSig := "(msg " + g.Pkg["proto"] + ".Message) int" - g.P("func (m *", ms.sym, ") XXX_OneofFuncs() (func", encSig, ", func", decSig, ", func", sizeSig, ", []interface{}) {") - g.P("return ", enc, ", ", dec, ", ", size, ", nil") - g.P("}") - - g.P("func ", enc, encSig, " {") - g.P("m := msg.(*", ms.sym, ")") - g.P("m0 := (*", remoteSym, ")(m)") - g.P("enc, _, _, _ := m0.XXX_OneofFuncs()") - g.P("return enc(m0, b)") - g.P("}") - - g.P("func ", dec, decSig, " {") - g.P("m := msg.(*", ms.sym, ")") - g.P("m0 := (*", remoteSym, ")(m)") - g.P("_, dec, _, _ := m0.XXX_OneofFuncs()") - g.P("return dec(m0, tag, wire, b)") - g.P("}") - - g.P("func ", size, sizeSig, " {") - g.P("m := msg.(*", ms.sym, ")") - g.P("m0 := (*", remoteSym, ")(m)") - g.P("_, _, size, _ := m0.XXX_OneofFuncs()") - g.P("return size(m0)") - g.P("}") - } - for _, get := range ms.getters { - - if get.typeName != "" { - g.RecordTypeUse(get.typeName) - } - typ := get.typ - val := "(*" + remoteSym + ")(m)." + get.name + "()" - if get.genType { - // typ will be "*pkg.T" (message/group) or "pkg.T" (enum) - // or "map[t]*pkg.T" (map to message/enum). - // The first two of those might have a "[]" prefix if it is repeated. - // Drop any package qualifier since we have hoisted the type into this package. - rep := strings.HasPrefix(typ, "[]") - if rep { - typ = typ[2:] - } - isMap := strings.HasPrefix(typ, "map[") - star := typ[0] == '*' - if !isMap { // map types handled lower down - typ = typ[strings.Index(typ, ".")+1:] - } - if star { - typ = "*" + typ - } - if rep { - // Go does not permit conversion between slice types where both - // element types are named. That means we need to generate a bit - // of code in this situation. - // typ is the element type. - // val is the expression to get the slice from the imported type. - - ctyp := typ // conversion type expression; "Foo" or "(*Foo)" - if star { - ctyp = "(" + typ + ")" - } - - g.P("func (m *", ms.sym, ") ", get.name, "() []", typ, " {") - g.In() - g.P("o := ", val) - g.P("if o == nil {") - g.In() - g.P("return nil") - g.Out() - g.P("}") - g.P("s := make([]", typ, ", len(o))") - g.P("for i, x := range o {") - g.In() - g.P("s[i] = ", ctyp, "(x)") - g.Out() - g.P("}") - g.P("return s") - g.Out() - g.P("}") - continue - } - if isMap { - // Split map[keyTyp]valTyp. - bra, ket := strings.Index(typ, "["), strings.Index(typ, "]") - keyTyp, valTyp := typ[bra+1:ket], typ[ket+1:] - // Drop any package qualifier. - // Only the value type may be foreign. - star := valTyp[0] == '*' - valTyp = valTyp[strings.Index(valTyp, ".")+1:] - if star { - valTyp = "*" + valTyp - } - - typ := "map[" + keyTyp + "]" + valTyp - g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " {") - g.P("o := ", val) - g.P("if o == nil { return nil }") - g.P("s := make(", typ, ", len(o))") - g.P("for k, v := range o {") - g.P("s[k] = (", valTyp, ")(v)") - g.P("}") - g.P("return s") - g.P("}") - continue - } - // Convert imported type into the forwarding type. - val = "(" + typ + ")(" + val + ")" - } - - g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " { return ", val, " }") - } - -} - -type enumSymbol struct { - name string - proto3 bool // Whether this came from a proto3 file. 
-} - -func (es enumSymbol) GenerateAlias(g *Generator, pkg string) { - s := es.name - g.P("type ", s, " ", pkg, ".", s) - g.P("var ", s, "_name = ", pkg, ".", s, "_name") - g.P("var ", s, "_value = ", pkg, ".", s, "_value") - g.P("func (x ", s, ") String() string { return (", pkg, ".", s, ")(x).String() }") - if !es.proto3 { - g.P("func (x ", s, ") Enum() *", s, "{ return (*", s, ")((", pkg, ".", s, ")(x).Enum()) }") - g.P("func (x *", s, ") UnmarshalJSON(data []byte) error { return (*", pkg, ".", s, ")(x).UnmarshalJSON(data) }") - } -} - -type constOrVarSymbol struct { - sym string - typ string // either "const" or "var" - cast string // if non-empty, a type cast is required (used for enums) -} - -func (cs constOrVarSymbol) GenerateAlias(g *Generator, pkg string) { - v := pkg + "." + cs.sym - if cs.cast != "" { - v = cs.cast + "(" + v + ")" - } - g.P(cs.typ, " ", cs.sym, " = ", v) -} - -// Object is an interface abstracting the abilities shared by enums, messages, extensions and imported objects. -type Object interface { - PackageName() string // The name we use in our output (a_b_c), possibly renamed for uniqueness. - TypeName() []string - File() *descriptor.FileDescriptorProto -} - -// Each package name we generate must be unique. The package we're generating -// gets its own name but every other package must have a unique name that does -// not conflict in the code we generate. These names are chosen globally (although -// they don't have to be, it simplifies things to do them globally). -func uniquePackageOf(fd *descriptor.FileDescriptorProto) string { - s, ok := uniquePackageName[fd] - if !ok { - log.Fatal("internal error: no package name defined for " + fd.GetName()) - } - return s -} - -// Generator is the type whose methods generate the output, stored in the associated response structure. -type Generator struct { - *bytes.Buffer - - Request *plugin.CodeGeneratorRequest // The input. - Response *plugin.CodeGeneratorResponse // The output. - - Param map[string]string // Command-line parameters. - PackageImportPath string // Go import path of the package we're generating code for - ImportPrefix string // String to prefix to imported package file names. - ImportMap map[string]string // Mapping from .proto file name to import path - - Pkg map[string]string // The names under which we import support packages - - packageName string // What we're calling ourselves. - allFiles []*FileDescriptor // All files in the tree - allFilesByName map[string]*FileDescriptor // All files by filename. - genFiles []*FileDescriptor // Those files we will generate output for. - file *FileDescriptor // The file we are compiling now. - usedPackages map[string]bool // Names of packages used in current file. - typeNameToObject map[string]Object // Key is a fully-qualified name in input syntax. - init []string // Lines to emit in the init function. - indent string - writeOutput bool -} - -// New creates a new generator and allocates the request and response protobufs. -func New() *Generator { - g := new(Generator) - g.Buffer = new(bytes.Buffer) - g.Request = new(plugin.CodeGeneratorRequest) - g.Response = new(plugin.CodeGeneratorResponse) - return g -} - -// Error reports a problem, including an error, and exits the program. -func (g *Generator) Error(err error, msgs ...string) { - s := strings.Join(msgs, " ") + ":" + err.Error() - log.Print("protoc-gen-go: error:", s) - os.Exit(1) -} - -// Fail reports a problem and exits the program. 
-func (g *Generator) Fail(msgs ...string) { - s := strings.Join(msgs, " ") - log.Print("protoc-gen-go: error:", s) - os.Exit(1) -} - -// CommandLineParameters breaks the comma-separated list of key=value pairs -// in the parameter (a member of the request protobuf) into a key/value map. -// It then sets file name mappings defined by those entries. -func (g *Generator) CommandLineParameters(parameter string) { - g.Param = make(map[string]string) - for _, p := range strings.Split(parameter, ",") { - if i := strings.Index(p, "="); i < 0 { - g.Param[p] = "" - } else { - g.Param[p[0:i]] = p[i+1:] - } - } - - g.ImportMap = make(map[string]string) - pluginList := "none" // Default list of plugin names to enable (empty means all). - for k, v := range g.Param { - switch k { - case "import_prefix": - g.ImportPrefix = v - case "import_path": - g.PackageImportPath = v - case "plugins": - pluginList = v - default: - if len(k) > 0 && k[0] == 'M' { - g.ImportMap[k[1:]] = v - } - } - } - if pluginList != "" { - // Amend the set of plugins. - enabled := make(map[string]bool) - for _, name := range strings.Split(pluginList, "+") { - enabled[name] = true - } - var nplugins []Plugin - for _, p := range plugins { - if enabled[p.Name()] { - nplugins = append(nplugins, p) - } - } - plugins = nplugins - } -} - -// DefaultPackageName returns the package name printed for the object. -// If its file is in a different package, it returns the package name we're using for this file, plus ".". -// Otherwise it returns the empty string. -func (g *Generator) DefaultPackageName(obj Object) string { - pkg := obj.PackageName() - if pkg == g.packageName { - return "" - } - return pkg + "." -} - -// For each input file, the unique package name to use, underscored. -var uniquePackageName = make(map[*descriptor.FileDescriptorProto]string) - -// Package names already registered. Key is the name from the .proto file; -// value is the name that appears in the generated code. -var pkgNamesInUse = make(map[string]bool) - -// Create and remember a guaranteed unique package name for this file descriptor. -// Pkg is the candidate name. If f is nil, it's a builtin package like "proto" and -// has no file descriptor. -func RegisterUniquePackageName(pkg string, f *FileDescriptor) string { - // Convert dots to underscores before finding a unique alias. - pkg = strings.Map(badToUnderscore, pkg) - - for i, orig := 1, pkg; pkgNamesInUse[pkg]; i++ { - // It's a duplicate; must rename. - pkg = orig + strconv.Itoa(i) - } - // Install it. - pkgNamesInUse[pkg] = true - if f != nil { - uniquePackageName[f.FileDescriptorProto] = pkg - } - return pkg -} - -var isGoKeyword = map[string]bool{ - "break": true, - "case": true, - "chan": true, - "const": true, - "continue": true, - "default": true, - "else": true, - "defer": true, - "fallthrough": true, - "for": true, - "func": true, - "go": true, - "goto": true, - "if": true, - "import": true, - "interface": true, - "map": true, - "package": true, - "range": true, - "return": true, - "select": true, - "struct": true, - "switch": true, - "type": true, - "var": true, -} - -// defaultGoPackage returns the package name to use, -// derived from the import path of the package we're building code for. -func (g *Generator) defaultGoPackage() string { - p := g.PackageImportPath - if i := strings.LastIndex(p, "/"); i >= 0 { - p = p[i+1:] - } - if p == "" { - return "" - } - - p = strings.Map(badToUnderscore, p) - // Identifier must not be keyword: insert _. 
- if isGoKeyword[p] { - p = "_" + p - } - // Identifier must not begin with digit: insert _. - if r, _ := utf8.DecodeRuneInString(p); unicode.IsDigit(r) { - p = "_" + p - } - return p -} - -// SetPackageNames sets the package name for this run. -// The package name must agree across all files being generated. -// It also defines unique package names for all imported files. -func (g *Generator) SetPackageNames() { - // Register the name for this package. It will be the first name - // registered so is guaranteed to be unmodified. - pkg, explicit := g.genFiles[0].goPackageName() - - // Check all files for an explicit go_package option. - for _, f := range g.genFiles { - thisPkg, thisExplicit := f.goPackageName() - if thisExplicit { - if !explicit { - // Let this file's go_package option serve for all input files. - pkg, explicit = thisPkg, true - } else if thisPkg != pkg { - g.Fail("inconsistent package names:", thisPkg, pkg) - } - } - } - - // If we don't have an explicit go_package option but we have an - // import path, use that. - if !explicit { - p := g.defaultGoPackage() - if p != "" { - pkg, explicit = p, true - } - } - - // If there was no go_package and no import path to use, - // double-check that all the inputs have the same implicit - // Go package name. - if !explicit { - for _, f := range g.genFiles { - thisPkg, _ := f.goPackageName() - if thisPkg != pkg { - g.Fail("inconsistent package names:", thisPkg, pkg) - } - } - } - - g.packageName = RegisterUniquePackageName(pkg, g.genFiles[0]) - - // Register the support package names. They might collide with the - // name of a package we import. - g.Pkg = map[string]string{ - "fmt": RegisterUniquePackageName("fmt", nil), - "math": RegisterUniquePackageName("math", nil), - "proto": RegisterUniquePackageName("proto", nil), - } - -AllFiles: - for _, f := range g.allFiles { - for _, genf := range g.genFiles { - if f == genf { - // In this package already. - uniquePackageName[f.FileDescriptorProto] = g.packageName - continue AllFiles - } - } - // The file is a dependency, so we want to ignore its go_package option - // because that is only relevant for its specific generated output. - pkg := f.GetPackage() - if pkg == "" { - pkg = baseName(*f.Name) - } - RegisterUniquePackageName(pkg, f) - } -} - -// WrapTypes walks the incoming data, wrapping DescriptorProtos, EnumDescriptorProtos -// and FileDescriptorProtos into file-referenced objects within the Generator. -// It also creates the list of files to generate and so should be called before GenerateAllFiles. 
-func (g *Generator) WrapTypes() { - g.allFiles = make([]*FileDescriptor, 0, len(g.Request.ProtoFile)) - g.allFilesByName = make(map[string]*FileDescriptor, len(g.allFiles)) - for _, f := range g.Request.ProtoFile { - // We must wrap the descriptors before we wrap the enums - descs := wrapDescriptors(f) - g.buildNestedDescriptors(descs) - enums := wrapEnumDescriptors(f, descs) - g.buildNestedEnums(descs, enums) - exts := wrapExtensions(f) - fd := &FileDescriptor{ - FileDescriptorProto: f, - desc: descs, - enum: enums, - ext: exts, - exported: make(map[Object][]symbol), - proto3: fileIsProto3(f), - } - extractComments(fd) - g.allFiles = append(g.allFiles, fd) - g.allFilesByName[f.GetName()] = fd - } - for _, fd := range g.allFiles { - fd.imp = wrapImported(fd.FileDescriptorProto, g) - } - - g.genFiles = make([]*FileDescriptor, 0, len(g.Request.FileToGenerate)) - for _, fileName := range g.Request.FileToGenerate { - fd := g.allFilesByName[fileName] - if fd == nil { - g.Fail("could not find file named", fileName) - } - fd.index = len(g.genFiles) - g.genFiles = append(g.genFiles, fd) - } -} - -// Scan the descriptors in this file. For each one, build the slice of nested descriptors -func (g *Generator) buildNestedDescriptors(descs []*Descriptor) { - for _, desc := range descs { - if len(desc.NestedType) != 0 { - for _, nest := range descs { - if nest.parent == desc { - desc.nested = append(desc.nested, nest) - } - } - if len(desc.nested) != len(desc.NestedType) { - g.Fail("internal error: nesting failure for", desc.GetName()) - } - } - } -} - -func (g *Generator) buildNestedEnums(descs []*Descriptor, enums []*EnumDescriptor) { - for _, desc := range descs { - if len(desc.EnumType) != 0 { - for _, enum := range enums { - if enum.parent == desc { - desc.enums = append(desc.enums, enum) - } - } - if len(desc.enums) != len(desc.EnumType) { - g.Fail("internal error: enum nesting failure for", desc.GetName()) - } - } - } -} - -// Construct the Descriptor -func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *Descriptor { - d := &Descriptor{ - common: common{file}, - DescriptorProto: desc, - parent: parent, - index: index, - } - if parent == nil { - d.path = fmt.Sprintf("%d,%d", messagePath, index) - } else { - d.path = fmt.Sprintf("%s,%d,%d", parent.path, messageMessagePath, index) - } - - // The only way to distinguish a group from a message is whether - // the containing message has a TYPE_GROUP field that matches. - if parent != nil { - parts := d.TypeName() - if file.Package != nil { - parts = append([]string{*file.Package}, parts...) - } - exp := "." 
+ strings.Join(parts, ".") - for _, field := range parent.Field { - if field.GetType() == descriptor.FieldDescriptorProto_TYPE_GROUP && field.GetTypeName() == exp { - d.group = true - break - } - } - } - - for _, field := range desc.Extension { - d.ext = append(d.ext, &ExtensionDescriptor{common{file}, field, d}) - } - - return d -} - -// Return a slice of all the Descriptors defined within this file -func wrapDescriptors(file *descriptor.FileDescriptorProto) []*Descriptor { - sl := make([]*Descriptor, 0, len(file.MessageType)+10) - for i, desc := range file.MessageType { - sl = wrapThisDescriptor(sl, desc, nil, file, i) - } - return sl -} - -// Wrap this Descriptor, recursively -func wrapThisDescriptor(sl []*Descriptor, desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) []*Descriptor { - sl = append(sl, newDescriptor(desc, parent, file, index)) - me := sl[len(sl)-1] - for i, nested := range desc.NestedType { - sl = wrapThisDescriptor(sl, nested, me, file, i) - } - return sl -} - -// Construct the EnumDescriptor -func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *EnumDescriptor { - ed := &EnumDescriptor{ - common: common{file}, - EnumDescriptorProto: desc, - parent: parent, - index: index, - } - if parent == nil { - ed.path = fmt.Sprintf("%d,%d", enumPath, index) - } else { - ed.path = fmt.Sprintf("%s,%d,%d", parent.path, messageEnumPath, index) - } - return ed -} - -// Return a slice of all the EnumDescriptors defined within this file -func wrapEnumDescriptors(file *descriptor.FileDescriptorProto, descs []*Descriptor) []*EnumDescriptor { - sl := make([]*EnumDescriptor, 0, len(file.EnumType)+10) - // Top-level enums. - for i, enum := range file.EnumType { - sl = append(sl, newEnumDescriptor(enum, nil, file, i)) - } - // Enums within messages. Enums within embedded messages appear in the outer-most message. - for _, nested := range descs { - for i, enum := range nested.EnumType { - sl = append(sl, newEnumDescriptor(enum, nested, file, i)) - } - } - return sl -} - -// Return a slice of all the top-level ExtensionDescriptors defined within this file. -func wrapExtensions(file *descriptor.FileDescriptorProto) []*ExtensionDescriptor { - var sl []*ExtensionDescriptor - for _, field := range file.Extension { - sl = append(sl, &ExtensionDescriptor{common{file}, field, nil}) - } - return sl -} - -// Return a slice of all the types that are publicly imported into this file. -func wrapImported(file *descriptor.FileDescriptorProto, g *Generator) (sl []*ImportedDescriptor) { - for _, index := range file.PublicDependency { - df := g.fileByName(file.Dependency[index]) - for _, d := range df.desc { - if d.GetOptions().GetMapEntry() { - continue - } - sl = append(sl, &ImportedDescriptor{common{file}, d}) - } - for _, e := range df.enum { - sl = append(sl, &ImportedDescriptor{common{file}, e}) - } - for _, ext := range df.ext { - sl = append(sl, &ImportedDescriptor{common{file}, ext}) - } - } - return -} - -func extractComments(file *FileDescriptor) { - file.comments = make(map[string]*descriptor.SourceCodeInfo_Location) - for _, loc := range file.GetSourceCodeInfo().GetLocation() { - if loc.LeadingComments == nil { - continue - } - var p []string - for _, n := range loc.Path { - p = append(p, strconv.Itoa(int(n))) - } - file.comments[strings.Join(p, ",")] = loc - } -} - -// BuildTypeNameMap builds the map from fully qualified type names to objects. 
-// The key names for the map come from the input data, which puts a period at the beginning. -// It should be called after SetPackageNames and before GenerateAllFiles. -func (g *Generator) BuildTypeNameMap() { - g.typeNameToObject = make(map[string]Object) - for _, f := range g.allFiles { - // The names in this loop are defined by the proto world, not us, so the - // package name may be empty. If so, the dotted package name of X will - // be ".X"; otherwise it will be ".pkg.X". - dottedPkg := "." + f.GetPackage() - if dottedPkg != "." { - dottedPkg += "." - } - for _, enum := range f.enum { - name := dottedPkg + dottedSlice(enum.TypeName()) - g.typeNameToObject[name] = enum - } - for _, desc := range f.desc { - name := dottedPkg + dottedSlice(desc.TypeName()) - g.typeNameToObject[name] = desc - } - } -} - -// ObjectNamed, given a fully-qualified input type name as it appears in the input data, -// returns the descriptor for the message or enum with that name. -func (g *Generator) ObjectNamed(typeName string) Object { - o, ok := g.typeNameToObject[typeName] - if !ok { - g.Fail("can't find object with type", typeName) - } - - // If the file of this object isn't a direct dependency of the current file, - // or in the current file, then this object has been publicly imported into - // a dependency of the current file. - // We should return the ImportedDescriptor object for it instead. - direct := *o.File().Name == *g.file.Name - if !direct { - for _, dep := range g.file.Dependency { - if *g.fileByName(dep).Name == *o.File().Name { - direct = true - break - } - } - } - if !direct { - found := false - Loop: - for _, dep := range g.file.Dependency { - df := g.fileByName(*g.fileByName(dep).Name) - for _, td := range df.imp { - if td.o == o { - // Found it! - o = td - found = true - break Loop - } - } - } - if !found { - log.Printf("protoc-gen-go: WARNING: failed finding publicly imported dependency for %v, used in %v", typeName, *g.file.Name) - } - } - - return o -} - -// P prints the arguments to the generated output. It handles strings and int32s, plus -// handling indirections because they may be *string, etc. -func (g *Generator) P(str ...interface{}) { - if !g.writeOutput { - return - } - g.WriteString(g.indent) - for _, v := range str { - switch s := v.(type) { - case string: - g.WriteString(s) - case *string: - g.WriteString(*s) - case bool: - fmt.Fprintf(g, "%t", s) - case *bool: - fmt.Fprintf(g, "%t", *s) - case int: - fmt.Fprintf(g, "%d", s) - case *int32: - fmt.Fprintf(g, "%d", *s) - case *int64: - fmt.Fprintf(g, "%d", *s) - case float64: - fmt.Fprintf(g, "%g", s) - case *float64: - fmt.Fprintf(g, "%g", *s) - default: - g.Fail(fmt.Sprintf("unknown type in printer: %T", v)) - } - } - g.WriteByte('\n') -} - -// addInitf stores the given statement to be printed inside the file's init function. -// The statement is given as a format specifier and arguments. -func (g *Generator) addInitf(stmt string, a ...interface{}) { - g.init = append(g.init, fmt.Sprintf(stmt, a...)) -} - -// In Indents the output one tab stop. -func (g *Generator) In() { g.indent += "\t" } - -// Out unindents the output one tab stop. -func (g *Generator) Out() { - if len(g.indent) > 0 { - g.indent = g.indent[1:] - } -} - -// GenerateAllFiles generates the output for all the files we're outputting. -func (g *Generator) GenerateAllFiles() { - // Initialize the plugins - for _, p := range plugins { - p.Init(g) - } - // Generate the output. 
The generator runs for every file, even the files - // that we don't generate output for, so that we can collate the full list - // of exported symbols to support public imports. - genFileMap := make(map[*FileDescriptor]bool, len(g.genFiles)) - for _, file := range g.genFiles { - genFileMap[file] = true - } - for _, file := range g.allFiles { - g.Reset() - g.writeOutput = genFileMap[file] - g.generate(file) - if !g.writeOutput { - continue - } - g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{ - Name: proto.String(file.goFileName()), - Content: proto.String(g.String()), - }) - } -} - -// Run all the plugins associated with the file. -func (g *Generator) runPlugins(file *FileDescriptor) { - for _, p := range plugins { - p.Generate(file) - } -} - -// FileOf return the FileDescriptor for this FileDescriptorProto. -func (g *Generator) FileOf(fd *descriptor.FileDescriptorProto) *FileDescriptor { - for _, file := range g.allFiles { - if file.FileDescriptorProto == fd { - return file - } - } - g.Fail("could not find file in table:", fd.GetName()) - return nil -} - -// Fill the response protocol buffer with the generated output for all the files we're -// supposed to generate. -func (g *Generator) generate(file *FileDescriptor) { - g.file = g.FileOf(file.FileDescriptorProto) - g.usedPackages = make(map[string]bool) - - if g.file.index == 0 { - // For one file in the package, assert version compatibility. - g.P("// This is a compile-time assertion to ensure that this generated file") - g.P("// is compatible with the proto package it is being compiled against.") - g.P("// A compilation error at this line likely means your copy of the") - g.P("// proto package needs to be updated.") - g.P("const _ = ", g.Pkg["proto"], ".ProtoPackageIsVersion", generatedCodeVersion, " // please upgrade the proto package") - g.P() - } - for _, td := range g.file.imp { - g.generateImported(td) - } - for _, enum := range g.file.enum { - g.generateEnum(enum) - } - for _, desc := range g.file.desc { - // Don't generate virtual messages for maps. - if desc.GetOptions().GetMapEntry() { - continue - } - g.generateMessage(desc) - } - for _, ext := range g.file.ext { - g.generateExtension(ext) - } - g.generateInitFunction() - - // Run the plugins before the imports so we know which imports are necessary. - g.runPlugins(file) - - g.generateFileDescriptor(file) - - // Generate header and imports last, though they appear first in the output. - rem := g.Buffer - g.Buffer = new(bytes.Buffer) - g.generateHeader() - g.generateImports() - if !g.writeOutput { - return - } - g.Write(rem.Bytes()) - - // Reformat generated code. - fset := token.NewFileSet() - raw := g.Bytes() - ast, err := parser.ParseFile(fset, "", g, parser.ParseComments) - if err != nil { - // Print out the bad code with line numbers. - // This should never happen in practice, but it can while changing generated code, - // so consider this a debugging aid. 
- var src bytes.Buffer - s := bufio.NewScanner(bytes.NewReader(raw)) - for line := 1; s.Scan(); line++ { - fmt.Fprintf(&src, "%5d\t%s\n", line, s.Bytes()) - } - g.Fail("bad Go source code was generated:", err.Error(), "\n"+src.String()) - } - g.Reset() - err = (&printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}).Fprint(g, fset, ast) - if err != nil { - g.Fail("generated Go source code could not be reformatted:", err.Error()) - } -} - -// Generate the header, including package definition -func (g *Generator) generateHeader() { - g.P("// Code generated by protoc-gen-go.") - g.P("// source: ", g.file.Name) - g.P("// DO NOT EDIT!") - g.P() - - name := g.file.PackageName() - - if g.file.index == 0 { - // Generate package docs for the first file in the package. - g.P("/*") - g.P("Package ", name, " is a generated protocol buffer package.") - g.P() - if loc, ok := g.file.comments[strconv.Itoa(packagePath)]; ok { - // not using g.PrintComments because this is a /* */ comment block. - text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") - for _, line := range strings.Split(text, "\n") { - line = strings.TrimPrefix(line, " ") - // ensure we don't escape from the block comment - line = strings.Replace(line, "*/", "* /", -1) - g.P(line) - } - g.P() - } - var topMsgs []string - g.P("It is generated from these files:") - for _, f := range g.genFiles { - g.P("\t", f.Name) - for _, msg := range f.desc { - if msg.parent != nil { - continue - } - topMsgs = append(topMsgs, CamelCaseSlice(msg.TypeName())) - } - } - g.P() - g.P("It has these top-level messages:") - for _, msg := range topMsgs { - g.P("\t", msg) - } - g.P("*/") - } - - g.P("package ", name) - g.P() -} - -// PrintComments prints any comments from the source .proto file. -// The path is a comma-separated list of integers. -// It returns an indication of whether any comments were printed. -// See descriptor.proto for its format. -func (g *Generator) PrintComments(path string) bool { - if !g.writeOutput { - return false - } - if loc, ok := g.file.comments[path]; ok { - text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") - for _, line := range strings.Split(text, "\n") { - g.P("// ", strings.TrimPrefix(line, " ")) - } - return true - } - return false -} - -func (g *Generator) fileByName(filename string) *FileDescriptor { - return g.allFilesByName[filename] -} - -// weak returns whether the ith import of the current file is a weak import. -func (g *Generator) weak(i int32) bool { - for _, j := range g.file.WeakDependency { - if j == i { - return true - } - } - return false -} - -// Generate the imports -func (g *Generator) generateImports() { - // We almost always need a proto import. Rather than computing when we - // do, which is tricky when there's a plugin, just import it and - // reference it later. The same argument applies to the fmt and math packages. - g.P("import " + g.Pkg["proto"] + " " + strconv.Quote(g.ImportPrefix+"github.com/golang/protobuf/proto")) - g.P("import " + g.Pkg["fmt"] + ` "fmt"`) - g.P("import " + g.Pkg["math"] + ` "math"`) - for i, s := range g.file.Dependency { - fd := g.fileByName(s) - // Do not import our own package. - if fd.PackageName() == g.packageName { - continue - } - filename := fd.goFileName() - // By default, import path is the dirname of the Go filename. - importPath := path.Dir(filename) - if substitution, ok := g.ImportMap[s]; ok { - importPath = substitution - } - importPath = g.ImportPrefix + importPath - // Skip weak imports. 
- if g.weak(int32(i)) { - g.P("// skipping weak import ", fd.PackageName(), " ", strconv.Quote(importPath)) - continue - } - // We need to import all the dependencies, even if we don't reference them, - // because other code and tools depend on having the full transitive closure - // of protocol buffer types in the binary. - pname := fd.PackageName() - if _, ok := g.usedPackages[pname]; !ok { - pname = "_" - } - g.P("import ", pname, " ", strconv.Quote(importPath)) - } - g.P() - // TODO: may need to worry about uniqueness across plugins - for _, p := range plugins { - p.GenerateImports(g.file) - g.P() - } - g.P("// Reference imports to suppress errors if they are not otherwise used.") - g.P("var _ = ", g.Pkg["proto"], ".Marshal") - g.P("var _ = ", g.Pkg["fmt"], ".Errorf") - g.P("var _ = ", g.Pkg["math"], ".Inf") - g.P() -} - -func (g *Generator) generateImported(id *ImportedDescriptor) { - // Don't generate public import symbols for files that we are generating - // code for, since those symbols will already be in this package. - // We can't simply avoid creating the ImportedDescriptor objects, - // because g.genFiles isn't populated at that stage. - tn := id.TypeName() - sn := tn[len(tn)-1] - df := g.FileOf(id.o.File()) - filename := *df.Name - for _, fd := range g.genFiles { - if *fd.Name == filename { - g.P("// Ignoring public import of ", sn, " from ", filename) - g.P() - return - } - } - g.P("// ", sn, " from public import ", filename) - g.usedPackages[df.PackageName()] = true - - for _, sym := range df.exported[id.o] { - sym.GenerateAlias(g, df.PackageName()) - } - - g.P() -} - -// Generate the enum definitions for this EnumDescriptor. -func (g *Generator) generateEnum(enum *EnumDescriptor) { - // The full type name - typeName := enum.TypeName() - // The full type name, CamelCased. 
- ccTypeName := CamelCaseSlice(typeName) - ccPrefix := enum.prefix() - - g.PrintComments(enum.path) - g.P("type ", ccTypeName, " int32") - g.file.addExport(enum, enumSymbol{ccTypeName, enum.proto3()}) - g.P("const (") - g.In() - for i, e := range enum.Value { - g.PrintComments(fmt.Sprintf("%s,%d,%d", enum.path, enumValuePath, i)) - - name := ccPrefix + *e.Name - g.P(name, " ", ccTypeName, " = ", e.Number) - g.file.addExport(enum, constOrVarSymbol{name, "const", ccTypeName}) - } - g.Out() - g.P(")") - g.P("var ", ccTypeName, "_name = map[int32]string{") - g.In() - generated := make(map[int32]bool) // avoid duplicate values - for _, e := range enum.Value { - duplicate := "" - if _, present := generated[*e.Number]; present { - duplicate = "// Duplicate value: " - } - g.P(duplicate, e.Number, ": ", strconv.Quote(*e.Name), ",") - generated[*e.Number] = true - } - g.Out() - g.P("}") - g.P("var ", ccTypeName, "_value = map[string]int32{") - g.In() - for _, e := range enum.Value { - g.P(strconv.Quote(*e.Name), ": ", e.Number, ",") - } - g.Out() - g.P("}") - - if !enum.proto3() { - g.P("func (x ", ccTypeName, ") Enum() *", ccTypeName, " {") - g.In() - g.P("p := new(", ccTypeName, ")") - g.P("*p = x") - g.P("return p") - g.Out() - g.P("}") - } - - g.P("func (x ", ccTypeName, ") String() string {") - g.In() - g.P("return ", g.Pkg["proto"], ".EnumName(", ccTypeName, "_name, int32(x))") - g.Out() - g.P("}") - - if !enum.proto3() { - g.P("func (x *", ccTypeName, ") UnmarshalJSON(data []byte) error {") - g.In() - g.P("value, err := ", g.Pkg["proto"], ".UnmarshalJSONEnum(", ccTypeName, `_value, data, "`, ccTypeName, `")`) - g.P("if err != nil {") - g.In() - g.P("return err") - g.Out() - g.P("}") - g.P("*x = ", ccTypeName, "(value)") - g.P("return nil") - g.Out() - g.P("}") - } - - var indexes []string - for m := enum.parent; m != nil; m = m.parent { - // XXX: skip groups? - indexes = append([]string{strconv.Itoa(m.index)}, indexes...) - } - indexes = append(indexes, strconv.Itoa(enum.index)) - g.P("func (", ccTypeName, ") EnumDescriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") - if enum.file.GetPackage() == "google.protobuf" && enum.GetName() == "NullValue" { - g.P("func (", ccTypeName, `) XXX_WellKnownType() string { return "`, enum.GetName(), `" }`) - } - - g.P() -} - -// The tag is a string like "varint,2,opt,name=fieldname,def=7" that -// identifies details of the field for the protocol buffer marshaling and unmarshaling -// code. The fields are: -// wire encoding -// protocol tag number -// opt,req,rep for optional, required, or repeated -// packed whether the encoding is "packed" (optional; repeated primitives only) -// name= the original declared name -// enum= the name of the enum type if it is an enum-typed field. -// proto3 if this field is in a proto3 message -// def= string representation of the default value, if any. -// The default value must be in a representation that can be used at run-time -// to generate the default value. Thus bools become 0 and 1, for instance. -func (g *Generator) goTag(message *Descriptor, field *descriptor.FieldDescriptorProto, wiretype string) string { - optrepreq := "" - switch { - case isOptional(field): - optrepreq = "opt" - case isRequired(field): - optrepreq = "req" - case isRepeated(field): - optrepreq = "rep" - } - var defaultValue string - if dv := field.DefaultValue; dv != nil { // set means an explicit default - defaultValue = *dv - // Some types need tweaking. 
- switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_BOOL: - if defaultValue == "true" { - defaultValue = "1" - } else { - defaultValue = "0" - } - case descriptor.FieldDescriptorProto_TYPE_STRING, - descriptor.FieldDescriptorProto_TYPE_BYTES: - // Nothing to do. Quoting is done for the whole tag. - case descriptor.FieldDescriptorProto_TYPE_ENUM: - // For enums we need to provide the integer constant. - obj := g.ObjectNamed(field.GetTypeName()) - if id, ok := obj.(*ImportedDescriptor); ok { - // It is an enum that was publicly imported. - // We need the underlying type. - obj = id.o - } - enum, ok := obj.(*EnumDescriptor) - if !ok { - log.Printf("obj is a %T", obj) - if id, ok := obj.(*ImportedDescriptor); ok { - log.Printf("id.o is a %T", id.o) - } - g.Fail("unknown enum type", CamelCaseSlice(obj.TypeName())) - } - defaultValue = enum.integerValueAsString(defaultValue) - } - defaultValue = ",def=" + defaultValue - } - enum := "" - if *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM { - // We avoid using obj.PackageName(), because we want to use the - // original (proto-world) package name. - obj := g.ObjectNamed(field.GetTypeName()) - if id, ok := obj.(*ImportedDescriptor); ok { - obj = id.o - } - enum = ",enum=" - if pkg := obj.File().GetPackage(); pkg != "" { - enum += pkg + "." - } - enum += CamelCaseSlice(obj.TypeName()) - } - packed := "" - if (field.Options != nil && field.Options.GetPacked()) || - // Per https://developers.google.com/protocol-buffers/docs/proto3#simple: - // "In proto3, repeated fields of scalar numeric types use packed encoding by default." - (message.proto3() && (field.Options == nil || field.Options.Packed == nil) && - isRepeated(field) && isScalar(field)) { - packed = ",packed" - } - fieldName := field.GetName() - name := fieldName - if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP { - // We must use the type name for groups instead of - // the field name to preserve capitalization. - // type_name in FieldDescriptorProto is fully-qualified, - // but we only want the local part. - name = *field.TypeName - if i := strings.LastIndex(name, "."); i >= 0 { - name = name[i+1:] - } - } - if json := field.GetJsonName(); json != "" && json != name { - // TODO: escaping might be needed, in which case - // perhaps this should be in its own "json" tag. - name += ",json=" + json - } - name = ",name=" + name - if message.proto3() { - // We only need the extra tag for []byte fields; - // no need to add noise for the others. - if *field.Type == descriptor.FieldDescriptorProto_TYPE_BYTES { - name += ",proto3" - } - - } - oneof := "" - if field.OneofIndex != nil { - oneof = ",oneof" - } - return strconv.Quote(fmt.Sprintf("%s,%d,%s%s%s%s%s%s", - wiretype, - field.GetNumber(), - optrepreq, - packed, - name, - enum, - oneof, - defaultValue)) -} - -func needsStar(typ descriptor.FieldDescriptorProto_Type) bool { - switch typ { - case descriptor.FieldDescriptorProto_TYPE_GROUP: - return false - case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - return false - case descriptor.FieldDescriptorProto_TYPE_BYTES: - return false - } - return true -} - -// TypeName is the printed name appropriate for an item. If the object is in the current file, -// TypeName drops the package name and underscores the rest. -// Otherwise the object is from another package; and the result is the underscored -// package name followed by the item name. -// The result always has an initial capital. 
-func (g *Generator) TypeName(obj Object) string { - return g.DefaultPackageName(obj) + CamelCaseSlice(obj.TypeName()) -} - -// TypeNameWithPackage is like TypeName, but always includes the package -// name even if the object is in our own package. -func (g *Generator) TypeNameWithPackage(obj Object) string { - return obj.PackageName() + CamelCaseSlice(obj.TypeName()) -} - -// GoType returns a string representing the type name, and the wire type -func (g *Generator) GoType(message *Descriptor, field *descriptor.FieldDescriptorProto) (typ string, wire string) { - // TODO: Options. - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_DOUBLE: - typ, wire = "float64", "fixed64" - case descriptor.FieldDescriptorProto_TYPE_FLOAT: - typ, wire = "float32", "fixed32" - case descriptor.FieldDescriptorProto_TYPE_INT64: - typ, wire = "int64", "varint" - case descriptor.FieldDescriptorProto_TYPE_UINT64: - typ, wire = "uint64", "varint" - case descriptor.FieldDescriptorProto_TYPE_INT32: - typ, wire = "int32", "varint" - case descriptor.FieldDescriptorProto_TYPE_UINT32: - typ, wire = "uint32", "varint" - case descriptor.FieldDescriptorProto_TYPE_FIXED64: - typ, wire = "uint64", "fixed64" - case descriptor.FieldDescriptorProto_TYPE_FIXED32: - typ, wire = "uint32", "fixed32" - case descriptor.FieldDescriptorProto_TYPE_BOOL: - typ, wire = "bool", "varint" - case descriptor.FieldDescriptorProto_TYPE_STRING: - typ, wire = "string", "bytes" - case descriptor.FieldDescriptorProto_TYPE_GROUP: - desc := g.ObjectNamed(field.GetTypeName()) - typ, wire = "*"+g.TypeName(desc), "group" - case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - desc := g.ObjectNamed(field.GetTypeName()) - typ, wire = "*"+g.TypeName(desc), "bytes" - case descriptor.FieldDescriptorProto_TYPE_BYTES: - typ, wire = "[]byte", "bytes" - case descriptor.FieldDescriptorProto_TYPE_ENUM: - desc := g.ObjectNamed(field.GetTypeName()) - typ, wire = g.TypeName(desc), "varint" - case descriptor.FieldDescriptorProto_TYPE_SFIXED32: - typ, wire = "int32", "fixed32" - case descriptor.FieldDescriptorProto_TYPE_SFIXED64: - typ, wire = "int64", "fixed64" - case descriptor.FieldDescriptorProto_TYPE_SINT32: - typ, wire = "int32", "zigzag32" - case descriptor.FieldDescriptorProto_TYPE_SINT64: - typ, wire = "int64", "zigzag64" - default: - g.Fail("unknown type for", field.GetName()) - } - if isRepeated(field) { - typ = "[]" + typ - } else if message != nil && message.proto3() { - return - } else if field.OneofIndex != nil && message != nil { - return - } else if needsStar(*field.Type) { - typ = "*" + typ - } - return -} - -func (g *Generator) RecordTypeUse(t string) { - if obj, ok := g.typeNameToObject[t]; ok { - // Call ObjectNamed to get the true object to record the use. - obj = g.ObjectNamed(t) - g.usedPackages[obj.PackageName()] = true - } -} - -// Method names that may be generated. Fields with these names get an -// underscore appended. Any change to this set is a potential incompatible -// API change because it changes generated field names. -var methodNames = [...]string{ - "Reset", - "String", - "ProtoMessage", - "Marshal", - "Unmarshal", - "ExtensionRangeArray", - "ExtensionMap", - "Descriptor", -} - -// Names of messages in the `google.protobuf` package for which -// we will generate XXX_WellKnownType methods. 
-var wellKnownTypes = map[string]bool{ - "Any": true, - "Duration": true, - "Empty": true, - "Struct": true, - "Timestamp": true, - - "Value": true, - "ListValue": true, - "DoubleValue": true, - "FloatValue": true, - "Int64Value": true, - "UInt64Value": true, - "Int32Value": true, - "UInt32Value": true, - "BoolValue": true, - "StringValue": true, - "BytesValue": true, -} - -// Generate the type and default constant definitions for this Descriptor. -func (g *Generator) generateMessage(message *Descriptor) { - // The full type name - typeName := message.TypeName() - // The full type name, CamelCased. - ccTypeName := CamelCaseSlice(typeName) - - usedNames := make(map[string]bool) - for _, n := range methodNames { - usedNames[n] = true - } - fieldNames := make(map[*descriptor.FieldDescriptorProto]string) - fieldGetterNames := make(map[*descriptor.FieldDescriptorProto]string) - fieldTypes := make(map[*descriptor.FieldDescriptorProto]string) - mapFieldTypes := make(map[*descriptor.FieldDescriptorProto]string) - - oneofFieldName := make(map[int32]string) // indexed by oneof_index field of FieldDescriptorProto - oneofDisc := make(map[int32]string) // name of discriminator method - oneofTypeName := make(map[*descriptor.FieldDescriptorProto]string) // without star - oneofInsertPoints := make(map[int32]int) // oneof_index => offset of g.Buffer - - g.PrintComments(message.path) - g.P("type ", ccTypeName, " struct {") - g.In() - - // allocNames finds a conflict-free variation of the given strings, - // consistently mutating their suffixes. - // It returns the same number of strings. - allocNames := func(ns ...string) []string { - Loop: - for { - for _, n := range ns { - if usedNames[n] { - for i := range ns { - ns[i] += "_" - } - continue Loop - } - } - for _, n := range ns { - usedNames[n] = true - } - return ns - } - } - - for i, field := range message.Field { - // Allocate the getter and the field at the same time so name - // collisions create field/method consistent names. - // TODO: This allocation occurs based on the order of the fields - // in the proto file, meaning that a change in the field - // ordering can change generated Method/Field names. - base := CamelCase(*field.Name) - ns := allocNames(base, "Get"+base) - fieldName, fieldGetterName := ns[0], ns[1] - typename, wiretype := g.GoType(message, field) - jsonName := *field.Name - tag := fmt.Sprintf("protobuf:%s json:%q", g.goTag(message, field, wiretype), jsonName+",omitempty") - - fieldNames[field] = fieldName - fieldGetterNames[field] = fieldGetterName - - oneof := field.OneofIndex != nil - if oneof && oneofFieldName[*field.OneofIndex] == "" { - odp := message.OneofDecl[int(*field.OneofIndex)] - fname := allocNames(CamelCase(odp.GetName()))[0] - - // This is the first field of a oneof we haven't seen before. - // Generate the union field. - com := g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageOneofPath, *field.OneofIndex)) - if com { - g.P("//") - } - g.P("// Types that are valid to be assigned to ", fname, ":") - // Generate the rest of this comment later, - // when we've computed any disambiguation. 
- oneofInsertPoints[*field.OneofIndex] = g.Buffer.Len() - - dname := "is" + ccTypeName + "_" + fname - oneofFieldName[*field.OneofIndex] = fname - oneofDisc[*field.OneofIndex] = dname - tag := `protobuf_oneof:"` + odp.GetName() + `"` - g.P(fname, " ", dname, " `", tag, "`") - } - - if *field.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE { - desc := g.ObjectNamed(field.GetTypeName()) - if d, ok := desc.(*Descriptor); ok && d.GetOptions().GetMapEntry() { - // Figure out the Go types and tags for the key and value types. - keyField, valField := d.Field[0], d.Field[1] - keyType, keyWire := g.GoType(d, keyField) - valType, valWire := g.GoType(d, valField) - keyTag, valTag := g.goTag(d, keyField, keyWire), g.goTag(d, valField, valWire) - - // We don't use stars, except for message-typed values. - // Message and enum types are the only two possibly foreign types used in maps, - // so record their use. They are not permitted as map keys. - keyType = strings.TrimPrefix(keyType, "*") - switch *valField.Type { - case descriptor.FieldDescriptorProto_TYPE_ENUM: - valType = strings.TrimPrefix(valType, "*") - g.RecordTypeUse(valField.GetTypeName()) - case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - g.RecordTypeUse(valField.GetTypeName()) - default: - valType = strings.TrimPrefix(valType, "*") - } - - typename = fmt.Sprintf("map[%s]%s", keyType, valType) - mapFieldTypes[field] = typename // record for the getter generation - - tag += fmt.Sprintf(" protobuf_key:%s protobuf_val:%s", keyTag, valTag) - } - } - - fieldTypes[field] = typename - - if oneof { - tname := ccTypeName + "_" + fieldName - // It is possible for this to collide with a message or enum - // nested in this message. Check for collisions. - for { - ok := true - for _, desc := range message.nested { - if CamelCaseSlice(desc.TypeName()) == tname { - ok = false - break - } - } - for _, enum := range message.enums { - if CamelCaseSlice(enum.TypeName()) == tname { - ok = false - break - } - } - if !ok { - tname += "_" - continue - } - break - } - - oneofTypeName[field] = tname - continue - } - - g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i)) - g.P(fieldName, "\t", typename, "\t`", tag, "`") - g.RecordTypeUse(field.GetTypeName()) - } - if len(message.ExtensionRange) > 0 { - g.P(g.Pkg["proto"], ".XXX_InternalExtensions `json:\"-\"`") - } - if !message.proto3() { - g.P("XXX_unrecognized\t[]byte `json:\"-\"`") - } - g.Out() - g.P("}") - - // Update g.Buffer to list valid oneof types. - // We do this down here, after we've disambiguated the oneof type names. - // We go in reverse order of insertion point to avoid invalidating offsets. - for oi := int32(len(message.OneofDecl)); oi >= 0; oi-- { - ip := oneofInsertPoints[oi] - all := g.Buffer.Bytes() - rem := all[ip:] - g.Buffer = bytes.NewBuffer(all[:ip:ip]) // set cap so we don't scribble on rem - for _, field := range message.Field { - if field.OneofIndex == nil || *field.OneofIndex != oi { - continue - } - g.P("//\t*", oneofTypeName[field]) - } - g.Buffer.Write(rem) - } - - // Reset, String and ProtoMessage methods. - g.P("func (m *", ccTypeName, ") Reset() { *m = ", ccTypeName, "{} }") - g.P("func (m *", ccTypeName, ") String() string { return ", g.Pkg["proto"], ".CompactTextString(m) }") - g.P("func (*", ccTypeName, ") ProtoMessage() {}") - var indexes []string - for m := message; m != nil; m = m.parent { - indexes = append([]string{strconv.Itoa(m.index)}, indexes...) 
- } - g.P("func (*", ccTypeName, ") Descriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") - // TODO: Revisit the decision to use a XXX_WellKnownType method - // if we change proto.MessageName to work with multiple equivalents. - if message.file.GetPackage() == "google.protobuf" && wellKnownTypes[message.GetName()] { - g.P("func (*", ccTypeName, `) XXX_WellKnownType() string { return "`, message.GetName(), `" }`) - } - - // Extension support methods - var hasExtensions, isMessageSet bool - if len(message.ExtensionRange) > 0 { - hasExtensions = true - // message_set_wire_format only makes sense when extensions are defined. - if opts := message.Options; opts != nil && opts.GetMessageSetWireFormat() { - isMessageSet = true - g.P() - g.P("func (m *", ccTypeName, ") Marshal() ([]byte, error) {") - g.In() - g.P("return ", g.Pkg["proto"], ".MarshalMessageSet(&m.XXX_InternalExtensions)") - g.Out() - g.P("}") - g.P("func (m *", ccTypeName, ") Unmarshal(buf []byte) error {") - g.In() - g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSet(buf, &m.XXX_InternalExtensions)") - g.Out() - g.P("}") - g.P("func (m *", ccTypeName, ") MarshalJSON() ([]byte, error) {") - g.In() - g.P("return ", g.Pkg["proto"], ".MarshalMessageSetJSON(&m.XXX_InternalExtensions)") - g.Out() - g.P("}") - g.P("func (m *", ccTypeName, ") UnmarshalJSON(buf []byte) error {") - g.In() - g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions)") - g.Out() - g.P("}") - g.P("// ensure ", ccTypeName, " satisfies proto.Marshaler and proto.Unmarshaler") - g.P("var _ ", g.Pkg["proto"], ".Marshaler = (*", ccTypeName, ")(nil)") - g.P("var _ ", g.Pkg["proto"], ".Unmarshaler = (*", ccTypeName, ")(nil)") - } - - g.P() - g.P("var extRange_", ccTypeName, " = []", g.Pkg["proto"], ".ExtensionRange{") - g.In() - for _, r := range message.ExtensionRange { - end := fmt.Sprint(*r.End - 1) // make range inclusive on both ends - g.P("{", r.Start, ", ", end, "},") - } - g.Out() - g.P("}") - g.P("func (*", ccTypeName, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange {") - g.In() - g.P("return extRange_", ccTypeName) - g.Out() - g.P("}") - } - - // Default constants - defNames := make(map[*descriptor.FieldDescriptorProto]string) - for _, field := range message.Field { - def := field.GetDefaultValue() - if def == "" { - continue - } - fieldname := "Default_" + ccTypeName + "_" + CamelCase(*field.Name) - defNames[field] = fieldname - typename, _ := g.GoType(message, field) - if typename[0] == '*' { - typename = typename[1:] - } - kind := "const " - switch { - case typename == "bool": - case typename == "string": - def = strconv.Quote(def) - case typename == "[]byte": - def = "[]byte(" + strconv.Quote(def) + ")" - kind = "var " - case def == "inf", def == "-inf", def == "nan": - // These names are known to, and defined by, the protocol language. - switch def { - case "inf": - def = "math.Inf(1)" - case "-inf": - def = "math.Inf(-1)" - case "nan": - def = "math.NaN()" - } - if *field.Type == descriptor.FieldDescriptorProto_TYPE_FLOAT { - def = "float32(" + def + ")" - } - kind = "var " - case *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM: - // Must be an enum. Need to construct the prefixed name. - obj := g.ObjectNamed(field.GetTypeName()) - var enum *EnumDescriptor - if id, ok := obj.(*ImportedDescriptor); ok { - // The enum type has been publicly imported. 
- enum, _ = id.o.(*EnumDescriptor) - } else { - enum, _ = obj.(*EnumDescriptor) - } - if enum == nil { - log.Printf("don't know how to generate constant for %s", fieldname) - continue - } - def = g.DefaultPackageName(obj) + enum.prefix() + def - } - g.P(kind, fieldname, " ", typename, " = ", def) - g.file.addExport(message, constOrVarSymbol{fieldname, kind, ""}) - } - g.P() - - // Oneof per-field types, discriminants and getters. - // - // Generate unexported named types for the discriminant interfaces. - // We shouldn't have to do this, but there was (~19 Aug 2015) a compiler/linker bug - // that was triggered by using anonymous interfaces here. - // TODO: Revisit this and consider reverting back to anonymous interfaces. - for oi := range message.OneofDecl { - dname := oneofDisc[int32(oi)] - g.P("type ", dname, " interface { ", dname, "() }") - } - g.P() - for _, field := range message.Field { - if field.OneofIndex == nil { - continue - } - _, wiretype := g.GoType(message, field) - tag := "protobuf:" + g.goTag(message, field, wiretype) - g.P("type ", oneofTypeName[field], " struct{ ", fieldNames[field], " ", fieldTypes[field], " `", tag, "` }") - g.RecordTypeUse(field.GetTypeName()) - } - g.P() - for _, field := range message.Field { - if field.OneofIndex == nil { - continue - } - g.P("func (*", oneofTypeName[field], ") ", oneofDisc[*field.OneofIndex], "() {}") - } - g.P() - for oi := range message.OneofDecl { - fname := oneofFieldName[int32(oi)] - g.P("func (m *", ccTypeName, ") Get", fname, "() ", oneofDisc[int32(oi)], " {") - g.P("if m != nil { return m.", fname, " }") - g.P("return nil") - g.P("}") - } - g.P() - - // Field getters - var getters []getterSymbol - for _, field := range message.Field { - oneof := field.OneofIndex != nil - - fname := fieldNames[field] - typename, _ := g.GoType(message, field) - if t, ok := mapFieldTypes[field]; ok { - typename = t - } - mname := fieldGetterNames[field] - star := "" - if needsStar(*field.Type) && typename[0] == '*' { - typename = typename[1:] - star = "*" - } - - // Only export getter symbols for basic types, - // and for messages and enums in the same package. - // Groups are not exported. - // Foreign types can't be hoisted through a public import because - // the importer may not already be importing the defining .proto. - // As an example, imagine we have an import tree like this: - // A.proto -> B.proto -> C.proto - // If A publicly imports B, we need to generate the getters from B in A's output, - // but if one such getter returns something from C then we cannot do that - // because A is not importing C already. - var getter, genType bool - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_GROUP: - getter = false - case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_ENUM: - // Only export getter if its return type is in this package. 
- getter = g.ObjectNamed(field.GetTypeName()).PackageName() == message.PackageName() - genType = true - default: - getter = true - } - if getter { - getters = append(getters, getterSymbol{ - name: mname, - typ: typename, - typeName: field.GetTypeName(), - genType: genType, - }) - } - - g.P("func (m *", ccTypeName, ") "+mname+"() "+typename+" {") - g.In() - def, hasDef := defNames[field] - typeDefaultIsNil := false // whether this field type's default value is a literal nil unless specified - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_BYTES: - typeDefaultIsNil = !hasDef - case descriptor.FieldDescriptorProto_TYPE_GROUP, descriptor.FieldDescriptorProto_TYPE_MESSAGE: - typeDefaultIsNil = true - } - if isRepeated(field) { - typeDefaultIsNil = true - } - if typeDefaultIsNil && !oneof { - // A bytes field with no explicit default needs less generated code, - // as does a message or group field, or a repeated field. - g.P("if m != nil {") - g.In() - g.P("return m." + fname) - g.Out() - g.P("}") - g.P("return nil") - g.Out() - g.P("}") - g.P() - continue - } - if !oneof { - if message.proto3() { - g.P("if m != nil {") - } else { - g.P("if m != nil && m." + fname + " != nil {") - } - g.In() - g.P("return " + star + "m." + fname) - g.Out() - g.P("}") - } else { - uname := oneofFieldName[*field.OneofIndex] - tname := oneofTypeName[field] - g.P("if x, ok := m.Get", uname, "().(*", tname, "); ok {") - g.P("return x.", fname) - g.P("}") - } - if hasDef { - if *field.Type != descriptor.FieldDescriptorProto_TYPE_BYTES { - g.P("return " + def) - } else { - // The default is a []byte var. - // Make a copy when returning it to be safe. - g.P("return append([]byte(nil), ", def, "...)") - } - } else { - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_BOOL: - g.P("return false") - case descriptor.FieldDescriptorProto_TYPE_STRING: - g.P(`return ""`) - case descriptor.FieldDescriptorProto_TYPE_GROUP, - descriptor.FieldDescriptorProto_TYPE_MESSAGE, - descriptor.FieldDescriptorProto_TYPE_BYTES: - // This is only possible for oneof fields. - g.P("return nil") - case descriptor.FieldDescriptorProto_TYPE_ENUM: - // The default default for an enum is the first value in the enum, - // not zero. - obj := g.ObjectNamed(field.GetTypeName()) - var enum *EnumDescriptor - if id, ok := obj.(*ImportedDescriptor); ok { - // The enum type has been publicly imported. 
- enum, _ = id.o.(*EnumDescriptor) - } else { - enum, _ = obj.(*EnumDescriptor) - } - if enum == nil { - log.Printf("don't know how to generate getter for %s", field.GetName()) - continue - } - if len(enum.Value) == 0 { - g.P("return 0 // empty enum") - } else { - first := enum.Value[0].GetName() - g.P("return ", g.DefaultPackageName(obj)+enum.prefix()+first) - } - default: - g.P("return 0") - } - } - g.Out() - g.P("}") - g.P() - } - - if !message.group { - ms := &messageSymbol{ - sym: ccTypeName, - hasExtensions: hasExtensions, - isMessageSet: isMessageSet, - hasOneof: len(message.OneofDecl) > 0, - getters: getters, - } - g.file.addExport(message, ms) - } - - // Oneof functions - if len(message.OneofDecl) > 0 { - fieldWire := make(map[*descriptor.FieldDescriptorProto]string) - - // method - enc := "_" + ccTypeName + "_OneofMarshaler" - dec := "_" + ccTypeName + "_OneofUnmarshaler" - size := "_" + ccTypeName + "_OneofSizer" - encSig := "(msg " + g.Pkg["proto"] + ".Message, b *" + g.Pkg["proto"] + ".Buffer) error" - decSig := "(msg " + g.Pkg["proto"] + ".Message, tag, wire int, b *" + g.Pkg["proto"] + ".Buffer) (bool, error)" - sizeSig := "(msg " + g.Pkg["proto"] + ".Message) (n int)" - - g.P("// XXX_OneofFuncs is for the internal use of the proto package.") - g.P("func (*", ccTypeName, ") XXX_OneofFuncs() (func", encSig, ", func", decSig, ", func", sizeSig, ", []interface{}) {") - g.P("return ", enc, ", ", dec, ", ", size, ", []interface{}{") - for _, field := range message.Field { - if field.OneofIndex == nil { - continue - } - g.P("(*", oneofTypeName[field], ")(nil),") - } - g.P("}") - g.P("}") - g.P() - - // marshaler - g.P("func ", enc, encSig, " {") - g.P("m := msg.(*", ccTypeName, ")") - for oi, odp := range message.OneofDecl { - g.P("// ", odp.GetName()) - fname := oneofFieldName[int32(oi)] - g.P("switch x := m.", fname, ".(type) {") - for _, field := range message.Field { - if field.OneofIndex == nil || int(*field.OneofIndex) != oi { - continue - } - g.P("case *", oneofTypeName[field], ":") - var wire, pre, post string - val := "x." + fieldNames[field] // overridden for TYPE_BOOL - canFail := false // only TYPE_MESSAGE and TYPE_GROUP can fail - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_DOUBLE: - wire = "WireFixed64" - pre = "b.EncodeFixed64(" + g.Pkg["math"] + ".Float64bits(" - post = "))" - case descriptor.FieldDescriptorProto_TYPE_FLOAT: - wire = "WireFixed32" - pre = "b.EncodeFixed32(uint64(" + g.Pkg["math"] + ".Float32bits(" - post = ")))" - case descriptor.FieldDescriptorProto_TYPE_INT64, - descriptor.FieldDescriptorProto_TYPE_UINT64: - wire = "WireVarint" - pre, post = "b.EncodeVarint(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_INT32, - descriptor.FieldDescriptorProto_TYPE_UINT32, - descriptor.FieldDescriptorProto_TYPE_ENUM: - wire = "WireVarint" - pre, post = "b.EncodeVarint(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_FIXED64, - descriptor.FieldDescriptorProto_TYPE_SFIXED64: - wire = "WireFixed64" - pre, post = "b.EncodeFixed64(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_FIXED32, - descriptor.FieldDescriptorProto_TYPE_SFIXED32: - wire = "WireFixed32" - pre, post = "b.EncodeFixed32(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_BOOL: - // bool needs special handling. 
- g.P("t := uint64(0)") - g.P("if ", val, " { t = 1 }") - val = "t" - wire = "WireVarint" - pre, post = "b.EncodeVarint(", ")" - case descriptor.FieldDescriptorProto_TYPE_STRING: - wire = "WireBytes" - pre, post = "b.EncodeStringBytes(", ")" - case descriptor.FieldDescriptorProto_TYPE_GROUP: - wire = "WireStartGroup" - pre, post = "b.Marshal(", ")" - canFail = true - case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - wire = "WireBytes" - pre, post = "b.EncodeMessage(", ")" - canFail = true - case descriptor.FieldDescriptorProto_TYPE_BYTES: - wire = "WireBytes" - pre, post = "b.EncodeRawBytes(", ")" - case descriptor.FieldDescriptorProto_TYPE_SINT32: - wire = "WireVarint" - pre, post = "b.EncodeZigzag32(uint64(", "))" - case descriptor.FieldDescriptorProto_TYPE_SINT64: - wire = "WireVarint" - pre, post = "b.EncodeZigzag64(uint64(", "))" - default: - g.Fail("unhandled oneof field type ", field.Type.String()) - } - fieldWire[field] = wire - g.P("b.EncodeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".", wire, ")") - if !canFail { - g.P(pre, val, post) - } else { - g.P("if err := ", pre, val, post, "; err != nil {") - g.P("return err") - g.P("}") - } - if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP { - g.P("b.EncodeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".WireEndGroup)") - } - } - g.P("case nil:") - g.P("default: return ", g.Pkg["fmt"], `.Errorf("`, ccTypeName, ".", fname, ` has unexpected type %T", x)`) - g.P("}") - } - g.P("return nil") - g.P("}") - g.P() - - // unmarshaler - g.P("func ", dec, decSig, " {") - g.P("m := msg.(*", ccTypeName, ")") - g.P("switch tag {") - for _, field := range message.Field { - if field.OneofIndex == nil { - continue - } - odp := message.OneofDecl[int(*field.OneofIndex)] - g.P("case ", field.Number, ": // ", odp.GetName(), ".", *field.Name) - g.P("if wire != ", g.Pkg["proto"], ".", fieldWire[field], " {") - g.P("return true, ", g.Pkg["proto"], ".ErrInternalBadWireType") - g.P("}") - lhs := "x, err" // overridden for TYPE_MESSAGE and TYPE_GROUP - var dec, cast, cast2 string - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_DOUBLE: - dec, cast = "b.DecodeFixed64()", g.Pkg["math"]+".Float64frombits" - case descriptor.FieldDescriptorProto_TYPE_FLOAT: - dec, cast, cast2 = "b.DecodeFixed32()", "uint32", g.Pkg["math"]+".Float32frombits" - case descriptor.FieldDescriptorProto_TYPE_INT64: - dec, cast = "b.DecodeVarint()", "int64" - case descriptor.FieldDescriptorProto_TYPE_UINT64: - dec = "b.DecodeVarint()" - case descriptor.FieldDescriptorProto_TYPE_INT32: - dec, cast = "b.DecodeVarint()", "int32" - case descriptor.FieldDescriptorProto_TYPE_FIXED64: - dec = "b.DecodeFixed64()" - case descriptor.FieldDescriptorProto_TYPE_FIXED32: - dec, cast = "b.DecodeFixed32()", "uint32" - case descriptor.FieldDescriptorProto_TYPE_BOOL: - dec = "b.DecodeVarint()" - // handled specially below - case descriptor.FieldDescriptorProto_TYPE_STRING: - dec = "b.DecodeStringBytes()" - case descriptor.FieldDescriptorProto_TYPE_GROUP: - g.P("msg := new(", fieldTypes[field][1:], ")") // drop star - lhs = "err" - dec = "b.DecodeGroup(msg)" - // handled specially below - case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - g.P("msg := new(", fieldTypes[field][1:], ")") // drop star - lhs = "err" - dec = "b.DecodeMessage(msg)" - // handled specially below - case descriptor.FieldDescriptorProto_TYPE_BYTES: - dec = "b.DecodeRawBytes(true)" - case descriptor.FieldDescriptorProto_TYPE_UINT32: - dec, cast = "b.DecodeVarint()", "uint32" - case 
descriptor.FieldDescriptorProto_TYPE_ENUM: - dec, cast = "b.DecodeVarint()", fieldTypes[field] - case descriptor.FieldDescriptorProto_TYPE_SFIXED32: - dec, cast = "b.DecodeFixed32()", "int32" - case descriptor.FieldDescriptorProto_TYPE_SFIXED64: - dec, cast = "b.DecodeFixed64()", "int64" - case descriptor.FieldDescriptorProto_TYPE_SINT32: - dec, cast = "b.DecodeZigzag32()", "int32" - case descriptor.FieldDescriptorProto_TYPE_SINT64: - dec, cast = "b.DecodeZigzag64()", "int64" - default: - g.Fail("unhandled oneof field type ", field.Type.String()) - } - g.P(lhs, " := ", dec) - val := "x" - if cast != "" { - val = cast + "(" + val + ")" - } - if cast2 != "" { - val = cast2 + "(" + val + ")" - } - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_BOOL: - val += " != 0" - case descriptor.FieldDescriptorProto_TYPE_GROUP, - descriptor.FieldDescriptorProto_TYPE_MESSAGE: - val = "msg" - } - g.P("m.", oneofFieldName[*field.OneofIndex], " = &", oneofTypeName[field], "{", val, "}") - g.P("return true, err") - } - g.P("default: return false, nil") - g.P("}") - g.P("}") - g.P() - - // sizer - g.P("func ", size, sizeSig, " {") - g.P("m := msg.(*", ccTypeName, ")") - for oi, odp := range message.OneofDecl { - g.P("// ", odp.GetName()) - fname := oneofFieldName[int32(oi)] - g.P("switch x := m.", fname, ".(type) {") - for _, field := range message.Field { - if field.OneofIndex == nil || int(*field.OneofIndex) != oi { - continue - } - g.P("case *", oneofTypeName[field], ":") - val := "x." + fieldNames[field] - var wire, varint, fixed string - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_DOUBLE: - wire = "WireFixed64" - fixed = "8" - case descriptor.FieldDescriptorProto_TYPE_FLOAT: - wire = "WireFixed32" - fixed = "4" - case descriptor.FieldDescriptorProto_TYPE_INT64, - descriptor.FieldDescriptorProto_TYPE_UINT64, - descriptor.FieldDescriptorProto_TYPE_INT32, - descriptor.FieldDescriptorProto_TYPE_UINT32, - descriptor.FieldDescriptorProto_TYPE_ENUM: - wire = "WireVarint" - varint = val - case descriptor.FieldDescriptorProto_TYPE_FIXED64, - descriptor.FieldDescriptorProto_TYPE_SFIXED64: - wire = "WireFixed64" - fixed = "8" - case descriptor.FieldDescriptorProto_TYPE_FIXED32, - descriptor.FieldDescriptorProto_TYPE_SFIXED32: - wire = "WireFixed32" - fixed = "4" - case descriptor.FieldDescriptorProto_TYPE_BOOL: - wire = "WireVarint" - fixed = "1" - case descriptor.FieldDescriptorProto_TYPE_STRING: - wire = "WireBytes" - fixed = "len(" + val + ")" - varint = fixed - case descriptor.FieldDescriptorProto_TYPE_GROUP: - wire = "WireStartGroup" - fixed = g.Pkg["proto"] + ".Size(" + val + ")" - case descriptor.FieldDescriptorProto_TYPE_MESSAGE: - wire = "WireBytes" - g.P("s := ", g.Pkg["proto"], ".Size(", val, ")") - fixed = "s" - varint = fixed - case descriptor.FieldDescriptorProto_TYPE_BYTES: - wire = "WireBytes" - fixed = "len(" + val + ")" - varint = fixed - case descriptor.FieldDescriptorProto_TYPE_SINT32: - wire = "WireVarint" - varint = "(uint32(" + val + ") << 1) ^ uint32((int32(" + val + ") >> 31))" - case descriptor.FieldDescriptorProto_TYPE_SINT64: - wire = "WireVarint" - varint = "uint64(" + val + " << 1) ^ uint64((int64(" + val + ") >> 63))" - default: - g.Fail("unhandled oneof field type ", field.Type.String()) - } - g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".", wire, ")") - if varint != "" { - g.P("n += ", g.Pkg["proto"], ".SizeVarint(uint64(", varint, "))") - } - if fixed != "" { - g.P("n += ", fixed) - } - if *field.Type == 
descriptor.FieldDescriptorProto_TYPE_GROUP { - g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".WireEndGroup)") - } - } - g.P("case nil:") - g.P("default:") - g.P("panic(", g.Pkg["fmt"], ".Sprintf(\"proto: unexpected type %T in oneof\", x))") - g.P("}") - } - g.P("return n") - g.P("}") - g.P() - } - - for _, ext := range message.ext { - g.generateExtension(ext) - } - - fullName := strings.Join(message.TypeName(), ".") - if g.file.Package != nil { - fullName = *g.file.Package + "." + fullName - } - - g.addInitf("%s.RegisterType((*%s)(nil), %q)", g.Pkg["proto"], ccTypeName, fullName) -} - -func (g *Generator) generateExtension(ext *ExtensionDescriptor) { - ccTypeName := ext.DescName() - - extObj := g.ObjectNamed(*ext.Extendee) - var extDesc *Descriptor - if id, ok := extObj.(*ImportedDescriptor); ok { - // This is extending a publicly imported message. - // We need the underlying type for goTag. - extDesc = id.o.(*Descriptor) - } else { - extDesc = extObj.(*Descriptor) - } - extendedType := "*" + g.TypeName(extObj) // always use the original - field := ext.FieldDescriptorProto - fieldType, wireType := g.GoType(ext.parent, field) - tag := g.goTag(extDesc, field, wireType) - g.RecordTypeUse(*ext.Extendee) - if n := ext.FieldDescriptorProto.TypeName; n != nil { - // foreign extension type - g.RecordTypeUse(*n) - } - - typeName := ext.TypeName() - - // Special case for proto2 message sets: If this extension is extending - // proto2_bridge.MessageSet, and its final name component is "message_set_extension", - // then drop that last component. - mset := false - if extendedType == "*proto2_bridge.MessageSet" && typeName[len(typeName)-1] == "message_set_extension" { - typeName = typeName[:len(typeName)-1] - mset = true - } - - // For text formatting, the package must be exactly what the .proto file declares, - // ignoring overrides such as the go_package option, and with no dot/underscore mapping. - extName := strings.Join(typeName, ".") - if g.file.Package != nil { - extName = *g.file.Package + "." + extName - } - - g.P("var ", ccTypeName, " = &", g.Pkg["proto"], ".ExtensionDesc{") - g.In() - g.P("ExtendedType: (", extendedType, ")(nil),") - g.P("ExtensionType: (", fieldType, ")(nil),") - g.P("Field: ", field.Number, ",") - g.P(`Name: "`, extName, `",`) - g.P("Tag: ", tag, ",") - g.P(`Filename: "`, g.file.GetName(), `",`) - - g.Out() - g.P("}") - g.P() - - if mset { - // Generate a bit more code to register with message_set.go. - g.addInitf("%s.RegisterMessageSetType((%s)(nil), %d, %q)", g.Pkg["proto"], fieldType, *field.Number, extName) - } - - g.file.addExport(ext, constOrVarSymbol{ccTypeName, "var", ""}) -} - -func (g *Generator) generateInitFunction() { - for _, enum := range g.file.enum { - g.generateEnumRegistration(enum) - } - for _, d := range g.file.desc { - for _, ext := range d.ext { - g.generateExtensionRegistration(ext) - } - } - for _, ext := range g.file.ext { - g.generateExtensionRegistration(ext) - } - if len(g.init) == 0 { - return - } - g.P("func init() {") - g.In() - for _, l := range g.init { - g.P(l) - } - g.Out() - g.P("}") - g.init = nil -} - -func (g *Generator) generateFileDescriptor(file *FileDescriptor) { - // Make a copy and trim source_code_info data. - // TODO: Trim this more when we know exactly what we need. 
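The sint32/sint64 branches of the sizer above rely on protobuf's zigzag mapping, which folds the sign bit into the lowest bit so that small negative values still encode as short varints. A standalone sketch of that mapping and its inverse (the function names are illustrative, not part of the generator):

```go
package main

import "fmt"

// zigzag32 mirrors the expression the sizer emits for sint32 fields:
// (uint32(n) << 1) ^ uint32(n >> 31).
func zigzag32(n int32) uint32 { return (uint32(n) << 1) ^ uint32(n>>31) }

// unzigzag32 is the inverse, as performed by DecodeZigzag32 on the wire value.
func unzigzag32(u uint32) int32 { return int32(u>>1) ^ -int32(u&1) }

func main() {
	for _, n := range []int32{0, -1, 1, -2, 2147483647, -2147483648} {
		u := zigzag32(n)
		fmt.Printf("%11d -> zigzag %10d -> %11d\n", n, u, unzigzag32(u))
	}
}
```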
- pb := proto.Clone(file.FileDescriptorProto).(*descriptor.FileDescriptorProto) - pb.SourceCodeInfo = nil - - b, err := proto.Marshal(pb) - if err != nil { - g.Fail(err.Error()) - } - - var buf bytes.Buffer - w, _ := gzip.NewWriterLevel(&buf, gzip.BestCompression) - w.Write(b) - w.Close() - b = buf.Bytes() - - v := file.VarName() - g.P() - g.P("func init() { ", g.Pkg["proto"], ".RegisterFile(", strconv.Quote(*file.Name), ", ", v, ") }") - g.P("var ", v, " = []byte{") - g.In() - g.P("// ", len(b), " bytes of a gzipped FileDescriptorProto") - for len(b) > 0 { - n := 16 - if n > len(b) { - n = len(b) - } - - s := "" - for _, c := range b[:n] { - s += fmt.Sprintf("0x%02x,", c) - } - g.P(s) - - b = b[n:] - } - g.Out() - g.P("}") -} - -func (g *Generator) generateEnumRegistration(enum *EnumDescriptor) { - // // We always print the full (proto-world) package name here. - pkg := enum.File().GetPackage() - if pkg != "" { - pkg += "." - } - // The full type name - typeName := enum.TypeName() - // The full type name, CamelCased. - ccTypeName := CamelCaseSlice(typeName) - g.addInitf("%s.RegisterEnum(%q, %[3]s_name, %[3]s_value)", g.Pkg["proto"], pkg+ccTypeName, ccTypeName) -} - -func (g *Generator) generateExtensionRegistration(ext *ExtensionDescriptor) { - g.addInitf("%s.RegisterExtension(%s)", g.Pkg["proto"], ext.DescName()) -} - -// And now lots of helper functions. - -// Is c an ASCII lower-case letter? -func isASCIILower(c byte) bool { - return 'a' <= c && c <= 'z' -} - -// Is c an ASCII digit? -func isASCIIDigit(c byte) bool { - return '0' <= c && c <= '9' -} - -// CamelCase returns the CamelCased name. -// If there is an interior underscore followed by a lower case letter, -// drop the underscore and convert the letter to upper case. -// There is a remote possibility of this rewrite causing a name collision, -// but it's so remote we're prepared to pretend it's nonexistent - since the -// C++ generator lowercases names, it's extremely unlikely to have two fields -// with different capitalizations. -// In short, _my_field_name_2 becomes XMyFieldName_2. -func CamelCase(s string) string { - if s == "" { - return "" - } - t := make([]byte, 0, 32) - i := 0 - if s[0] == '_' { - // Need a capital letter; drop the '_'. - t = append(t, 'X') - i++ - } - // Invariant: if the next letter is lower case, it must be converted - // to upper case. - // That is, we process a word at a time, where words are marked by _ or - // upper case letter. Digits are treated as words. - for ; i < len(s); i++ { - c := s[i] - if c == '_' && i+1 < len(s) && isASCIILower(s[i+1]) { - continue // Skip the underscore in s. - } - if isASCIIDigit(c) { - t = append(t, c) - continue - } - // Assume we have a letter now - if not, it's a bogus identifier. - // The next word is a sequence of characters that must start upper case. - if isASCIILower(c) { - c ^= ' ' // Make it a capital letter. - } - t = append(t, c) // Guaranteed not lower case. - // Accept lower case sequence that follows. - for i+1 < len(s) && isASCIILower(s[i+1]) { - i++ - t = append(t, s[i]) - } - } - return string(t) -} - -// CamelCaseSlice is like CamelCase, but the argument is a slice of strings to -// be joined with "_". -func CamelCaseSlice(elem []string) string { return CamelCase(strings.Join(elem, "_")) } - -// dottedSlice turns a sliced name into a dotted name. -func dottedSlice(elem []string) string { return strings.Join(elem, ".") } - -// Is this field optional? 
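The CamelCase helper above determines how .proto field and message names become Go identifiers. A minimal sketch of its behaviour on a few representative inputs, assuming the generator package is imported at its upstream path rather than from the vendor tree:

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/protoc-gen-go/generator"
)

func main() {
	// An underscore followed by a lower-case letter is dropped and the letter
	// is upper-cased; a leading underscore becomes "X" to keep the name exported.
	for _, s := range []string{"foo_bar", "my_field_name_2", "_my_field_name_2"} {
		fmt.Printf("%-18s -> %s\n", s, generator.CamelCase(s))
	}
	// foo_bar            -> FooBar
	// my_field_name_2    -> MyFieldName_2
	// _my_field_name_2   -> XMyFieldName_2
}
```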
-func isOptional(field *descriptor.FieldDescriptorProto) bool { - return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_OPTIONAL -} - -// Is this field required? -func isRequired(field *descriptor.FieldDescriptorProto) bool { - return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_REQUIRED -} - -// Is this field repeated? -func isRepeated(field *descriptor.FieldDescriptorProto) bool { - return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED -} - -// Is this field a scalar numeric type? -func isScalar(field *descriptor.FieldDescriptorProto) bool { - if field.Type == nil { - return false - } - switch *field.Type { - case descriptor.FieldDescriptorProto_TYPE_DOUBLE, - descriptor.FieldDescriptorProto_TYPE_FLOAT, - descriptor.FieldDescriptorProto_TYPE_INT64, - descriptor.FieldDescriptorProto_TYPE_UINT64, - descriptor.FieldDescriptorProto_TYPE_INT32, - descriptor.FieldDescriptorProto_TYPE_FIXED64, - descriptor.FieldDescriptorProto_TYPE_FIXED32, - descriptor.FieldDescriptorProto_TYPE_BOOL, - descriptor.FieldDescriptorProto_TYPE_UINT32, - descriptor.FieldDescriptorProto_TYPE_ENUM, - descriptor.FieldDescriptorProto_TYPE_SFIXED32, - descriptor.FieldDescriptorProto_TYPE_SFIXED64, - descriptor.FieldDescriptorProto_TYPE_SINT32, - descriptor.FieldDescriptorProto_TYPE_SINT64: - return true - default: - return false - } -} - -// badToUnderscore is the mapping function used to generate Go names from package names, -// which can be dotted in the input .proto file. It replaces non-identifier characters such as -// dot or dash with underscore. -func badToUnderscore(r rune) rune { - if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' { - return r - } - return '_' -} - -// baseName returns the last path element of the name, with the last dotted suffix removed. -func baseName(name string) string { - // First, find the last element - if i := strings.LastIndex(name, "/"); i >= 0 { - name = name[i+1:] - } - // Now drop the suffix - if i := strings.LastIndex(name, "."); i >= 0 { - name = name[0:i] - } - return name -} - -// The SourceCodeInfo message describes the location of elements of a parsed -// .proto file by way of a "path", which is a sequence of integers that -// describe the route from a FileDescriptorProto to the relevant submessage. -// The path alternates between a field number of a repeated field, and an index -// into that repeated field. The constants below define the field numbers that -// are used. -// -// See descriptor.proto for more information about this. -const ( - // tag numbers in FileDescriptorProto - packagePath = 2 // package - messagePath = 4 // message_type - enumPath = 5 // enum_type - // tag numbers in DescriptorProto - messageFieldPath = 2 // field - messageMessagePath = 3 // nested_type - messageEnumPath = 4 // enum_type - messageOneofPath = 8 // oneof_decl - // tag numbers in EnumDescriptorProto - enumValuePath = 2 // value -) diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile deleted file mode 100644 index eb41f20..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile +++ /dev/null @@ -1,45 +0,0 @@ -# Go support for Protocol Buffers - Google's data interchange format -# -# Copyright 2010 The Go Authors. All rights reserved. 
-# https://github.com/golang/protobuf -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# Not stored here, but plugin.proto is in https://github.com/google/protobuf/ -# at src/google/protobuf/compiler/plugin.proto -# Also we need to fix an import. -regenerate: - echo WARNING! THIS RULE IS PROBABLY NOT RIGHT FOR YOUR INSTALLATION - protoc --go_out=Mgoogle/protobuf/descriptor.proto=github.com/golang/protobuf/protoc-gen-go/descriptor:. \ - -I$(HOME)/src/protobuf/src $(HOME)/src/protobuf/src/google/protobuf/compiler/plugin.proto && \ - mv google/protobuf/compiler/plugin.pb.go $(GOPATH)/src/github.com/golang/protobuf/protoc-gen-go/plugin - -restore: - cp plugin.pb.golden plugin.pb.go - -preserve: - cp plugin.pb.go plugin.pb.golden diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go deleted file mode 100644 index 0ff4e13..0000000 --- a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go +++ /dev/null @@ -1,229 +0,0 @@ -// Code generated by protoc-gen-go. -// source: google/protobuf/compiler/plugin.proto -// DO NOT EDIT! - -/* -Package plugin_go is a generated protocol buffer package. - -It is generated from these files: - google/protobuf/compiler/plugin.proto - -It has these top-level messages: - CodeGeneratorRequest - CodeGeneratorResponse -*/ -package plugin_go - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -// An encoded CodeGeneratorRequest is written to the plugin's stdin. 
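The comment above states the contract that the structs below define: protoc writes an encoded CodeGeneratorRequest to the plugin's stdin and reads an encoded CodeGeneratorResponse from its stdout. A minimal sketch of that round trip, assuming the classic github.com/golang/protobuf plugin types (the generated file name and contents are illustrative only):

```go
package main

import (
	"io/ioutil"
	"log"
	"os"

	"github.com/golang/protobuf/proto"
	plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
)

func main() {
	// protoc streams the request over stdin.
	data, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		log.Fatal(err)
	}
	req := &plugin.CodeGeneratorRequest{}
	if err := proto.Unmarshal(data, req); err != nil {
		log.Fatal(err)
	}

	// Emit one trivial file per .proto listed on the protoc command line.
	resp := &plugin.CodeGeneratorResponse{}
	for _, name := range req.GetFileToGenerate() {
		resp.File = append(resp.File, &plugin.CodeGeneratorResponse_File{
			Name:    proto.String(name + ".echo.txt"),
			Content: proto.String("generated from " + name + "\n"),
		})
	}

	// The response goes back to protoc over stdout.
	out, err := proto.Marshal(resp)
	if err != nil {
		log.Fatal(err)
	}
	os.Stdout.Write(out)
}
```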
-type CodeGeneratorRequest struct { - // The .proto files that were explicitly listed on the command-line. The - // code generator should generate code only for these files. Each file's - // descriptor will be included in proto_file, below. - FileToGenerate []string `protobuf:"bytes,1,rep,name=file_to_generate,json=fileToGenerate" json:"file_to_generate,omitempty"` - // The generator parameter passed on the command-line. - Parameter *string `protobuf:"bytes,2,opt,name=parameter" json:"parameter,omitempty"` - // FileDescriptorProtos for all files in files_to_generate and everything - // they import. The files will appear in topological order, so each file - // appears before any file that imports it. - // - // protoc guarantees that all proto_files will be written after - // the fields above, even though this is not technically guaranteed by the - // protobuf wire format. This theoretically could allow a plugin to stream - // in the FileDescriptorProtos and handle them one by one rather than read - // the entire set into memory at once. However, as of this writing, this - // is not similarly optimized on protoc's end -- it will store all fields in - // memory at once before sending them to the plugin. - ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file,json=protoFile" json:"proto_file,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *CodeGeneratorRequest) Reset() { *m = CodeGeneratorRequest{} } -func (m *CodeGeneratorRequest) String() string { return proto.CompactTextString(m) } -func (*CodeGeneratorRequest) ProtoMessage() {} -func (*CodeGeneratorRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *CodeGeneratorRequest) GetFileToGenerate() []string { - if m != nil { - return m.FileToGenerate - } - return nil -} - -func (m *CodeGeneratorRequest) GetParameter() string { - if m != nil && m.Parameter != nil { - return *m.Parameter - } - return "" -} - -func (m *CodeGeneratorRequest) GetProtoFile() []*google_protobuf.FileDescriptorProto { - if m != nil { - return m.ProtoFile - } - return nil -} - -// The plugin writes an encoded CodeGeneratorResponse to stdout. -type CodeGeneratorResponse struct { - // Error message. If non-empty, code generation failed. The plugin process - // should exit with status code zero even if it reports an error in this way. - // - // This should be used to indicate errors in .proto files which prevent the - // code generator from generating correct code. Errors which indicate a - // problem in protoc itself -- such as the input CodeGeneratorRequest being - // unparseable -- should be reported by writing a message to stderr and - // exiting with a non-zero status code. - Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` - File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *CodeGeneratorResponse) Reset() { *m = CodeGeneratorResponse{} } -func (m *CodeGeneratorResponse) String() string { return proto.CompactTextString(m) } -func (*CodeGeneratorResponse) ProtoMessage() {} -func (*CodeGeneratorResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } - -func (m *CodeGeneratorResponse) GetError() string { - if m != nil && m.Error != nil { - return *m.Error - } - return "" -} - -func (m *CodeGeneratorResponse) GetFile() []*CodeGeneratorResponse_File { - if m != nil { - return m.File - } - return nil -} - -// Represents a single generated file. 
-type CodeGeneratorResponse_File struct { - // The file name, relative to the output directory. The name must not - // contain "." or ".." components and must be relative, not be absolute (so, - // the file cannot lie outside the output directory). "/" must be used as - // the path separator, not "\". - // - // If the name is omitted, the content will be appended to the previous - // file. This allows the generator to break large files into small chunks, - // and allows the generated text to be streamed back to protoc so that large - // files need not reside completely in memory at one time. Note that as of - // this writing protoc does not optimize for this -- it will read the entire - // CodeGeneratorResponse before writing files to disk. - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - // If non-empty, indicates that the named file should already exist, and the - // content here is to be inserted into that file at a defined insertion - // point. This feature allows a code generator to extend the output - // produced by another code generator. The original generator may provide - // insertion points by placing special annotations in the file that look - // like: - // @@protoc_insertion_point(NAME) - // The annotation can have arbitrary text before and after it on the line, - // which allows it to be placed in a comment. NAME should be replaced with - // an identifier naming the point -- this is what other generators will use - // as the insertion_point. Code inserted at this point will be placed - // immediately above the line containing the insertion point (thus multiple - // insertions to the same point will come out in the order they were added). - // The double-@ is intended to make it unlikely that the generated code - // could contain things that look like insertion points by accident. - // - // For example, the C++ code generator places the following line in the - // .pb.h files that it generates: - // // @@protoc_insertion_point(namespace_scope) - // This line appears within the scope of the file's package namespace, but - // outside of any particular class. Another plugin can then specify the - // insertion_point "namespace_scope" to generate additional classes or - // other declarations that should be placed in this scope. - // - // Note that if the line containing the insertion point begins with - // whitespace, the same whitespace will be added to every line of the - // inserted text. This is useful for languages like Python, where - // indentation matters. In these languages, the insertion point comment - // should be indented the same amount as any inserted code will need to be - // in order to work correctly in that context. - // - // The code generator that generates the initial file and the one which - // inserts into it must both run as part of a single invocation of protoc. - // Code generators are executed in the order in which they appear on the - // command line. - // - // If |insertion_point| is present, |name| must also be present. - InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point,json=insertionPoint" json:"insertion_point,omitempty"` - // The file contents. 
- Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (m *CodeGeneratorResponse_File) Reset() { *m = CodeGeneratorResponse_File{} } -func (m *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(m) } -func (*CodeGeneratorResponse_File) ProtoMessage() {} -func (*CodeGeneratorResponse_File) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1, 0} } - -func (m *CodeGeneratorResponse_File) GetName() string { - if m != nil && m.Name != nil { - return *m.Name - } - return "" -} - -func (m *CodeGeneratorResponse_File) GetInsertionPoint() string { - if m != nil && m.InsertionPoint != nil { - return *m.InsertionPoint - } - return "" -} - -func (m *CodeGeneratorResponse_File) GetContent() string { - if m != nil && m.Content != nil { - return *m.Content - } - return "" -} - -func init() { - proto.RegisterType((*CodeGeneratorRequest)(nil), "google.protobuf.compiler.CodeGeneratorRequest") - proto.RegisterType((*CodeGeneratorResponse)(nil), "google.protobuf.compiler.CodeGeneratorResponse") - proto.RegisterType((*CodeGeneratorResponse_File)(nil), "google.protobuf.compiler.CodeGeneratorResponse.File") -} - -func init() { proto.RegisterFile("google/protobuf/compiler/plugin.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 310 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x74, 0x51, 0xc1, 0x4a, 0xc3, 0x40, - 0x10, 0x25, 0xb6, 0x22, 0x19, 0xa5, 0x95, 0xa5, 0xc2, 0x52, 0x7a, 0x08, 0x45, 0x31, 0xa7, 0x14, - 0x44, 0xf0, 0xde, 0x8a, 0x7a, 0x2c, 0xc1, 0x93, 0x20, 0x21, 0xa6, 0xd3, 0xb0, 0x90, 0xec, 0xac, - 0xb3, 0xdb, 0x2f, 0xf2, 0x9f, 0xfc, 0x1e, 0xd9, 0x4d, 0x5b, 0xa5, 0xd8, 0xdb, 0xce, 0x7b, 0x6f, - 0xe6, 0xbd, 0x9d, 0x81, 0x9b, 0x9a, 0xa8, 0x6e, 0x70, 0x66, 0x98, 0x1c, 0x7d, 0x6c, 0xd6, 0xb3, - 0x8a, 0x5a, 0xa3, 0x1a, 0xe4, 0x99, 0x69, 0x36, 0xb5, 0xd2, 0x59, 0x20, 0x84, 0xec, 0x64, 0xd9, - 0x4e, 0x96, 0xed, 0x64, 0xe3, 0xe4, 0x70, 0xc0, 0x0a, 0x6d, 0xc5, 0xca, 0x38, 0xe2, 0x4e, 0x3d, - 0xfd, 0x8a, 0x60, 0xb4, 0xa0, 0x15, 0x3e, 0xa3, 0x46, 0x2e, 0x1d, 0x71, 0x8e, 0x9f, 0x1b, 0xb4, - 0x4e, 0xa4, 0x70, 0xb9, 0x56, 0x0d, 0x16, 0x8e, 0x8a, 0xba, 0xe3, 0x50, 0x46, 0x49, 0x2f, 0x8d, - 0xf3, 0x81, 0xc7, 0x5f, 0x69, 0xdb, 0x81, 0x62, 0x02, 0xb1, 0x29, 0xb9, 0x6c, 0xd1, 0x21, 0xcb, - 0x93, 0x24, 0x4a, 0xe3, 0xfc, 0x17, 0x10, 0x0b, 0x80, 0xe0, 0x54, 0xf8, 0x2e, 0x39, 0x4c, 0x7a, - 0xe9, 0xf9, 0xdd, 0x75, 0x76, 0x98, 0xf8, 0x49, 0x35, 0xf8, 0xb8, 0xcf, 0xb6, 0xf4, 0x70, 0x1e, - 0x07, 0xd6, 0x33, 0xd3, 0xef, 0x08, 0xae, 0x0e, 0x52, 0x5a, 0x43, 0xda, 0xa2, 0x18, 0xc1, 0x29, - 0x32, 0x13, 0xcb, 0x28, 0x18, 0x77, 0x85, 0x78, 0x81, 0xfe, 0x1f, 0xbb, 0xfb, 0xec, 0xd8, 0x82, - 0xb2, 0x7f, 0x87, 0x86, 0x34, 0x79, 0x98, 0x30, 0x7e, 0x87, 0xbe, 0xaf, 0x84, 0x80, 0xbe, 0x2e, - 0x5b, 0xdc, 0xda, 0x84, 0xb7, 0xb8, 0x85, 0xa1, 0xd2, 0x16, 0xd9, 0x29, 0xd2, 0x85, 0x21, 0xa5, - 0xdd, 0xf6, 0xfb, 0x83, 0x3d, 0xbc, 0xf4, 0xa8, 0x90, 0x70, 0x56, 0x91, 0x76, 0xa8, 0x9d, 0x1c, - 0x06, 0xc1, 0xae, 0x9c, 0x3f, 0xc0, 0xa4, 0xa2, 0xf6, 0x68, 0xbe, 0xf9, 0xc5, 0x32, 0x1c, 0x3a, - 0x2c, 0xc4, 0xbe, 0xc5, 0xdd, 0xd9, 0x8b, 0x9a, 0x7e, 0x02, 0x00, 0x00, 0xff, 0xff, 0x83, 0x7b, - 0x5c, 0x7c, 0x1b, 0x02, 0x00, 0x00, -} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.golden b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.golden deleted file mode 100644 index 8953d0f..0000000 --- 
a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.golden +++ /dev/null @@ -1,83 +0,0 @@ -// Code generated by protoc-gen-go. -// source: google/protobuf/compiler/plugin.proto -// DO NOT EDIT! - -package google_protobuf_compiler - -import proto "github.com/golang/protobuf/proto" -import "math" -import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" - -// Reference proto and math imports to suppress error if they are not otherwise used. -var _ = proto.GetString -var _ = math.Inf - -type CodeGeneratorRequest struct { - FileToGenerate []string `protobuf:"bytes,1,rep,name=file_to_generate" json:"file_to_generate,omitempty"` - Parameter *string `protobuf:"bytes,2,opt,name=parameter" json:"parameter,omitempty"` - ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file" json:"proto_file,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (this *CodeGeneratorRequest) Reset() { *this = CodeGeneratorRequest{} } -func (this *CodeGeneratorRequest) String() string { return proto.CompactTextString(this) } -func (*CodeGeneratorRequest) ProtoMessage() {} - -func (this *CodeGeneratorRequest) GetParameter() string { - if this != nil && this.Parameter != nil { - return *this.Parameter - } - return "" -} - -type CodeGeneratorResponse struct { - Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` - File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (this *CodeGeneratorResponse) Reset() { *this = CodeGeneratorResponse{} } -func (this *CodeGeneratorResponse) String() string { return proto.CompactTextString(this) } -func (*CodeGeneratorResponse) ProtoMessage() {} - -func (this *CodeGeneratorResponse) GetError() string { - if this != nil && this.Error != nil { - return *this.Error - } - return "" -} - -type CodeGeneratorResponse_File struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point" json:"insertion_point,omitempty"` - Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"` - XXX_unrecognized []byte `json:"-"` -} - -func (this *CodeGeneratorResponse_File) Reset() { *this = CodeGeneratorResponse_File{} } -func (this *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(this) } -func (*CodeGeneratorResponse_File) ProtoMessage() {} - -func (this *CodeGeneratorResponse_File) GetName() string { - if this != nil && this.Name != nil { - return *this.Name - } - return "" -} - -func (this *CodeGeneratorResponse_File) GetInsertionPoint() string { - if this != nil && this.InsertionPoint != nil { - return *this.InsertionPoint - } - return "" -} - -func (this *CodeGeneratorResponse_File) GetContent() string { - if this != nil && this.Content != nil { - return *this.Content - } - return "" -} - -func init() { -} diff --git a/vendor/github.com/gorilla/context/LICENSE b/vendor/github.com/gorilla/context/LICENSE deleted file mode 100644 index 0e5fb87..0000000 --- a/vendor/github.com/gorilla/context/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012 Rodrigo Moraes. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. 
- * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/gorilla/context/README.md b/vendor/github.com/gorilla/context/README.md deleted file mode 100644 index c60a31b..0000000 --- a/vendor/github.com/gorilla/context/README.md +++ /dev/null @@ -1,7 +0,0 @@ -context -======= -[![Build Status](https://travis-ci.org/gorilla/context.png?branch=master)](https://travis-ci.org/gorilla/context) - -gorilla/context is a general purpose registry for global request variables. - -Read the full documentation here: http://www.gorillatoolkit.org/pkg/context diff --git a/vendor/github.com/gorilla/context/context.go b/vendor/github.com/gorilla/context/context.go deleted file mode 100644 index 81cb128..0000000 --- a/vendor/github.com/gorilla/context/context.go +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2012 The Gorilla Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package context - -import ( - "net/http" - "sync" - "time" -) - -var ( - mutex sync.RWMutex - data = make(map[*http.Request]map[interface{}]interface{}) - datat = make(map[*http.Request]int64) -) - -// Set stores a value for a given key in a given request. -func Set(r *http.Request, key, val interface{}) { - mutex.Lock() - if data[r] == nil { - data[r] = make(map[interface{}]interface{}) - datat[r] = time.Now().Unix() - } - data[r][key] = val - mutex.Unlock() -} - -// Get returns a value stored for a given key in a given request. -func Get(r *http.Request, key interface{}) interface{} { - mutex.RLock() - if ctx := data[r]; ctx != nil { - value := ctx[key] - mutex.RUnlock() - return value - } - mutex.RUnlock() - return nil -} - -// GetOk returns stored value and presence state like multi-value return of map access. -func GetOk(r *http.Request, key interface{}) (interface{}, bool) { - mutex.RLock() - if _, ok := data[r]; ok { - value, ok := data[r][key] - mutex.RUnlock() - return value, ok - } - mutex.RUnlock() - return nil, false -} - -// GetAll returns all stored values for the request as a map. Nil is returned for invalid requests. 
-func GetAll(r *http.Request) map[interface{}]interface{} { - mutex.RLock() - if context, ok := data[r]; ok { - result := make(map[interface{}]interface{}, len(context)) - for k, v := range context { - result[k] = v - } - mutex.RUnlock() - return result - } - mutex.RUnlock() - return nil -} - -// GetAllOk returns all stored values for the request as a map and a boolean value that indicates if -// the request was registered. -func GetAllOk(r *http.Request) (map[interface{}]interface{}, bool) { - mutex.RLock() - context, ok := data[r] - result := make(map[interface{}]interface{}, len(context)) - for k, v := range context { - result[k] = v - } - mutex.RUnlock() - return result, ok -} - -// Delete removes a value stored for a given key in a given request. -func Delete(r *http.Request, key interface{}) { - mutex.Lock() - if data[r] != nil { - delete(data[r], key) - } - mutex.Unlock() -} - -// Clear removes all values stored for a given request. -// -// This is usually called by a handler wrapper to clean up request -// variables at the end of a request lifetime. See ClearHandler(). -func Clear(r *http.Request) { - mutex.Lock() - clear(r) - mutex.Unlock() -} - -// clear is Clear without the lock. -func clear(r *http.Request) { - delete(data, r) - delete(datat, r) -} - -// Purge removes request data stored for longer than maxAge, in seconds. -// It returns the amount of requests removed. -// -// If maxAge <= 0, all request data is removed. -// -// This is only used for sanity check: in case context cleaning was not -// properly set some request data can be kept forever, consuming an increasing -// amount of memory. In case this is detected, Purge() must be called -// periodically until the problem is fixed. -func Purge(maxAge int) int { - mutex.Lock() - count := 0 - if maxAge <= 0 { - count = len(data) - data = make(map[*http.Request]map[interface{}]interface{}) - datat = make(map[*http.Request]int64) - } else { - min := time.Now().Unix() - int64(maxAge) - for r := range data { - if datat[r] < min { - clear(r) - count++ - } - } - } - mutex.Unlock() - return count -} - -// ClearHandler wraps an http.Handler and clears request values at the end -// of a request lifetime. -func ClearHandler(h http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - defer Clear(r) - h.ServeHTTP(w, r) - }) -} diff --git a/vendor/github.com/gorilla/context/doc.go b/vendor/github.com/gorilla/context/doc.go deleted file mode 100644 index 73c7400..0000000 --- a/vendor/github.com/gorilla/context/doc.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2012 The Gorilla Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package context stores values shared during a request lifetime. - -For example, a router can set variables extracted from the URL and later -application handlers can access those values, or it can be used to store -sessions values to be saved at the end of a request. There are several -others common uses. - -The idea was posted by Brad Fitzpatrick to the go-nuts mailing list: - - http://groups.google.com/group/golang-nuts/msg/e2d679d303aa5d53 - -Here's the basic usage: first define the keys that you will need. The key -type is interface{} so a key can be of any type that supports equality. 
-Here we define a key using a custom int type to avoid name collisions: - - package foo - - import ( - "github.com/gorilla/context" - ) - - type key int - - const MyKey key = 0 - -Then set a variable. Variables are bound to an http.Request object, so you -need a request instance to set a value: - - context.Set(r, MyKey, "bar") - -The application can later access the variable using the same key you provided: - - func MyHandler(w http.ResponseWriter, r *http.Request) { - // val is "bar". - val := context.Get(r, foo.MyKey) - - // returns ("bar", true) - val, ok := context.GetOk(r, foo.MyKey) - // ... - } - -And that's all about the basic usage. We discuss some other ideas below. - -Any type can be stored in the context. To enforce a given type, make the key -private and wrap Get() and Set() to accept and return values of a specific -type: - - type key int - - const mykey key = 0 - - // GetMyKey returns a value for this package from the request values. - func GetMyKey(r *http.Request) SomeType { - if rv := context.Get(r, mykey); rv != nil { - return rv.(SomeType) - } - return nil - } - - // SetMyKey sets a value for this package in the request values. - func SetMyKey(r *http.Request, val SomeType) { - context.Set(r, mykey, val) - } - -Variables must be cleared at the end of a request, to remove all values -that were stored. This can be done in an http.Handler, after a request was -served. Just call Clear() passing the request: - - context.Clear(r) - -...or use ClearHandler(), which conveniently wraps an http.Handler to clear -variables at the end of a request lifetime. - -The Routers from the packages gorilla/mux and gorilla/pat call Clear() -so if you are using either of them you don't need to clear the context manually. -*/ -package context diff --git a/vendor/github.com/gorilla/handlers/LICENSE b/vendor/github.com/gorilla/handlers/LICENSE deleted file mode 100644 index 66ea3c8..0000000 --- a/vendor/github.com/gorilla/handlers/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2013 The Gorilla Handlers Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
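As a complement to the examples in the context package's doc.go above, a runnable sketch of the Set/Get/ClearHandler flow; the request-ID key, fixed value and port are assumptions made for the example:

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/gorilla/context"
)

type ctxKey int

const requestIDKey ctxKey = 0

// tagRequest stores a per-request value before the wrapped handler runs.
func tagRequest(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		context.Set(r, requestIDKey, "req-42") // illustrative fixed ID
		next.ServeHTTP(w, r)
	})
}

func hello(w http.ResponseWriter, r *http.Request) {
	// The value stored by the middleware is visible here through the same key.
	id, _ := context.Get(r, requestIDKey).(string)
	fmt.Fprintf(w, "handled %s as %s\n", r.URL.Path, id)
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", hello)
	// ClearHandler drops the stored values when the request finishes,
	// so entries do not accumulate in the package-level map.
	log.Fatal(http.ListenAndServe(":8080", context.ClearHandler(tagRequest(mux))))
}
```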
diff --git a/vendor/github.com/gorilla/handlers/README.md b/vendor/github.com/gorilla/handlers/README.md deleted file mode 100644 index a782c41..0000000 --- a/vendor/github.com/gorilla/handlers/README.md +++ /dev/null @@ -1,53 +0,0 @@ -gorilla/handlers -================ -[![GoDoc](https://godoc.org/github.com/gorilla/handlers?status.svg)](https://godoc.org/github.com/gorilla/handlers) [![Build Status](https://travis-ci.org/gorilla/handlers.svg?branch=master)](https://travis-ci.org/gorilla/handlers) - -Package handlers is a collection of handlers (aka "HTTP middleware") for use -with Go's `net/http` package (or any framework supporting `http.Handler`), including: - -* [**LoggingHandler**](https://godoc.org/github.com/gorilla/handlers#LoggingHandler) for logging HTTP requests in the Apache [Common Log - Format](http://httpd.apache.org/docs/2.2/logs.html#common). -* [**CombinedLoggingHandler**](https://godoc.org/github.com/gorilla/handlers#CombinedLoggingHandler) for logging HTTP requests in the Apache [Combined Log - Format](http://httpd.apache.org/docs/2.2/logs.html#combined) commonly used by - both Apache and nginx. -* [**CompressHandler**](https://godoc.org/github.com/gorilla/handlers#CompressHandler) for gzipping responses. -* [**ContentTypeHandler**](https://godoc.org/github.com/gorilla/handlers#ContentTypeHandler) for validating requests against a list of accepted - content types. -* [**MethodHandler**](https://godoc.org/github.com/gorilla/handlers#MethodHandler) for matching HTTP methods against handlers in a - `map[string]http.Handler` -* [**ProxyHeaders**](https://godoc.org/github.com/gorilla/handlers#ProxyHeaders) for populating `r.RemoteAddr` and `r.URL.Scheme` based on the - `X-Forwarded-For`, `X-Real-IP`, `X-Forwarded-Proto` and RFC7239 `Forwarded` - headers when running a Go server behind a HTTP reverse proxy. -* [**CanonicalHost**](https://godoc.org/github.com/gorilla/handlers#CanonicalHost) for re-directing to the preferred host when handling multiple - domains (i.e. multiple CNAME aliases). -* [**RecoveryHandler**](https://godoc.org/github.com/gorilla/handlers#RecoveryHandler) for recovering from unexpected panics. - -Other handlers are documented [on the Gorilla -website](http://www.gorillatoolkit.org/pkg/handlers). - -## Example - -A simple example using `handlers.LoggingHandler` and `handlers.CompressHandler`: - -```go -import ( - "net/http" - "github.com/gorilla/handlers" -) - -func main() { - r := http.NewServeMux() - - // Only log requests to our admin dashboard to stdout - r.Handle("/admin", handlers.LoggingHandler(os.Stdout, http.HandlerFunc(ShowAdminDashboard))) - r.HandleFunc("/", ShowIndex) - - // Wrap our server with our gzip handler to gzip compress all responses. - http.ListenAndServe(":8000", handlers.CompressHandler(r)) -} -``` - -## License - -BSD licensed. See the included LICENSE file for details. - diff --git a/vendor/github.com/gorilla/handlers/canonical.go b/vendor/github.com/gorilla/handlers/canonical.go deleted file mode 100644 index 8437fef..0000000 --- a/vendor/github.com/gorilla/handlers/canonical.go +++ /dev/null @@ -1,74 +0,0 @@ -package handlers - -import ( - "net/http" - "net/url" - "strings" -) - -type canonical struct { - h http.Handler - domain string - code int -} - -// CanonicalHost is HTTP middleware that re-directs requests to the canonical -// domain. It accepts a domain and a status code (e.g. 301 or 302) and -// re-directs clients to this domain. The existing request path is maintained. 
-// -// Note: If the provided domain is considered invalid by url.Parse or otherwise -// returns an empty scheme or host, clients are not re-directed. -// -// Example: -// -// r := mux.NewRouter() -// canonical := handlers.CanonicalHost("http://www.gorillatoolkit.org", 302) -// r.HandleFunc("/route", YourHandler) -// -// log.Fatal(http.ListenAndServe(":7000", canonical(r))) -// -func CanonicalHost(domain string, code int) func(h http.Handler) http.Handler { - fn := func(h http.Handler) http.Handler { - return canonical{h, domain, code} - } - - return fn -} - -func (c canonical) ServeHTTP(w http.ResponseWriter, r *http.Request) { - dest, err := url.Parse(c.domain) - if err != nil { - // Call the next handler if the provided domain fails to parse. - c.h.ServeHTTP(w, r) - return - } - - if dest.Scheme == "" || dest.Host == "" { - // Call the next handler if the scheme or host are empty. - // Note that url.Parse won't fail on in this case. - c.h.ServeHTTP(w, r) - return - } - - if !strings.EqualFold(cleanHost(r.Host), dest.Host) { - // Re-build the destination URL - dest := dest.Scheme + "://" + dest.Host + r.URL.Path - if r.URL.RawQuery != "" { - dest += "?" + r.URL.RawQuery - } - http.Redirect(w, r, dest, c.code) - return - } - - c.h.ServeHTTP(w, r) -} - -// cleanHost cleans invalid Host headers by stripping anything after '/' or ' '. -// This is backported from Go 1.5 (in response to issue #11206) and attempts to -// mitigate malformed Host headers that do not match the format in RFC7230. -func cleanHost(in string) string { - if i := strings.IndexAny(in, " /"); i != -1 { - return in[:i] - } - return in -} diff --git a/vendor/github.com/gorilla/handlers/compress.go b/vendor/github.com/gorilla/handlers/compress.go deleted file mode 100644 index e8345d7..0000000 --- a/vendor/github.com/gorilla/handlers/compress.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2013 The Gorilla Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package handlers - -import ( - "compress/flate" - "compress/gzip" - "io" - "net/http" - "strings" -) - -type compressResponseWriter struct { - io.Writer - http.ResponseWriter - http.Hijacker - http.Flusher - http.CloseNotifier -} - -func (w *compressResponseWriter) WriteHeader(c int) { - w.ResponseWriter.Header().Del("Content-Length") - w.ResponseWriter.WriteHeader(c) -} - -func (w *compressResponseWriter) Header() http.Header { - return w.ResponseWriter.Header() -} - -func (w *compressResponseWriter) Write(b []byte) (int, error) { - h := w.ResponseWriter.Header() - if h.Get("Content-Type") == "" { - h.Set("Content-Type", http.DetectContentType(b)) - } - h.Del("Content-Length") - - return w.Writer.Write(b) -} - -type flusher interface { - Flush() error -} - -func (w *compressResponseWriter) Flush() { - // Flush compressed data if compressor supports it. - if f, ok := w.Writer.(flusher); ok { - f.Flush() - } - // Flush HTTP response. - if w.Flusher != nil { - w.Flusher.Flush() - } -} - -// CompressHandler gzip compresses HTTP responses for clients that support it -// via the 'Accept-Encoding' header. 
-// -// Compressing TLS traffic may leak the page contents to an attacker if the -// page contains user input: http://security.stackexchange.com/a/102015/12208 -func CompressHandler(h http.Handler) http.Handler { - return CompressHandlerLevel(h, gzip.DefaultCompression) -} - -// CompressHandlerLevel gzip compresses HTTP responses with specified compression level -// for clients that support it via the 'Accept-Encoding' header. -// -// The compression level should be gzip.DefaultCompression, gzip.NoCompression, -// or any integer value between gzip.BestSpeed and gzip.BestCompression inclusive. -// gzip.DefaultCompression is used in case of invalid compression level. -func CompressHandlerLevel(h http.Handler, level int) http.Handler { - if level < gzip.DefaultCompression || level > gzip.BestCompression { - level = gzip.DefaultCompression - } - - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - L: - for _, enc := range strings.Split(r.Header.Get("Accept-Encoding"), ",") { - switch strings.TrimSpace(enc) { - case "gzip": - w.Header().Set("Content-Encoding", "gzip") - w.Header().Add("Vary", "Accept-Encoding") - - gw, _ := gzip.NewWriterLevel(w, level) - defer gw.Close() - - h, hok := w.(http.Hijacker) - if !hok { /* w is not Hijacker... oh well... */ - h = nil - } - - f, fok := w.(http.Flusher) - if !fok { - f = nil - } - - cn, cnok := w.(http.CloseNotifier) - if !cnok { - cn = nil - } - - w = &compressResponseWriter{ - Writer: gw, - ResponseWriter: w, - Hijacker: h, - Flusher: f, - CloseNotifier: cn, - } - - break L - case "deflate": - w.Header().Set("Content-Encoding", "deflate") - w.Header().Add("Vary", "Accept-Encoding") - - fw, _ := flate.NewWriter(w, level) - defer fw.Close() - - h, hok := w.(http.Hijacker) - if !hok { /* w is not Hijacker... oh well... */ - h = nil - } - - f, fok := w.(http.Flusher) - if !fok { - f = nil - } - - cn, cnok := w.(http.CloseNotifier) - if !cnok { - cn = nil - } - - w = &compressResponseWriter{ - Writer: fw, - ResponseWriter: w, - Hijacker: h, - Flusher: f, - CloseNotifier: cn, - } - - break L - } - } - - h.ServeHTTP(w, r) - }) -} diff --git a/vendor/github.com/gorilla/handlers/cors.go b/vendor/github.com/gorilla/handlers/cors.go deleted file mode 100644 index 1f92d1a..0000000 --- a/vendor/github.com/gorilla/handlers/cors.go +++ /dev/null @@ -1,317 +0,0 @@ -package handlers - -import ( - "net/http" - "strconv" - "strings" -) - -// CORSOption represents a functional option for configuring the CORS middleware. -type CORSOption func(*cors) error - -type cors struct { - h http.Handler - allowedHeaders []string - allowedMethods []string - allowedOrigins []string - allowedOriginValidator OriginValidator - exposedHeaders []string - maxAge int - ignoreOptions bool - allowCredentials bool -} - -// OriginValidator takes an origin string and returns whether or not that origin is allowed. 
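Before the CORS middleware that follows, a short usage sketch for the compression wrappers from compress.go above; the handler body and port are illustrative:

```go
package main

import (
	"compress/gzip"
	"fmt"
	"log"
	"net/http"

	"github.com/gorilla/handlers"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "this body is compressed for clients that send Accept-Encoding: gzip")
	})

	// CompressHandler uses gzip.DefaultCompression; CompressHandlerLevel lets
	// the level be tuned (out-of-range levels fall back to the default).
	log.Fatal(http.ListenAndServe(":8000", handlers.CompressHandlerLevel(mux, gzip.BestSpeed)))
}
```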
-type OriginValidator func(string) bool - -var ( - defaultCorsMethods = []string{"GET", "HEAD", "POST"} - defaultCorsHeaders = []string{"Accept", "Accept-Language", "Content-Language", "Origin"} - // (WebKit/Safari v9 sends the Origin header by default in AJAX requests) -) - -const ( - corsOptionMethod string = "OPTIONS" - corsAllowOriginHeader string = "Access-Control-Allow-Origin" - corsExposeHeadersHeader string = "Access-Control-Expose-Headers" - corsMaxAgeHeader string = "Access-Control-Max-Age" - corsAllowMethodsHeader string = "Access-Control-Allow-Methods" - corsAllowHeadersHeader string = "Access-Control-Allow-Headers" - corsAllowCredentialsHeader string = "Access-Control-Allow-Credentials" - corsRequestMethodHeader string = "Access-Control-Request-Method" - corsRequestHeadersHeader string = "Access-Control-Request-Headers" - corsOriginHeader string = "Origin" - corsVaryHeader string = "Vary" - corsOriginMatchAll string = "*" -) - -func (ch *cors) ServeHTTP(w http.ResponseWriter, r *http.Request) { - origin := r.Header.Get(corsOriginHeader) - if !ch.isOriginAllowed(origin) { - ch.h.ServeHTTP(w, r) - return - } - - if r.Method == corsOptionMethod { - if ch.ignoreOptions { - ch.h.ServeHTTP(w, r) - return - } - - if _, ok := r.Header[corsRequestMethodHeader]; !ok { - w.WriteHeader(http.StatusBadRequest) - return - } - - method := r.Header.Get(corsRequestMethodHeader) - if !ch.isMatch(method, ch.allowedMethods) { - w.WriteHeader(http.StatusMethodNotAllowed) - return - } - - requestHeaders := strings.Split(r.Header.Get(corsRequestHeadersHeader), ",") - allowedHeaders := []string{} - for _, v := range requestHeaders { - canonicalHeader := http.CanonicalHeaderKey(strings.TrimSpace(v)) - if canonicalHeader == "" || ch.isMatch(canonicalHeader, defaultCorsHeaders) { - continue - } - - if !ch.isMatch(canonicalHeader, ch.allowedHeaders) { - w.WriteHeader(http.StatusForbidden) - return - } - - allowedHeaders = append(allowedHeaders, canonicalHeader) - } - - if len(allowedHeaders) > 0 { - w.Header().Set(corsAllowHeadersHeader, strings.Join(allowedHeaders, ",")) - } - - if ch.maxAge > 0 { - w.Header().Set(corsMaxAgeHeader, strconv.Itoa(ch.maxAge)) - } - - if !ch.isMatch(method, defaultCorsMethods) { - w.Header().Set(corsAllowMethodsHeader, method) - } - } else { - if len(ch.exposedHeaders) > 0 { - w.Header().Set(corsExposeHeadersHeader, strings.Join(ch.exposedHeaders, ",")) - } - } - - if ch.allowCredentials { - w.Header().Set(corsAllowCredentialsHeader, "true") - } - - if len(ch.allowedOrigins) > 1 { - w.Header().Set(corsVaryHeader, corsOriginHeader) - } - - w.Header().Set(corsAllowOriginHeader, origin) - - if r.Method == corsOptionMethod { - return - } - ch.h.ServeHTTP(w, r) -} - -// CORS provides Cross-Origin Resource Sharing middleware. -// Example: -// -// import ( -// "net/http" -// -// "github.com/gorilla/handlers" -// "github.com/gorilla/mux" -// ) -// -// func main() { -// r := mux.NewRouter() -// r.HandleFunc("/users", UserEndpoint) -// r.HandleFunc("/projects", ProjectEndpoint) -// -// // Apply the CORS middleware to our top-level router, with the defaults. -// http.ListenAndServe(":8000", handlers.CORS()(r)) -// } -// -func CORS(opts ...CORSOption) func(http.Handler) http.Handler { - return func(h http.Handler) http.Handler { - ch := parseCORSOptions(opts...) 
- ch.h = h - return ch - } -} - -func parseCORSOptions(opts ...CORSOption) *cors { - ch := &cors{ - allowedMethods: defaultCorsMethods, - allowedHeaders: defaultCorsHeaders, - allowedOrigins: []string{corsOriginMatchAll}, - } - - for _, option := range opts { - option(ch) - } - - return ch -} - -// -// Functional options for configuring CORS. -// - -// AllowedHeaders adds the provided headers to the list of allowed headers in a -// CORS request. -// This is an append operation so the headers Accept, Accept-Language, -// and Content-Language are always allowed. -// Content-Type must be explicitly declared if accepting Content-Types other than -// application/x-www-form-urlencoded, multipart/form-data, or text/plain. -func AllowedHeaders(headers []string) CORSOption { - return func(ch *cors) error { - for _, v := range headers { - normalizedHeader := http.CanonicalHeaderKey(strings.TrimSpace(v)) - if normalizedHeader == "" { - continue - } - - if !ch.isMatch(normalizedHeader, ch.allowedHeaders) { - ch.allowedHeaders = append(ch.allowedHeaders, normalizedHeader) - } - } - - return nil - } -} - -// AllowedMethods can be used to explicitly allow methods in the -// Access-Control-Allow-Methods header. -// This is a replacement operation so you must also -// pass GET, HEAD, and POST if you wish to support those methods. -func AllowedMethods(methods []string) CORSOption { - return func(ch *cors) error { - ch.allowedMethods = []string{} - for _, v := range methods { - normalizedMethod := strings.ToUpper(strings.TrimSpace(v)) - if normalizedMethod == "" { - continue - } - - if !ch.isMatch(normalizedMethod, ch.allowedMethods) { - ch.allowedMethods = append(ch.allowedMethods, normalizedMethod) - } - } - - return nil - } -} - -// AllowedOrigins sets the allowed origins for CORS requests, as used in the -// 'Allow-Access-Control-Origin' HTTP header. -// Note: Passing in a []string{"*"} will allow any domain. -func AllowedOrigins(origins []string) CORSOption { - return func(ch *cors) error { - for _, v := range origins { - if v == corsOriginMatchAll { - ch.allowedOrigins = []string{corsOriginMatchAll} - return nil - } - } - - ch.allowedOrigins = origins - return nil - } -} - -// AllowedOriginValidator sets a function for evaluating allowed origins in CORS requests, represented by the -// 'Allow-Access-Control-Origin' HTTP header. -func AllowedOriginValidator(fn OriginValidator) CORSOption { - return func(ch *cors) error { - ch.allowedOriginValidator = fn - return nil - } -} - -// ExposeHeaders can be used to specify headers that are available -// and will not be stripped out by the user-agent. -func ExposedHeaders(headers []string) CORSOption { - return func(ch *cors) error { - ch.exposedHeaders = []string{} - for _, v := range headers { - normalizedHeader := http.CanonicalHeaderKey(strings.TrimSpace(v)) - if normalizedHeader == "" { - continue - } - - if !ch.isMatch(normalizedHeader, ch.exposedHeaders) { - ch.exposedHeaders = append(ch.exposedHeaders, normalizedHeader) - } - } - - return nil - } -} - -// MaxAge determines the maximum age (in seconds) between preflight requests. A -// maximum of 10 minutes is allowed. An age above this value will default to 10 -// minutes. -func MaxAge(age int) CORSOption { - return func(ch *cors) error { - // Maximum of 10 minutes. - if age > 600 { - age = 600 - } - - ch.maxAge = age - return nil - } -} - -// IgnoreOptions causes the CORS middleware to ignore OPTIONS requests, instead -// passing them through to the next handler. 
This is useful when your application -// or framework has a pre-existing mechanism for responding to OPTIONS requests. -func IgnoreOptions() CORSOption { - return func(ch *cors) error { - ch.ignoreOptions = true - return nil - } -} - -// AllowCredentials can be used to specify that the user agent may pass -// authentication details along with the request. -func AllowCredentials() CORSOption { - return func(ch *cors) error { - ch.allowCredentials = true - return nil - } -} - -func (ch *cors) isOriginAllowed(origin string) bool { - if origin == "" { - return false - } - - if ch.allowedOriginValidator != nil { - return ch.allowedOriginValidator(origin) - } - - for _, allowedOrigin := range ch.allowedOrigins { - if allowedOrigin == origin || allowedOrigin == corsOriginMatchAll { - return true - } - } - - return false -} - -func (ch *cors) isMatch(needle string, haystack []string) bool { - for _, v := range haystack { - if v == needle { - return true - } - } - - return false -} diff --git a/vendor/github.com/gorilla/handlers/doc.go b/vendor/github.com/gorilla/handlers/doc.go deleted file mode 100644 index 944e5a8..0000000 --- a/vendor/github.com/gorilla/handlers/doc.go +++ /dev/null @@ -1,9 +0,0 @@ -/* -Package handlers is a collection of handlers (aka "HTTP middleware") for use -with Go's net/http package (or any framework supporting http.Handler). - -The package includes handlers for logging in standardised formats, compressing -HTTP responses, validating content types and other useful tools for manipulating -requests and responses. -*/ -package handlers diff --git a/vendor/github.com/gorilla/handlers/handlers.go b/vendor/github.com/gorilla/handlers/handlers.go deleted file mode 100644 index 9544d2f..0000000 --- a/vendor/github.com/gorilla/handlers/handlers.go +++ /dev/null @@ -1,403 +0,0 @@ -// Copyright 2013 The Gorilla Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package handlers - -import ( - "bufio" - "fmt" - "io" - "net" - "net/http" - "net/url" - "sort" - "strconv" - "strings" - "time" - "unicode/utf8" -) - -// MethodHandler is an http.Handler that dispatches to a handler whose key in the -// MethodHandler's map matches the name of the HTTP request's method, eg: GET -// -// If the request's method is OPTIONS and OPTIONS is not a key in the map then -// the handler responds with a status of 200 and sets the Allow header to a -// comma-separated list of available methods. -// -// If the request's method doesn't match any of its keys the handler responds -// with a status of HTTP 405 "Method Not Allowed" and sets the Allow header to a -// comma-separated list of available methods. 
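Putting the CORS option helpers above together; the origins, headers and port are assumptions made for the sketch:

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/gorilla/handlers"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/users", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "users endpoint")
	})

	// Each helper returns a CORSOption; CORS() layers them over the defaults
	// (GET/HEAD/POST, the simple request headers, and all origins).
	cors := handlers.CORS(
		handlers.AllowedOrigins([]string{"https://app.example.com"}),
		handlers.AllowedMethods([]string{"GET", "POST", "PUT", "DELETE"}),
		handlers.AllowedHeaders([]string{"Content-Type", "Authorization"}),
		handlers.AllowCredentials(),
		handlers.MaxAge(600),
	)

	log.Fatal(http.ListenAndServe(":8000", cors(mux)))
}
```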
-type MethodHandler map[string]http.Handler - -func (h MethodHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { - if handler, ok := h[req.Method]; ok { - handler.ServeHTTP(w, req) - } else { - allow := []string{} - for k := range h { - allow = append(allow, k) - } - sort.Strings(allow) - w.Header().Set("Allow", strings.Join(allow, ", ")) - if req.Method == "OPTIONS" { - w.WriteHeader(http.StatusOK) - } else { - http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) - } - } -} - -// loggingHandler is the http.Handler implementation for LoggingHandlerTo and its -// friends -type loggingHandler struct { - writer io.Writer - handler http.Handler -} - -// combinedLoggingHandler is the http.Handler implementation for LoggingHandlerTo -// and its friends -type combinedLoggingHandler struct { - writer io.Writer - handler http.Handler -} - -func (h loggingHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { - t := time.Now() - logger := makeLogger(w) - url := *req.URL - h.handler.ServeHTTP(logger, req) - writeLog(h.writer, req, url, t, logger.Status(), logger.Size()) -} - -func (h combinedLoggingHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { - t := time.Now() - logger := makeLogger(w) - url := *req.URL - h.handler.ServeHTTP(logger, req) - writeCombinedLog(h.writer, req, url, t, logger.Status(), logger.Size()) -} - -func makeLogger(w http.ResponseWriter) loggingResponseWriter { - var logger loggingResponseWriter = &responseLogger{w: w} - if _, ok := w.(http.Hijacker); ok { - logger = &hijackLogger{responseLogger{w: w}} - } - h, ok1 := logger.(http.Hijacker) - c, ok2 := w.(http.CloseNotifier) - if ok1 && ok2 { - return hijackCloseNotifier{logger, h, c} - } - if ok2 { - return &closeNotifyWriter{logger, c} - } - return logger -} - -type loggingResponseWriter interface { - http.ResponseWriter - http.Flusher - Status() int - Size() int -} - -// responseLogger is wrapper of http.ResponseWriter that keeps track of its HTTP -// status code and body size -type responseLogger struct { - w http.ResponseWriter - status int - size int -} - -func (l *responseLogger) Header() http.Header { - return l.w.Header() -} - -func (l *responseLogger) Write(b []byte) (int, error) { - if l.status == 0 { - // The status will be StatusOK if WriteHeader has not been called yet - l.status = http.StatusOK - } - size, err := l.w.Write(b) - l.size += size - return size, err -} - -func (l *responseLogger) WriteHeader(s int) { - l.w.WriteHeader(s) - l.status = s -} - -func (l *responseLogger) Status() int { - return l.status -} - -func (l *responseLogger) Size() int { - return l.size -} - -func (l *responseLogger) Flush() { - f, ok := l.w.(http.Flusher) - if ok { - f.Flush() - } -} - -type hijackLogger struct { - responseLogger -} - -func (l *hijackLogger) Hijack() (net.Conn, *bufio.ReadWriter, error) { - h := l.responseLogger.w.(http.Hijacker) - conn, rw, err := h.Hijack() - if err == nil && l.responseLogger.status == 0 { - // The status will be StatusSwitchingProtocols if there was no error and - // WriteHeader has not been called yet - l.responseLogger.status = http.StatusSwitchingProtocols - } - return conn, rw, err -} - -type closeNotifyWriter struct { - loggingResponseWriter - http.CloseNotifier -} - -type hijackCloseNotifier struct { - loggingResponseWriter - http.Hijacker - http.CloseNotifier -} - -const lowerhex = "0123456789abcdef" - -func appendQuoted(buf []byte, s string) []byte { - var runeTmp [utf8.UTFMax]byte - for width := 0; len(s) > 0; s = s[width:] { - r := 
rune(s[0]) - width = 1 - if r >= utf8.RuneSelf { - r, width = utf8.DecodeRuneInString(s) - } - if width == 1 && r == utf8.RuneError { - buf = append(buf, `\x`...) - buf = append(buf, lowerhex[s[0]>>4]) - buf = append(buf, lowerhex[s[0]&0xF]) - continue - } - if r == rune('"') || r == '\\' { // always backslashed - buf = append(buf, '\\') - buf = append(buf, byte(r)) - continue - } - if strconv.IsPrint(r) { - n := utf8.EncodeRune(runeTmp[:], r) - buf = append(buf, runeTmp[:n]...) - continue - } - switch r { - case '\a': - buf = append(buf, `\a`...) - case '\b': - buf = append(buf, `\b`...) - case '\f': - buf = append(buf, `\f`...) - case '\n': - buf = append(buf, `\n`...) - case '\r': - buf = append(buf, `\r`...) - case '\t': - buf = append(buf, `\t`...) - case '\v': - buf = append(buf, `\v`...) - default: - switch { - case r < ' ': - buf = append(buf, `\x`...) - buf = append(buf, lowerhex[s[0]>>4]) - buf = append(buf, lowerhex[s[0]&0xF]) - case r > utf8.MaxRune: - r = 0xFFFD - fallthrough - case r < 0x10000: - buf = append(buf, `\u`...) - for s := 12; s >= 0; s -= 4 { - buf = append(buf, lowerhex[r>>uint(s)&0xF]) - } - default: - buf = append(buf, `\U`...) - for s := 28; s >= 0; s -= 4 { - buf = append(buf, lowerhex[r>>uint(s)&0xF]) - } - } - } - } - return buf - -} - -// buildCommonLogLine builds a log entry for req in Apache Common Log Format. -// ts is the timestamp with which the entry should be logged. -// status and size are used to provide the response HTTP status and size. -func buildCommonLogLine(req *http.Request, url url.URL, ts time.Time, status int, size int) []byte { - username := "-" - if url.User != nil { - if name := url.User.Username(); name != "" { - username = name - } - } - - host, _, err := net.SplitHostPort(req.RemoteAddr) - - if err != nil { - host = req.RemoteAddr - } - - uri := req.RequestURI - - // Requests using the CONNECT method over HTTP/2.0 must use - // the authority field (aka r.Host) to identify the target. - // Refer: https://httpwg.github.io/specs/rfc7540.html#CONNECT - if req.ProtoMajor == 2 && req.Method == "CONNECT" { - uri = req.Host - } - if uri == "" { - uri = url.RequestURI() - } - - buf := make([]byte, 0, 3*(len(host)+len(username)+len(req.Method)+len(uri)+len(req.Proto)+50)/2) - buf = append(buf, host...) - buf = append(buf, " - "...) - buf = append(buf, username...) - buf = append(buf, " ["...) - buf = append(buf, ts.Format("02/Jan/2006:15:04:05 -0700")...) - buf = append(buf, `] "`...) - buf = append(buf, req.Method...) - buf = append(buf, " "...) - buf = appendQuoted(buf, uri) - buf = append(buf, " "...) - buf = append(buf, req.Proto...) - buf = append(buf, `" `...) - buf = append(buf, strconv.Itoa(status)...) - buf = append(buf, " "...) - buf = append(buf, strconv.Itoa(size)...) - return buf -} - -// writeLog writes a log entry for req to w in Apache Common Log Format. -// ts is the timestamp with which the entry should be logged. -// status and size are used to provide the response HTTP status and size. -func writeLog(w io.Writer, req *http.Request, url url.URL, ts time.Time, status, size int) { - buf := buildCommonLogLine(req, url, ts, status, size) - buf = append(buf, '\n') - w.Write(buf) -} - -// writeCombinedLog writes a log entry for req to w in Apache Combined Log Format. -// ts is the timestamp with which the entry should be logged. -// status and size are used to provide the response HTTP status and size. 
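For orientation, a small sketch of how buildCommonLogLine and writeLog above surface through the package's LoggingHandler wrapper; the sample log line in the comment is approximate, and the address, timestamp, and sizes are made up:

```go
package main

import (
	"log"
	"net/http"
	"os"

	"github.com/gorilla/handlers"
)

func main() {
	app := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})

	// Each request is logged to stdout in Apache Common Log Format, assembled
	// by buildCommonLogLine/writeLog above, roughly:
	//   127.0.0.1 - - [10/Oct/2020:13:55:36 -0700] "GET / HTTP/1.1" 200 2
	logged := handlers.LoggingHandler(os.Stdout, app)
	log.Fatal(http.ListenAndServe(":8080", logged))
}
```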
-func writeCombinedLog(w io.Writer, req *http.Request, url url.URL, ts time.Time, status, size int) { - buf := buildCommonLogLine(req, url, ts, status, size) - buf = append(buf, ` "`...) - buf = appendQuoted(buf, req.Referer()) - buf = append(buf, `" "`...) - buf = appendQuoted(buf, req.UserAgent()) - buf = append(buf, '"', '\n') - w.Write(buf) -} - -// CombinedLoggingHandler return a http.Handler that wraps h and logs requests to out in -// Apache Combined Log Format. -// -// See http://httpd.apache.org/docs/2.2/logs.html#combined for a description of this format. -// -// LoggingHandler always sets the ident field of the log to - -func CombinedLoggingHandler(out io.Writer, h http.Handler) http.Handler { - return combinedLoggingHandler{out, h} -} - -// LoggingHandler return a http.Handler that wraps h and logs requests to out in -// Apache Common Log Format (CLF). -// -// See http://httpd.apache.org/docs/2.2/logs.html#common for a description of this format. -// -// LoggingHandler always sets the ident field of the log to - -// -// Example: -// -// r := mux.NewRouter() -// r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { -// w.Write([]byte("This is a catch-all route")) -// }) -// loggedRouter := handlers.LoggingHandler(os.Stdout, r) -// http.ListenAndServe(":1123", loggedRouter) -// -func LoggingHandler(out io.Writer, h http.Handler) http.Handler { - return loggingHandler{out, h} -} - -// isContentType validates the Content-Type header matches the supplied -// contentType. That is, its type and subtype match. -func isContentType(h http.Header, contentType string) bool { - ct := h.Get("Content-Type") - if i := strings.IndexRune(ct, ';'); i != -1 { - ct = ct[0:i] - } - return ct == contentType -} - -// ContentTypeHandler wraps and returns a http.Handler, validating the request -// content type is compatible with the contentTypes list. It writes a HTTP 415 -// error if that fails. -// -// Only PUT, POST, and PATCH requests are considered. -func ContentTypeHandler(h http.Handler, contentTypes ...string) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if !(r.Method == "PUT" || r.Method == "POST" || r.Method == "PATCH") { - h.ServeHTTP(w, r) - return - } - - for _, ct := range contentTypes { - if isContentType(r.Header, ct) { - h.ServeHTTP(w, r) - return - } - } - http.Error(w, fmt.Sprintf("Unsupported content type %q; expected one of %q", r.Header.Get("Content-Type"), contentTypes), http.StatusUnsupportedMediaType) - }) -} - -const ( - // HTTPMethodOverrideHeader is a commonly used - // http header to override a request method. - HTTPMethodOverrideHeader = "X-HTTP-Method-Override" - // HTTPMethodOverrideFormKey is a commonly used - // HTML form key to override a request method. - HTTPMethodOverrideFormKey = "_method" -) - -// HTTPMethodOverrideHandler wraps and returns a http.Handler which checks for -// the X-HTTP-Method-Override header or the _method form key, and overrides (if -// valid) request.Method with its value. -// -// This is especially useful for HTTP clients that don't support many http verbs. -// It isn't secure to override e.g a GET to a POST, so only POST requests are -// considered. Likewise, the override method can only be a "write" method: PUT, -// PATCH or DELETE. -// -// Form method takes precedence over header method. 
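A hedged sketch of how the ContentTypeHandler defined above and the method-override behaviour described in this comment compose as ordinary middleware; the content types and the route are illustrative choices, not taken from this repository:

```go
package main

import (
	"log"
	"net/http"

	"github.com/gorilla/handlers"
)

func main() {
	api := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// With the override applied, r.Method reports the effective method.
		w.Write([]byte(r.Method + "\n"))
	})

	// ContentTypeHandler only inspects PUT, POST and PATCH bodies; requests
	// with other methods pass straight through.
	checked := handlers.ContentTypeHandler(api, "application/json", "application/x-www-form-urlencoded")

	// A POST carrying "X-HTTP-Method-Override: DELETE" (or a _method=DELETE
	// form field) reaches the handler above with r.Method set to DELETE.
	h := handlers.HTTPMethodOverrideHandler(checked)

	log.Fatal(http.ListenAndServe(":8080", h))
}
```

With this chain, a POST to the server with an `X-HTTP-Method-Override: DELETE` header and an allowed Content-Type would be answered with "DELETE".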
-func HTTPMethodOverrideHandler(h http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.Method == "POST" { - om := r.FormValue(HTTPMethodOverrideFormKey) - if om == "" { - om = r.Header.Get(HTTPMethodOverrideHeader) - } - if om == "PUT" || om == "PATCH" || om == "DELETE" { - r.Method = om - } - } - h.ServeHTTP(w, r) - }) -} diff --git a/vendor/github.com/gorilla/handlers/proxy_headers.go b/vendor/github.com/gorilla/handlers/proxy_headers.go deleted file mode 100644 index 0be750f..0000000 --- a/vendor/github.com/gorilla/handlers/proxy_headers.go +++ /dev/null @@ -1,120 +0,0 @@ -package handlers - -import ( - "net/http" - "regexp" - "strings" -) - -var ( - // De-facto standard header keys. - xForwardedFor = http.CanonicalHeaderKey("X-Forwarded-For") - xForwardedHost = http.CanonicalHeaderKey("X-Forwarded-Host") - xForwardedProto = http.CanonicalHeaderKey("X-Forwarded-Proto") - xForwardedScheme = http.CanonicalHeaderKey("X-Forwarded-Scheme") - xRealIP = http.CanonicalHeaderKey("X-Real-IP") -) - -var ( - // RFC7239 defines a new "Forwarded: " header designed to replace the - // existing use of X-Forwarded-* headers. - // e.g. Forwarded: for=192.0.2.60;proto=https;by=203.0.113.43 - forwarded = http.CanonicalHeaderKey("Forwarded") - // Allows for a sub-match of the first value after 'for=' to the next - // comma, semi-colon or space. The match is case-insensitive. - forRegex = regexp.MustCompile(`(?i)(?:for=)([^(;|,| )]+)`) - // Allows for a sub-match for the first instance of scheme (http|https) - // prefixed by 'proto='. The match is case-insensitive. - protoRegex = regexp.MustCompile(`(?i)(?:proto=)(https|http)`) -) - -// ProxyHeaders inspects common reverse proxy headers and sets the corresponding -// fields in the HTTP request struct. These are X-Forwarded-For and X-Real-IP -// for the remote (client) IP address, X-Forwarded-Proto or X-Forwarded-Scheme -// for the scheme (http|https) and the RFC7239 Forwarded header, which may -// include both client IPs and schemes. -// -// NOTE: This middleware should only be used when behind a reverse -// proxy like nginx, HAProxy or Apache. Reverse proxies that don't (or are -// configured not to) strip these headers from client requests, or where these -// headers are accepted "as is" from a remote client (e.g. when Go is not behind -// a proxy), can manifest as a vulnerability if your application uses these -// headers for validating the 'trustworthiness' of a request. -func ProxyHeaders(h http.Handler) http.Handler { - fn := func(w http.ResponseWriter, r *http.Request) { - // Set the remote IP with the value passed from the proxy. - if fwd := getIP(r); fwd != "" { - r.RemoteAddr = fwd - } - - // Set the scheme (proto) with the value passed from the proxy. - if scheme := getScheme(r); scheme != "" { - r.URL.Scheme = scheme - } - // Set the host with the value passed by the proxy - if r.Header.Get(xForwardedHost) != "" { - r.Host = r.Header.Get(xForwardedHost) - } - // Call the next handler in the chain. - h.ServeHTTP(w, r) - } - - return http.HandlerFunc(fn) -} - -// getIP retrieves the IP from the X-Forwarded-For, X-Real-IP and RFC7239 -// Forwarded headers (in that order). -func getIP(r *http.Request) string { - var addr string - - if fwd := r.Header.Get(xForwardedFor); fwd != "" { - // Only grab the first (client) address. Note that '192.168.0.1, - // 10.1.1.1' is a valid key for X-Forwarded-For where addresses after - // the first may represent forwarding proxies earlier in the chain. 
- s := strings.Index(fwd, ", ") - if s == -1 { - s = len(fwd) - } - addr = fwd[:s] - } else if fwd := r.Header.Get(xRealIP); fwd != "" { - // X-Real-IP should only contain one IP address (the client making the - // request). - addr = fwd - } else if fwd := r.Header.Get(forwarded); fwd != "" { - // match should contain at least two elements if the protocol was - // specified in the Forwarded header. The first element will always be - // the 'for=' capture, which we ignore. In the case of multiple IP - // addresses (for=8.8.8.8, 8.8.4.4,172.16.1.20 is valid) we only - // extract the first, which should be the client IP. - if match := forRegex.FindStringSubmatch(fwd); len(match) > 1 { - // IPv6 addresses in Forwarded headers are quoted-strings. We strip - // these quotes. - addr = strings.Trim(match[1], `"`) - } - } - - return addr -} - -// getScheme retrieves the scheme from the X-Forwarded-Proto and RFC7239 -// Forwarded headers (in that order). -func getScheme(r *http.Request) string { - var scheme string - - // Retrieve the scheme from X-Forwarded-Proto. - if proto := r.Header.Get(xForwardedProto); proto != "" { - scheme = strings.ToLower(proto) - } else if proto = r.Header.Get(xForwardedScheme); proto != "" { - scheme = strings.ToLower(proto) - } else if proto = r.Header.Get(forwarded); proto != "" { - // match should contain at least two elements if the protocol was - // specified in the Forwarded header. The first element will always be - // the 'proto=' capture, which we ignore. In the case of multiple proto - // parameters (invalid) we only extract the first. - if match := protoRegex.FindStringSubmatch(proto); len(match) > 1 { - scheme = strings.ToLower(match[1]) - } - } - - return scheme -} diff --git a/vendor/github.com/gorilla/handlers/recovery.go b/vendor/github.com/gorilla/handlers/recovery.go deleted file mode 100644 index 65b7de5..0000000 --- a/vendor/github.com/gorilla/handlers/recovery.go +++ /dev/null @@ -1,86 +0,0 @@ -package handlers - -import ( - "log" - "net/http" - "runtime/debug" -) - -type recoveryHandler struct { - handler http.Handler - logger *log.Logger - printStack bool -} - -// RecoveryOption provides a functional approach to define -// configuration for a handler; such as setting the logging -// whether or not to print strack traces on panic. -type RecoveryOption func(http.Handler) - -func parseRecoveryOptions(h http.Handler, opts ...RecoveryOption) http.Handler { - for _, option := range opts { - option(h) - } - - return h -} - -// RecoveryHandler is HTTP middleware that recovers from a panic, -// logs the panic, writes http.StatusInternalServerError, and -// continues to the next handler. -// -// Example: -// -// r := mux.NewRouter() -// r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { -// panic("Unexpected error!") -// }) -// -// http.ListenAndServe(":1123", handlers.RecoveryHandler()(r)) -func RecoveryHandler(opts ...RecoveryOption) func(h http.Handler) http.Handler { - return func(h http.Handler) http.Handler { - r := &recoveryHandler{handler: h} - return parseRecoveryOptions(r, opts...) - } -} - -// RecoveryLogger is a functional option to override -// the default logger -func RecoveryLogger(logger *log.Logger) RecoveryOption { - return func(h http.Handler) { - r := h.(*recoveryHandler) - r.logger = logger - } -} - -// PrintRecoveryStack is a functional option to enable -// or disable printing stack traces on panic. 
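Putting the pieces from proxy_headers.go and recovery.go together, a minimal sketch; the logger prefix and the assumption that a trusted reverse proxy sits in front are mine, not the package's:

```go
package main

import (
	"log"
	"net/http"
	"os"

	"github.com/gorilla/handlers"
)

func main() {
	app := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Behind a trusted reverse proxy, ProxyHeaders has already rewritten
		// r.RemoteAddr and r.URL.Scheme from the X-Forwarded-* / Forwarded headers.
		w.Write([]byte("client: " + r.RemoteAddr + "\n"))
	})

	recovery := handlers.RecoveryHandler(
		handlers.RecoveryLogger(log.New(os.Stderr, "panic: ", log.LstdFlags)),
		handlers.PrintRecoveryStack(true), // also dump the stack trace on panic
	)

	// Only add ProxyHeaders when the service actually sits behind a proxy that
	// strips these headers from client requests (see the warning above).
	h := recovery(handlers.ProxyHeaders(app))

	log.Fatal(http.ListenAndServe(":8080", h))
}
```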
-func PrintRecoveryStack(print bool) RecoveryOption { - return func(h http.Handler) { - r := h.(*recoveryHandler) - r.printStack = print - } -} - -func (h recoveryHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { - defer func() { - if err := recover(); err != nil { - w.WriteHeader(http.StatusInternalServerError) - h.log(err) - } - }() - - h.handler.ServeHTTP(w, req) -} - -func (h recoveryHandler) log(message interface{}) { - if h.logger != nil { - h.logger.Println(message) - } else { - log.Println(message) - } - - if h.printStack { - debug.PrintStack() - } -} diff --git a/vendor/github.com/gorilla/mux/LICENSE b/vendor/github.com/gorilla/mux/LICENSE deleted file mode 100644 index 0e5fb87..0000000 --- a/vendor/github.com/gorilla/mux/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012 Rodrigo Moraes. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/gorilla/mux/README.md b/vendor/github.com/gorilla/mux/README.md deleted file mode 100644 index 8dcd718..0000000 --- a/vendor/github.com/gorilla/mux/README.md +++ /dev/null @@ -1,383 +0,0 @@ -gorilla/mux -=== -[![GoDoc](https://godoc.org/github.com/gorilla/mux?status.svg)](https://godoc.org/github.com/gorilla/mux) -[![Build Status](https://travis-ci.org/gorilla/mux.svg?branch=master)](https://travis-ci.org/gorilla/mux) -[![Sourcegraph](https://sourcegraph.com/github.com/gorilla/mux/-/badge.svg)](https://sourcegraph.com/github.com/gorilla/mux?badge) - -![Gorilla Logo](http://www.gorillatoolkit.org/static/images/gorilla-icon-64.png) - -http://www.gorillatoolkit.org/pkg/mux - -Package `gorilla/mux` implements a request router and dispatcher for matching incoming requests to -their respective handler. - -The name mux stands for "HTTP request multiplexer". Like the standard `http.ServeMux`, `mux.Router` matches incoming requests against a list of registered routes and calls a handler for the route that matches the URL or other conditions. The main features are: - -* It implements the `http.Handler` interface so it is compatible with the standard `http.ServeMux`. 
-* Requests can be matched based on URL host, path, path prefix, schemes, header and query values, HTTP methods or using custom matchers. -* URL hosts, paths and query values can have variables with an optional regular expression. -* Registered URLs can be built, or "reversed", which helps maintaining references to resources. -* Routes can be used as subrouters: nested routes are only tested if the parent route matches. This is useful to define groups of routes that share common conditions like a host, a path prefix or other repeated attributes. As a bonus, this optimizes request matching. - ---- - -* [Install](#install) -* [Examples](#examples) -* [Matching Routes](#matching-routes) -* [Static Files](#static-files) -* [Registered URLs](#registered-urls) -* [Walking Routes](#walking-routes) -* [Full Example](#full-example) - ---- - -## Install - -With a [correctly configured](https://golang.org/doc/install#testing) Go toolchain: - -```sh -go get -u github.com/gorilla/mux -``` - -## Examples - -Let's start registering a couple of URL paths and handlers: - -```go -func main() { - r := mux.NewRouter() - r.HandleFunc("/", HomeHandler) - r.HandleFunc("/products", ProductsHandler) - r.HandleFunc("/articles", ArticlesHandler) - http.Handle("/", r) -} -``` - -Here we register three routes mapping URL paths to handlers. This is equivalent to how `http.HandleFunc()` works: if an incoming request URL matches one of the paths, the corresponding handler is called passing (`http.ResponseWriter`, `*http.Request`) as parameters. - -Paths can have variables. They are defined using the format `{name}` or `{name:pattern}`. If a regular expression pattern is not defined, the matched variable will be anything until the next slash. For example: - -```go -r := mux.NewRouter() -r.HandleFunc("/products/{key}", ProductHandler) -r.HandleFunc("/articles/{category}/", ArticlesCategoryHandler) -r.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler) -``` - -The names are used to create a map of route variables which can be retrieved calling `mux.Vars()`: - -```go -func ArticlesCategoryHandler(w http.ResponseWriter, r *http.Request) { - vars := mux.Vars(r) - w.WriteHeader(http.StatusOK) - fmt.Fprintf(w, "Category: %v\n", vars["category"]) -} -``` - -And this is all you need to know about the basic usage. More advanced options are explained below. - -### Matching Routes - -Routes can also be restricted to a domain or subdomain. Just define a host pattern to be matched. They can also have variables: - -```go -r := mux.NewRouter() -// Only matches if domain is "www.example.com". -r.Host("www.example.com") -// Matches a dynamic subdomain. -r.Host("{subdomain:[a-z]+}.domain.com") -``` - -There are several other matchers that can be added. To match path prefixes: - -```go -r.PathPrefix("/products/") -``` - -...or HTTP methods: - -```go -r.Methods("GET", "POST") -``` - -...or URL schemes: - -```go -r.Schemes("https") -``` - -...or header values: - -```go -r.Headers("X-Requested-With", "XMLHttpRequest") -``` - -...or query values: - -```go -r.Queries("key", "value") -``` - -...or to use a custom matcher function: - -```go -r.MatcherFunc(func(r *http.Request, rm *RouteMatch) bool { - return r.ProtoMajor == 0 -}) -``` - -...and finally, it is possible to combine several matchers in a single route: - -```go -r.HandleFunc("/products", ProductsHandler). - Host("www.example.com"). - Methods("GET"). 
- Schemes("http") -``` - -Setting the same matching conditions again and again can be boring, so we have a way to group several routes that share the same requirements. We call it "subrouting". - -For example, let's say we have several URLs that should only match when the host is `www.example.com`. Create a route for that host and get a "subrouter" from it: - -```go -r := mux.NewRouter() -s := r.Host("www.example.com").Subrouter() -``` - -Then register routes in the subrouter: - -```go -s.HandleFunc("/products/", ProductsHandler) -s.HandleFunc("/products/{key}", ProductHandler) -s.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler) -``` - -The three URL paths we registered above will only be tested if the domain is `www.example.com`, because the subrouter is tested first. This is not only convenient, but also optimizes request matching. You can create subrouters combining any attribute matchers accepted by a route. - -Subrouters can be used to create domain or path "namespaces": you define subrouters in a central place and then parts of the app can register its paths relatively to a given subrouter. - -There's one more thing about subroutes. When a subrouter has a path prefix, the inner routes use it as base for their paths: - -```go -r := mux.NewRouter() -s := r.PathPrefix("/products").Subrouter() -// "/products/" -s.HandleFunc("/", ProductsHandler) -// "/products/{key}/" -s.HandleFunc("/{key}/", ProductHandler) -// "/products/{key}/details" -s.HandleFunc("/{key}/details", ProductDetailsHandler) -``` -### Listing Routes - -Routes on a mux can be listed using the Router.Walk method—useful for generating documentation: - -```go -package main - -import ( - "fmt" - "net/http" - "strings" - - "github.com/gorilla/mux" -) - -func handler(w http.ResponseWriter, r *http.Request) { - return -} - -func main() { - r := mux.NewRouter() - r.HandleFunc("/", handler) - r.HandleFunc("/products", handler).Methods("POST") - r.HandleFunc("/articles", handler).Methods("GET") - r.HandleFunc("/articles/{id}", handler).Methods("GET", "PUT") - r.Walk(func(route *mux.Route, router *mux.Router, ancestors []*mux.Route) error { - t, err := route.GetPathTemplate() - if err != nil { - return err - } - // p will contain regular expression is compatible with regular expression in Perl, Python, and other languages. - // for instance the regular expression for path '/articles/{id}' will be '^/articles/(?P[^/]+)$' - p, err := route.GetPathRegexp() - if err != nil { - return err - } - m, err := route.GetMethods() - if err != nil { - return err - } - fmt.Println(strings.Join(m, ","), t, p) - return nil - }) - http.Handle("/", r) -} -``` - -### Static Files - -Note that the path provided to `PathPrefix()` represents a "wildcard": calling -`PathPrefix("/static/").Handler(...)` means that the handler will be passed any -request that matches "/static/*". This makes it easy to serve static files with mux: - -```go -func main() { - var dir string - - flag.StringVar(&dir, "dir", ".", "the directory to serve files from. Defaults to the current dir") - flag.Parse() - r := mux.NewRouter() - - // This will serve files under http://localhost:8000/static/ - r.PathPrefix("/static/").Handler(http.StripPrefix("/static/", http.FileServer(http.Dir(dir)))) - - srv := &http.Server{ - Handler: r, - Addr: "127.0.0.1:8000", - // Good practice: enforce timeouts for servers you create! 
- WriteTimeout: 15 * time.Second, - ReadTimeout: 15 * time.Second, - } - - log.Fatal(srv.ListenAndServe()) -} -``` - -### Registered URLs - -Now let's see how to build registered URLs. - -Routes can be named. All routes that define a name can have their URLs built, or "reversed". We define a name calling `Name()` on a route. For example: - -```go -r := mux.NewRouter() -r.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler). - Name("article") -``` - -To build a URL, get the route and call the `URL()` method, passing a sequence of key/value pairs for the route variables. For the previous route, we would do: - -```go -url, err := r.Get("article").URL("category", "technology", "id", "42") -``` - -...and the result will be a `url.URL` with the following path: - -``` -"/articles/technology/42" -``` - -This also works for host and query value variables: - -```go -r := mux.NewRouter() -r.Host("{subdomain}.domain.com"). - Path("/articles/{category}/{id:[0-9]+}"). - Queries("filter", "{filter}"). - HandlerFunc(ArticleHandler). - Name("article") - -// url.String() will be "http://news.domain.com/articles/technology/42?filter=gorilla" -url, err := r.Get("article").URL("subdomain", "news", - "category", "technology", - "id", "42", - "filter", "gorilla") -``` - -All variables defined in the route are required, and their values must conform to the corresponding patterns. These requirements guarantee that a generated URL will always match a registered route -- the only exception is for explicitly defined "build-only" routes which never match. - -Regex support also exists for matching Headers within a route. For example, we could do: - -```go -r.HeadersRegexp("Content-Type", "application/(text|json)") -``` - -...and the route will match both requests with a Content-Type of `application/json` as well as `application/text` - -There's also a way to build only the URL host or path for a route: use the methods `URLHost()` or `URLPath()` instead. For the previous route, we would do: - -```go -// "http://news.domain.com/" -host, err := r.Get("article").URLHost("subdomain", "news") - -// "/articles/technology/42" -path, err := r.Get("article").URLPath("category", "technology", "id", "42") -``` - -And if you use subrouters, host and path defined separately can be built as well: - -```go -r := mux.NewRouter() -s := r.Host("{subdomain}.domain.com").Subrouter() -s.Path("/articles/{category}/{id:[0-9]+}"). - HandlerFunc(ArticleHandler). - Name("article") - -// "http://news.domain.com/articles/technology/42" -url, err := r.Get("article").URL("subdomain", "news", - "category", "technology", - "id", "42") -``` - -### Walking Routes - -The `Walk` function on `mux.Router` can be used to visit all of the routes that are registered on a router. For example, -the following prints all of the registered routes: - -```go -r := mux.NewRouter() -r.HandleFunc("/", handler) -r.HandleFunc("/products", handler).Methods("POST") -r.HandleFunc("/articles", handler).Methods("GET") -r.HandleFunc("/articles/{id}", handler).Methods("GET", "PUT") -r.Walk(func(route *mux.Route, router *mux.Router, ancestors []*mux.Route) error { - t, err := route.GetPathTemplate() - if err != nil { - return err - } - // p will contain a regular expression that is compatible with regular expressions in Perl, Python, and other languages. - // For example, the regular expression for path '/articles/{id}' will be '^/articles/(?P[^/]+)$'. 
- p, err := route.GetPathRegexp() - if err != nil { - return err - } - m, err := route.GetMethods() - if err != nil { - return err - } - fmt.Println(strings.Join(m, ","), t, p) - return nil -}) -``` - -## Full Example - -Here's a complete, runnable example of a small `mux` based server: - -```go -package main - -import ( - "net/http" - "log" - "github.com/gorilla/mux" -) - -func YourHandler(w http.ResponseWriter, r *http.Request) { - w.Write([]byte("Gorilla!\n")) -} - -func main() { - r := mux.NewRouter() - // Routes consist of a path and a handler function. - r.HandleFunc("/", YourHandler) - - // Bind to a port and pass our router in - log.Fatal(http.ListenAndServe(":8000", r)) -} -``` - -## License - -BSD licensed. See the LICENSE file for details. diff --git a/vendor/github.com/gorilla/mux/context_gorilla.go b/vendor/github.com/gorilla/mux/context_gorilla.go deleted file mode 100644 index d7adaa8..0000000 --- a/vendor/github.com/gorilla/mux/context_gorilla.go +++ /dev/null @@ -1,26 +0,0 @@ -// +build !go1.7 - -package mux - -import ( - "net/http" - - "github.com/gorilla/context" -) - -func contextGet(r *http.Request, key interface{}) interface{} { - return context.Get(r, key) -} - -func contextSet(r *http.Request, key, val interface{}) *http.Request { - if val == nil { - return r - } - - context.Set(r, key, val) - return r -} - -func contextClear(r *http.Request) { - context.Clear(r) -} diff --git a/vendor/github.com/gorilla/mux/context_native.go b/vendor/github.com/gorilla/mux/context_native.go deleted file mode 100644 index 209cbea..0000000 --- a/vendor/github.com/gorilla/mux/context_native.go +++ /dev/null @@ -1,24 +0,0 @@ -// +build go1.7 - -package mux - -import ( - "context" - "net/http" -) - -func contextGet(r *http.Request, key interface{}) interface{} { - return r.Context().Value(key) -} - -func contextSet(r *http.Request, key, val interface{}) *http.Request { - if val == nil { - return r - } - - return r.WithContext(context.WithValue(r.Context(), key, val)) -} - -func contextClear(r *http.Request) { - return -} diff --git a/vendor/github.com/gorilla/mux/doc.go b/vendor/github.com/gorilla/mux/doc.go deleted file mode 100644 index cce30b2..0000000 --- a/vendor/github.com/gorilla/mux/doc.go +++ /dev/null @@ -1,242 +0,0 @@ -// Copyright 2012 The Gorilla Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package mux implements a request router and dispatcher. - -The name mux stands for "HTTP request multiplexer". Like the standard -http.ServeMux, mux.Router matches incoming requests against a list of -registered routes and calls a handler for the route that matches the URL -or other conditions. The main features are: - - * Requests can be matched based on URL host, path, path prefix, schemes, - header and query values, HTTP methods or using custom matchers. - * URL hosts, paths and query values can have variables with an optional - regular expression. - * Registered URLs can be built, or "reversed", which helps maintaining - references to resources. - * Routes can be used as subrouters: nested routes are only tested if the - parent route matches. This is useful to define groups of routes that - share common conditions like a host, a path prefix or other repeated - attributes. As a bonus, this optimizes request matching. - * It implements the http.Handler interface so it is compatible with the - standard http.ServeMux. 
- -Let's start registering a couple of URL paths and handlers: - - func main() { - r := mux.NewRouter() - r.HandleFunc("/", HomeHandler) - r.HandleFunc("/products", ProductsHandler) - r.HandleFunc("/articles", ArticlesHandler) - http.Handle("/", r) - } - -Here we register three routes mapping URL paths to handlers. This is -equivalent to how http.HandleFunc() works: if an incoming request URL matches -one of the paths, the corresponding handler is called passing -(http.ResponseWriter, *http.Request) as parameters. - -Paths can have variables. They are defined using the format {name} or -{name:pattern}. If a regular expression pattern is not defined, the matched -variable will be anything until the next slash. For example: - - r := mux.NewRouter() - r.HandleFunc("/products/{key}", ProductHandler) - r.HandleFunc("/articles/{category}/", ArticlesCategoryHandler) - r.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler) - -Groups can be used inside patterns, as long as they are non-capturing (?:re). For example: - - r.HandleFunc("/articles/{category}/{sort:(?:asc|desc|new)}", ArticlesCategoryHandler) - -The names are used to create a map of route variables which can be retrieved -calling mux.Vars(): - - vars := mux.Vars(request) - category := vars["category"] - -Note that if any capturing groups are present, mux will panic() during parsing. To prevent -this, convert any capturing groups to non-capturing, e.g. change "/{sort:(asc|desc)}" to -"/{sort:(?:asc|desc)}". This is a change from prior versions which behaved unpredictably -when capturing groups were present. - -And this is all you need to know about the basic usage. More advanced options -are explained below. - -Routes can also be restricted to a domain or subdomain. Just define a host -pattern to be matched. They can also have variables: - - r := mux.NewRouter() - // Only matches if domain is "www.example.com". - r.Host("www.example.com") - // Matches a dynamic subdomain. - r.Host("{subdomain:[a-z]+}.domain.com") - -There are several other matchers that can be added. To match path prefixes: - - r.PathPrefix("/products/") - -...or HTTP methods: - - r.Methods("GET", "POST") - -...or URL schemes: - - r.Schemes("https") - -...or header values: - - r.Headers("X-Requested-With", "XMLHttpRequest") - -...or query values: - - r.Queries("key", "value") - -...or to use a custom matcher function: - - r.MatcherFunc(func(r *http.Request, rm *RouteMatch) bool { - return r.ProtoMajor == 0 - }) - -...and finally, it is possible to combine several matchers in a single route: - - r.HandleFunc("/products", ProductsHandler). - Host("www.example.com"). - Methods("GET"). - Schemes("http") - -Setting the same matching conditions again and again can be boring, so we have -a way to group several routes that share the same requirements. -We call it "subrouting". - -For example, let's say we have several URLs that should only match when the -host is "www.example.com". Create a route for that host and get a "subrouter" -from it: - - r := mux.NewRouter() - s := r.Host("www.example.com").Subrouter() - -Then register routes in the subrouter: - - s.HandleFunc("/products/", ProductsHandler) - s.HandleFunc("/products/{key}", ProductHandler) - s.HandleFunc("/articles/{category}/{id:[0-9]+}"), ArticleHandler) - -The three URL paths we registered above will only be tested if the domain is -"www.example.com", because the subrouter is tested first. This is not -only convenient, but also optimizes request matching. 
You can create -subrouters combining any attribute matchers accepted by a route. - -Subrouters can be used to create domain or path "namespaces": you define -subrouters in a central place and then parts of the app can register its -paths relatively to a given subrouter. - -There's one more thing about subroutes. When a subrouter has a path prefix, -the inner routes use it as base for their paths: - - r := mux.NewRouter() - s := r.PathPrefix("/products").Subrouter() - // "/products/" - s.HandleFunc("/", ProductsHandler) - // "/products/{key}/" - s.HandleFunc("/{key}/", ProductHandler) - // "/products/{key}/details" - s.HandleFunc("/{key}/details", ProductDetailsHandler) - -Note that the path provided to PathPrefix() represents a "wildcard": calling -PathPrefix("/static/").Handler(...) means that the handler will be passed any -request that matches "/static/*". This makes it easy to serve static files with mux: - - func main() { - var dir string - - flag.StringVar(&dir, "dir", ".", "the directory to serve files from. Defaults to the current dir") - flag.Parse() - r := mux.NewRouter() - - // This will serve files under http://localhost:8000/static/ - r.PathPrefix("/static/").Handler(http.StripPrefix("/static/", http.FileServer(http.Dir(dir)))) - - srv := &http.Server{ - Handler: r, - Addr: "127.0.0.1:8000", - // Good practice: enforce timeouts for servers you create! - WriteTimeout: 15 * time.Second, - ReadTimeout: 15 * time.Second, - } - - log.Fatal(srv.ListenAndServe()) - } - -Now let's see how to build registered URLs. - -Routes can be named. All routes that define a name can have their URLs built, -or "reversed". We define a name calling Name() on a route. For example: - - r := mux.NewRouter() - r.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler). - Name("article") - -To build a URL, get the route and call the URL() method, passing a sequence of -key/value pairs for the route variables. For the previous route, we would do: - - url, err := r.Get("article").URL("category", "technology", "id", "42") - -...and the result will be a url.URL with the following path: - - "/articles/technology/42" - -This also works for host and query value variables: - - r := mux.NewRouter() - r.Host("{subdomain}.domain.com"). - Path("/articles/{category}/{id:[0-9]+}"). - Queries("filter", "{filter}"). - HandlerFunc(ArticleHandler). - Name("article") - - // url.String() will be "http://news.domain.com/articles/technology/42?filter=gorilla" - url, err := r.Get("article").URL("subdomain", "news", - "category", "technology", - "id", "42", - "filter", "gorilla") - -All variables defined in the route are required, and their values must -conform to the corresponding patterns. These requirements guarantee that a -generated URL will always match a registered route -- the only exception is -for explicitly defined "build-only" routes which never match. - -Regex support also exists for matching Headers within a route. For example, we could do: - - r.HeadersRegexp("Content-Type", "application/(text|json)") - -...and the route will match both requests with a Content-Type of `application/json` as well as -`application/text` - -There's also a way to build only the URL host or path for a route: -use the methods URLHost() or URLPath() instead. 
For the previous route, -we would do: - - // "http://news.domain.com/" - host, err := r.Get("article").URLHost("subdomain", "news") - - // "/articles/technology/42" - path, err := r.Get("article").URLPath("category", "technology", "id", "42") - -And if you use subrouters, host and path defined separately can be built -as well: - - r := mux.NewRouter() - s := r.Host("{subdomain}.domain.com").Subrouter() - s.Path("/articles/{category}/{id:[0-9]+}"). - HandlerFunc(ArticleHandler). - Name("article") - - // "http://news.domain.com/articles/technology/42" - url, err := r.Get("article").URL("subdomain", "news", - "category", "technology", - "id", "42") -*/ -package mux diff --git a/vendor/github.com/gorilla/mux/mux.go b/vendor/github.com/gorilla/mux/mux.go deleted file mode 100644 index fb69196..0000000 --- a/vendor/github.com/gorilla/mux/mux.go +++ /dev/null @@ -1,580 +0,0 @@ -// Copyright 2012 The Gorilla Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package mux - -import ( - "errors" - "fmt" - "net/http" - "path" - "regexp" - "strings" -) - -var ( - ErrMethodMismatch = errors.New("method is not allowed") -) - -// NewRouter returns a new router instance. -func NewRouter() *Router { - return &Router{namedRoutes: make(map[string]*Route), KeepContext: false} -} - -// Router registers routes to be matched and dispatches a handler. -// -// It implements the http.Handler interface, so it can be registered to serve -// requests: -// -// var router = mux.NewRouter() -// -// func main() { -// http.Handle("/", router) -// } -// -// Or, for Google App Engine, register it in a init() function: -// -// func init() { -// http.Handle("/", router) -// } -// -// This will send all incoming requests to the router. -type Router struct { - // Configurable Handler to be used when no route matches. - NotFoundHandler http.Handler - - // Configurable Handler to be used when the request method does not match the route. - MethodNotAllowedHandler http.Handler - - // Parent route, if this is a subrouter. - parent parentRoute - // Routes to be matched, in order. - routes []*Route - // Routes by name for URL building. - namedRoutes map[string]*Route - // See Router.StrictSlash(). This defines the flag for new routes. - strictSlash bool - // See Router.SkipClean(). This defines the flag for new routes. - skipClean bool - // If true, do not clear the request context after handling the request. - // This has no effect when go1.7+ is used, since the context is stored - // on the request itself. - KeepContext bool - // see Router.UseEncodedPath(). This defines a flag for all routes. - useEncodedPath bool -} - -// Match matches registered routes against the request. -func (r *Router) Match(req *http.Request, match *RouteMatch) bool { - for _, route := range r.routes { - if route.Match(req, match) { - return true - } - } - - if match.MatchErr == ErrMethodMismatch && r.MethodNotAllowedHandler != nil { - match.Handler = r.MethodNotAllowedHandler - return true - } - - // Closest match for a router (includes sub-routers) - if r.NotFoundHandler != nil { - match.Handler = r.NotFoundHandler - return true - } - return false -} - -// ServeHTTP dispatches the handler registered in the matched route. -// -// When there is a match, the route variables can be retrieved calling -// mux.Vars(request). 
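Since the Router struct and Match above expose NotFoundHandler and MethodNotAllowedHandler as plain fields, a short sketch of overriding them; the route and response texts are placeholders:

```go
package main

import (
	"log"
	"net/http"

	"github.com/gorilla/mux"
)

func main() {
	r := mux.NewRouter()
	r.HandleFunc("/products", func(w http.ResponseWriter, req *http.Request) {
		w.Write([]byte("products\n"))
	}).Methods("GET")

	// Match() above falls back to these two handlers when no route matches,
	// or when only the method differs (ErrMethodMismatch).
	r.NotFoundHandler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		http.Error(w, "no such page", http.StatusNotFound)
	})
	r.MethodNotAllowedHandler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		http.Error(w, "only GET is supported here", http.StatusMethodNotAllowed)
	})

	log.Fatal(http.ListenAndServe(":8080", r))
}
```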
-func (r *Router) ServeHTTP(w http.ResponseWriter, req *http.Request) { - if !r.skipClean { - path := req.URL.Path - if r.useEncodedPath { - path = getPath(req) - } - // Clean path to canonical form and redirect. - if p := cleanPath(path); p != path { - - // Added 3 lines (Philip Schlump) - It was dropping the query string and #whatever from query. - // This matches with fix in go 1.2 r.c. 4 for same problem. Go Issue: - // http://code.google.com/p/go/issues/detail?id=5252 - url := *req.URL - url.Path = p - p = url.String() - - w.Header().Set("Location", p) - w.WriteHeader(http.StatusMovedPermanently) - return - } - } - var match RouteMatch - var handler http.Handler - if r.Match(req, &match) { - handler = match.Handler - req = setVars(req, match.Vars) - req = setCurrentRoute(req, match.Route) - } - - if handler == nil && match.MatchErr == ErrMethodMismatch { - handler = methodNotAllowedHandler() - } - - if handler == nil { - handler = http.NotFoundHandler() - } - - if !r.KeepContext { - defer contextClear(req) - } - handler.ServeHTTP(w, req) -} - -// Get returns a route registered with the given name. -func (r *Router) Get(name string) *Route { - return r.getNamedRoutes()[name] -} - -// GetRoute returns a route registered with the given name. This method -// was renamed to Get() and remains here for backwards compatibility. -func (r *Router) GetRoute(name string) *Route { - return r.getNamedRoutes()[name] -} - -// StrictSlash defines the trailing slash behavior for new routes. The initial -// value is false. -// -// When true, if the route path is "/path/", accessing "/path" will redirect -// to the former and vice versa. In other words, your application will always -// see the path as specified in the route. -// -// When false, if the route path is "/path", accessing "/path/" will not match -// this route and vice versa. -// -// Special case: when a route sets a path prefix using the PathPrefix() method, -// strict slash is ignored for that route because the redirect behavior can't -// be determined from a prefix alone. However, any subrouters created from that -// route inherit the original StrictSlash setting. -func (r *Router) StrictSlash(value bool) *Router { - r.strictSlash = value - return r -} - -// SkipClean defines the path cleaning behaviour for new routes. The initial -// value is false. Users should be careful about which routes are not cleaned -// -// When true, if the route path is "/path//to", it will remain with the double -// slash. This is helpful if you have a route like: /fetch/http://xkcd.com/534/ -// -// When false, the path will be cleaned, so /fetch/http://xkcd.com/534/ will -// become /fetch/http/xkcd.com/534 -func (r *Router) SkipClean(value bool) *Router { - r.skipClean = value - return r -} - -// UseEncodedPath tells the router to match the encoded original path -// to the routes. -// For eg. "/path/foo%2Fbar/to" will match the path "/path/{var}/to". -// This behavior has the drawback of needing to match routes against -// r.RequestURI instead of r.URL.Path. Any modifications (such as http.StripPrefix) -// to r.URL.Path will not affect routing when this flag is on and thus may -// induce unintended behavior. -// -// If not called, the router will match the unencoded path to the routes. -// For eg. 
"/path/foo%2Fbar/to" will match the path "/path/foo/bar/to" -func (r *Router) UseEncodedPath() *Router { - r.useEncodedPath = true - return r -} - -// ---------------------------------------------------------------------------- -// parentRoute -// ---------------------------------------------------------------------------- - -func (r *Router) getBuildScheme() string { - if r.parent != nil { - return r.parent.getBuildScheme() - } - return "" -} - -// getNamedRoutes returns the map where named routes are registered. -func (r *Router) getNamedRoutes() map[string]*Route { - if r.namedRoutes == nil { - if r.parent != nil { - r.namedRoutes = r.parent.getNamedRoutes() - } else { - r.namedRoutes = make(map[string]*Route) - } - } - return r.namedRoutes -} - -// getRegexpGroup returns regexp definitions from the parent route, if any. -func (r *Router) getRegexpGroup() *routeRegexpGroup { - if r.parent != nil { - return r.parent.getRegexpGroup() - } - return nil -} - -func (r *Router) buildVars(m map[string]string) map[string]string { - if r.parent != nil { - m = r.parent.buildVars(m) - } - return m -} - -// ---------------------------------------------------------------------------- -// Route factories -// ---------------------------------------------------------------------------- - -// NewRoute registers an empty route. -func (r *Router) NewRoute() *Route { - route := &Route{parent: r, strictSlash: r.strictSlash, skipClean: r.skipClean, useEncodedPath: r.useEncodedPath} - r.routes = append(r.routes, route) - return route -} - -// Handle registers a new route with a matcher for the URL path. -// See Route.Path() and Route.Handler(). -func (r *Router) Handle(path string, handler http.Handler) *Route { - return r.NewRoute().Path(path).Handler(handler) -} - -// HandleFunc registers a new route with a matcher for the URL path. -// See Route.Path() and Route.HandlerFunc(). -func (r *Router) HandleFunc(path string, f func(http.ResponseWriter, - *http.Request)) *Route { - return r.NewRoute().Path(path).HandlerFunc(f) -} - -// Headers registers a new route with a matcher for request header values. -// See Route.Headers(). -func (r *Router) Headers(pairs ...string) *Route { - return r.NewRoute().Headers(pairs...) -} - -// Host registers a new route with a matcher for the URL host. -// See Route.Host(). -func (r *Router) Host(tpl string) *Route { - return r.NewRoute().Host(tpl) -} - -// MatcherFunc registers a new route with a custom matcher function. -// See Route.MatcherFunc(). -func (r *Router) MatcherFunc(f MatcherFunc) *Route { - return r.NewRoute().MatcherFunc(f) -} - -// Methods registers a new route with a matcher for HTTP methods. -// See Route.Methods(). -func (r *Router) Methods(methods ...string) *Route { - return r.NewRoute().Methods(methods...) -} - -// Path registers a new route with a matcher for the URL path. -// See Route.Path(). -func (r *Router) Path(tpl string) *Route { - return r.NewRoute().Path(tpl) -} - -// PathPrefix registers a new route with a matcher for the URL path prefix. -// See Route.PathPrefix(). -func (r *Router) PathPrefix(tpl string) *Route { - return r.NewRoute().PathPrefix(tpl) -} - -// Queries registers a new route with a matcher for URL query values. -// See Route.Queries(). -func (r *Router) Queries(pairs ...string) *Route { - return r.NewRoute().Queries(pairs...) -} - -// Schemes registers a new route with a matcher for URL schemes. -// See Route.Schemes(). -func (r *Router) Schemes(schemes ...string) *Route { - return r.NewRoute().Schemes(schemes...) 
-} - -// BuildVarsFunc registers a new route with a custom function for modifying -// route variables before building a URL. -func (r *Router) BuildVarsFunc(f BuildVarsFunc) *Route { - return r.NewRoute().BuildVarsFunc(f) -} - -// Walk walks the router and all its sub-routers, calling walkFn for each route -// in the tree. The routes are walked in the order they were added. Sub-routers -// are explored depth-first. -func (r *Router) Walk(walkFn WalkFunc) error { - return r.walk(walkFn, []*Route{}) -} - -// SkipRouter is used as a return value from WalkFuncs to indicate that the -// router that walk is about to descend down to should be skipped. -var SkipRouter = errors.New("skip this router") - -// WalkFunc is the type of the function called for each route visited by Walk. -// At every invocation, it is given the current route, and the current router, -// and a list of ancestor routes that lead to the current route. -type WalkFunc func(route *Route, router *Router, ancestors []*Route) error - -func (r *Router) walk(walkFn WalkFunc, ancestors []*Route) error { - for _, t := range r.routes { - err := walkFn(t, r, ancestors) - if err == SkipRouter { - continue - } - if err != nil { - return err - } - for _, sr := range t.matchers { - if h, ok := sr.(*Router); ok { - ancestors = append(ancestors, t) - err := h.walk(walkFn, ancestors) - if err != nil { - return err - } - ancestors = ancestors[:len(ancestors)-1] - } - } - if h, ok := t.handler.(*Router); ok { - ancestors = append(ancestors, t) - err := h.walk(walkFn, ancestors) - if err != nil { - return err - } - ancestors = ancestors[:len(ancestors)-1] - } - } - return nil -} - -// ---------------------------------------------------------------------------- -// Context -// ---------------------------------------------------------------------------- - -// RouteMatch stores information about a matched route. -type RouteMatch struct { - Route *Route - Handler http.Handler - Vars map[string]string - - // MatchErr is set to appropriate matching error - // It is set to ErrMethodMismatch if there is a mismatch in - // the request method and route method - MatchErr error -} - -type contextKey int - -const ( - varsKey contextKey = iota - routeKey -) - -// Vars returns the route variables for the current request, if any. -func Vars(r *http.Request) map[string]string { - if rv := contextGet(r, varsKey); rv != nil { - return rv.(map[string]string) - } - return nil -} - -// CurrentRoute returns the matched route for the current request, if any. -// This only works when called inside the handler of the matched route -// because the matched route is stored in the request context which is cleared -// after the handler returns, unless the KeepContext option is set on the -// Router. 
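A minimal sketch of the constraint described above, calling CurrentRoute from inside the matched route's handler; the article route itself is invented for the example:

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/gorilla/mux"
)

func articleHandler(w http.ResponseWriter, r *http.Request) {
	// CurrentRoute only works here, inside the matched route's handler, because
	// the matched route is kept in the request context that is cleared afterwards.
	tpl, err := mux.CurrentRoute(r).GetPathTemplate()
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	fmt.Fprintf(w, "template %s matched with id %s\n", tpl, mux.Vars(r)["id"])
}

func main() {
	r := mux.NewRouter()
	r.HandleFunc("/articles/{id:[0-9]+}", articleHandler).Name("article")
	log.Fatal(http.ListenAndServe(":8080", r))
}
```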
-func CurrentRoute(r *http.Request) *Route { - if rv := contextGet(r, routeKey); rv != nil { - return rv.(*Route) - } - return nil -} - -func setVars(r *http.Request, val interface{}) *http.Request { - return contextSet(r, varsKey, val) -} - -func setCurrentRoute(r *http.Request, val interface{}) *http.Request { - return contextSet(r, routeKey, val) -} - -// ---------------------------------------------------------------------------- -// Helpers -// ---------------------------------------------------------------------------- - -// getPath returns the escaped path if possible; doing what URL.EscapedPath() -// which was added in go1.5 does -func getPath(req *http.Request) string { - if req.RequestURI != "" { - // Extract the path from RequestURI (which is escaped unlike URL.Path) - // as detailed here as detailed in https://golang.org/pkg/net/url/#URL - // for < 1.5 server side workaround - // http://localhost/path/here?v=1 -> /path/here - path := req.RequestURI - path = strings.TrimPrefix(path, req.URL.Scheme+`://`) - path = strings.TrimPrefix(path, req.URL.Host) - if i := strings.LastIndex(path, "?"); i > -1 { - path = path[:i] - } - if i := strings.LastIndex(path, "#"); i > -1 { - path = path[:i] - } - return path - } - return req.URL.Path -} - -// cleanPath returns the canonical path for p, eliminating . and .. elements. -// Borrowed from the net/http package. -func cleanPath(p string) string { - if p == "" { - return "/" - } - if p[0] != '/' { - p = "/" + p - } - np := path.Clean(p) - // path.Clean removes trailing slash except for root; - // put the trailing slash back if necessary. - if p[len(p)-1] == '/' && np != "/" { - np += "/" - } - - return np -} - -// uniqueVars returns an error if two slices contain duplicated strings. -func uniqueVars(s1, s2 []string) error { - for _, v1 := range s1 { - for _, v2 := range s2 { - if v1 == v2 { - return fmt.Errorf("mux: duplicated route variable %q", v2) - } - } - } - return nil -} - -// checkPairs returns the count of strings passed in, and an error if -// the count is not an even number. -func checkPairs(pairs ...string) (int, error) { - length := len(pairs) - if length%2 != 0 { - return length, fmt.Errorf( - "mux: number of parameters must be multiple of 2, got %v", pairs) - } - return length, nil -} - -// mapFromPairsToString converts variadic string parameters to a -// string to string map. -func mapFromPairsToString(pairs ...string) (map[string]string, error) { - length, err := checkPairs(pairs...) - if err != nil { - return nil, err - } - m := make(map[string]string, length/2) - for i := 0; i < length; i += 2 { - m[pairs[i]] = pairs[i+1] - } - return m, nil -} - -// mapFromPairsToRegex converts variadic string parameters to a -// string to regex map. -func mapFromPairsToRegex(pairs ...string) (map[string]*regexp.Regexp, error) { - length, err := checkPairs(pairs...) - if err != nil { - return nil, err - } - m := make(map[string]*regexp.Regexp, length/2) - for i := 0; i < length; i += 2 { - regex, err := regexp.Compile(pairs[i+1]) - if err != nil { - return nil, err - } - m[pairs[i]] = regex - } - return m, nil -} - -// matchInArray returns true if the given string value is in the array. -func matchInArray(arr []string, value string) bool { - for _, v := range arr { - if v == value { - return true - } - } - return false -} - -// matchMapWithString returns true if the given key/value pairs exist in a given map. 
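The helper whose body follows treats an empty expected value as "the key merely has to exist", which is how it surfaces through Route.Headers; a small sketch, with the header names chosen arbitrarily:

```go
package main

import (
	"log"
	"net/http"

	"github.com/gorilla/mux"
)

func main() {
	r := mux.NewRouter()

	// Both pairs are checked by matchMapWithString below: the first requires an
	// exact value, the second (empty value) only requires the header to be set.
	r.HandleFunc("/ajax", func(w http.ResponseWriter, req *http.Request) {
		w.Write([]byte("ajax only\n"))
	}).Headers(
		"X-Requested-With", "XMLHttpRequest",
		"X-Trace-Id", "",
	)

	log.Fatal(http.ListenAndServe(":8080", r))
}
```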
-func matchMapWithString(toCheck map[string]string, toMatch map[string][]string, canonicalKey bool) bool { - for k, v := range toCheck { - // Check if key exists. - if canonicalKey { - k = http.CanonicalHeaderKey(k) - } - if values := toMatch[k]; values == nil { - return false - } else if v != "" { - // If value was defined as an empty string we only check that the - // key exists. Otherwise we also check for equality. - valueExists := false - for _, value := range values { - if v == value { - valueExists = true - break - } - } - if !valueExists { - return false - } - } - } - return true -} - -// matchMapWithRegex returns true if the given key/value pairs exist in a given map compiled against -// the given regex -func matchMapWithRegex(toCheck map[string]*regexp.Regexp, toMatch map[string][]string, canonicalKey bool) bool { - for k, v := range toCheck { - // Check if key exists. - if canonicalKey { - k = http.CanonicalHeaderKey(k) - } - if values := toMatch[k]; values == nil { - return false - } else if v != nil { - // If value was defined as an empty string we only check that the - // key exists. Otherwise we also check for equality. - valueExists := false - for _, value := range values { - if v.MatchString(value) { - valueExists = true - break - } - } - if !valueExists { - return false - } - } - } - return true -} - -// methodNotAllowed replies to the request with an HTTP status code 405. -func methodNotAllowed(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusMethodNotAllowed) -} - -// methodNotAllowedHandler returns a simple request handler -// that replies to each request with a status code 405. -func methodNotAllowedHandler() http.Handler { return http.HandlerFunc(methodNotAllowed) } diff --git a/vendor/github.com/gorilla/mux/regexp.go b/vendor/github.com/gorilla/mux/regexp.go deleted file mode 100644 index 80d1f78..0000000 --- a/vendor/github.com/gorilla/mux/regexp.go +++ /dev/null @@ -1,326 +0,0 @@ -// Copyright 2012 The Gorilla Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package mux - -import ( - "bytes" - "fmt" - "net/http" - "net/url" - "regexp" - "strconv" - "strings" -) - -// newRouteRegexp parses a route template and returns a routeRegexp, -// used to match a host, a path or a query string. -// -// It will extract named variables, assemble a regexp to be matched, create -// a "reverse" template to build URLs and compile regexps to validate variable -// values used in URL building. -// -// Previously we accepted only Python-like identifiers for variable -// names ([a-zA-Z_][a-zA-Z0-9_]*), but currently the only restriction is that -// name and pattern can't be empty, and names can't contain a colon. -func newRouteRegexp(tpl string, matchHost, matchPrefix, matchQuery, strictSlash, useEncodedPath bool) (*routeRegexp, error) { - // Check if it is well-formed. - idxs, errBraces := braceIndices(tpl) - if errBraces != nil { - return nil, errBraces - } - // Backup the original. - template := tpl - // Now let's parse it. - defaultPattern := "[^/]+" - if matchQuery { - defaultPattern = ".*" - } else if matchHost { - defaultPattern = "[^.]+" - matchPrefix = false - } - // Only match strict slash if not matching - if matchPrefix || matchHost || matchQuery { - strictSlash = false - } - // Set a flag for strictSlash. 
- endSlash := false - if strictSlash && strings.HasSuffix(tpl, "/") { - tpl = tpl[:len(tpl)-1] - endSlash = true - } - varsN := make([]string, len(idxs)/2) - varsR := make([]*regexp.Regexp, len(idxs)/2) - pattern := bytes.NewBufferString("") - pattern.WriteByte('^') - reverse := bytes.NewBufferString("") - var end int - var err error - for i := 0; i < len(idxs); i += 2 { - // Set all values we are interested in. - raw := tpl[end:idxs[i]] - end = idxs[i+1] - parts := strings.SplitN(tpl[idxs[i]+1:end-1], ":", 2) - name := parts[0] - patt := defaultPattern - if len(parts) == 2 { - patt = parts[1] - } - // Name or pattern can't be empty. - if name == "" || patt == "" { - return nil, fmt.Errorf("mux: missing name or pattern in %q", - tpl[idxs[i]:end]) - } - // Build the regexp pattern. - fmt.Fprintf(pattern, "%s(?P<%s>%s)", regexp.QuoteMeta(raw), varGroupName(i/2), patt) - - // Build the reverse template. - fmt.Fprintf(reverse, "%s%%s", raw) - - // Append variable name and compiled pattern. - varsN[i/2] = name - varsR[i/2], err = regexp.Compile(fmt.Sprintf("^%s$", patt)) - if err != nil { - return nil, err - } - } - // Add the remaining. - raw := tpl[end:] - pattern.WriteString(regexp.QuoteMeta(raw)) - if strictSlash { - pattern.WriteString("[/]?") - } - if matchQuery { - // Add the default pattern if the query value is empty - if queryVal := strings.SplitN(template, "=", 2)[1]; queryVal == "" { - pattern.WriteString(defaultPattern) - } - } - if !matchPrefix { - pattern.WriteByte('$') - } - reverse.WriteString(raw) - if endSlash { - reverse.WriteByte('/') - } - // Compile full regexp. - reg, errCompile := regexp.Compile(pattern.String()) - if errCompile != nil { - return nil, errCompile - } - - // Check for capturing groups which used to work in older versions - if reg.NumSubexp() != len(idxs)/2 { - panic(fmt.Sprintf("route %s contains capture groups in its regexp. ", template) + - "Only non-capturing groups are accepted: e.g. (?:pattern) instead of (pattern)") - } - - // Done! - return &routeRegexp{ - template: template, - matchHost: matchHost, - matchQuery: matchQuery, - strictSlash: strictSlash, - useEncodedPath: useEncodedPath, - regexp: reg, - reverse: reverse.String(), - varsN: varsN, - varsR: varsR, - }, nil -} - -// routeRegexp stores a regexp to match a host or path and information to -// collect and validate route variables. -type routeRegexp struct { - // The unmodified template. - template string - // True for host match, false for path or query string match. - matchHost bool - // True for query string match, false for path and host match. - matchQuery bool - // The strictSlash value defined on the route, but disabled if PathPrefix was used. - strictSlash bool - // Determines whether to use encoded path from getPath function or unencoded - // req.URL.Path for path matching - useEncodedPath bool - // Expanded regexp. - regexp *regexp.Regexp - // Reverse template. - reverse string - // Variable names. - varsN []string - // Variable regexps (validators). - varsR []*regexp.Regexp -} - -// Match matches the regexp against the URL host or path. -func (r *routeRegexp) Match(req *http.Request, match *RouteMatch) bool { - if !r.matchHost { - if r.matchQuery { - return r.matchQueryString(req) - } - path := req.URL.Path - if r.useEncodedPath { - path = getPath(req) - } - return r.regexp.MatchString(path) - } - - return r.regexp.MatchString(getHost(req)) -} - -// url builds a URL part using the given values. 
-func (r *routeRegexp) url(values map[string]string) (string, error) { - urlValues := make([]interface{}, len(r.varsN)) - for k, v := range r.varsN { - value, ok := values[v] - if !ok { - return "", fmt.Errorf("mux: missing route variable %q", v) - } - if r.matchQuery { - value = url.QueryEscape(value) - } - urlValues[k] = value - } - rv := fmt.Sprintf(r.reverse, urlValues...) - if !r.regexp.MatchString(rv) { - // The URL is checked against the full regexp, instead of checking - // individual variables. This is faster but to provide a good error - // message, we check individual regexps if the URL doesn't match. - for k, v := range r.varsN { - if !r.varsR[k].MatchString(values[v]) { - return "", fmt.Errorf( - "mux: variable %q doesn't match, expected %q", values[v], - r.varsR[k].String()) - } - } - } - return rv, nil -} - -// getURLQuery returns a single query parameter from a request URL. -// For a URL with foo=bar&baz=ding, we return only the relevant key -// value pair for the routeRegexp. -func (r *routeRegexp) getURLQuery(req *http.Request) string { - if !r.matchQuery { - return "" - } - templateKey := strings.SplitN(r.template, "=", 2)[0] - for key, vals := range req.URL.Query() { - if key == templateKey && len(vals) > 0 { - return key + "=" + vals[0] - } - } - return "" -} - -func (r *routeRegexp) matchQueryString(req *http.Request) bool { - return r.regexp.MatchString(r.getURLQuery(req)) -} - -// braceIndices returns the first level curly brace indices from a string. -// It returns an error in case of unbalanced braces. -func braceIndices(s string) ([]int, error) { - var level, idx int - var idxs []int - for i := 0; i < len(s); i++ { - switch s[i] { - case '{': - if level++; level == 1 { - idx = i - } - case '}': - if level--; level == 0 { - idxs = append(idxs, idx, i+1) - } else if level < 0 { - return nil, fmt.Errorf("mux: unbalanced braces in %q", s) - } - } - } - if level != 0 { - return nil, fmt.Errorf("mux: unbalanced braces in %q", s) - } - return idxs, nil -} - -// varGroupName builds a capturing group name for the indexed variable. -func varGroupName(idx int) string { - return "v" + strconv.Itoa(idx) -} - -// ---------------------------------------------------------------------------- -// routeRegexpGroup -// ---------------------------------------------------------------------------- - -// routeRegexpGroup groups the route matchers that carry variables. -type routeRegexpGroup struct { - host *routeRegexp - path *routeRegexp - queries []*routeRegexp -} - -// setMatch extracts the variables from the URL once a route matches. -func (v *routeRegexpGroup) setMatch(req *http.Request, m *RouteMatch, r *Route) { - // Store host variables. - if v.host != nil { - host := getHost(req) - matches := v.host.regexp.FindStringSubmatchIndex(host) - if len(matches) > 0 { - extractVars(host, matches, v.host.varsN, m.Vars) - } - } - path := req.URL.Path - if r.useEncodedPath { - path = getPath(req) - } - // Store path variables. - if v.path != nil { - matches := v.path.regexp.FindStringSubmatchIndex(path) - if len(matches) > 0 { - extractVars(path, matches, v.path.varsN, m.Vars) - // Check if we should redirect. - if v.path.strictSlash { - p1 := strings.HasSuffix(path, "/") - p2 := strings.HasSuffix(v.path.template, "/") - if p1 != p2 { - u, _ := url.Parse(req.URL.String()) - if p1 { - u.Path = u.Path[:len(u.Path)-1] - } else { - u.Path += "/" - } - m.Handler = http.RedirectHandler(u.String(), 301) - } - } - } - } - // Store query string variables. 
- for _, q := range v.queries { - queryURL := q.getURLQuery(req) - matches := q.regexp.FindStringSubmatchIndex(queryURL) - if len(matches) > 0 { - extractVars(queryURL, matches, q.varsN, m.Vars) - } - } -} - -// getHost tries its best to return the request host. -func getHost(r *http.Request) string { - if r.URL.IsAbs() { - return r.URL.Host - } - host := r.Host - // Slice off any port information. - if i := strings.Index(host, ":"); i != -1 { - host = host[:i] - } - return host - -} - -func extractVars(input string, matches []int, names []string, output map[string]string) { - for i, name := range names { - output[name] = input[matches[2*i+2]:matches[2*i+3]] - } -} diff --git a/vendor/github.com/gorilla/mux/route.go b/vendor/github.com/gorilla/mux/route.go deleted file mode 100644 index 6863adb..0000000 --- a/vendor/github.com/gorilla/mux/route.go +++ /dev/null @@ -1,713 +0,0 @@ -// Copyright 2012 The Gorilla Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package mux - -import ( - "errors" - "fmt" - "net/http" - "net/url" - "regexp" - "strings" -) - -// Route stores information to match a request and build URLs. -type Route struct { - // Parent where the route was registered (a Router). - parent parentRoute - // Request handler for the route. - handler http.Handler - // List of matchers. - matchers []matcher - // Manager for the variables from host and path. - regexp *routeRegexpGroup - // If true, when the path pattern is "/path/", accessing "/path" will - // redirect to the former and vice versa. - strictSlash bool - // If true, when the path pattern is "/path//to", accessing "/path//to" - // will not redirect - skipClean bool - // If true, "/path/foo%2Fbar/to" will match the path "/path/{var}/to" - useEncodedPath bool - // The scheme used when building URLs. - buildScheme string - // If true, this route never matches: it is only used to build URLs. - buildOnly bool - // The name used to build URLs. - name string - // Error resulted from building a route. - err error - - buildVarsFunc BuildVarsFunc -} - -func (r *Route) SkipClean() bool { - return r.skipClean -} - -// Match matches the route against the request. -func (r *Route) Match(req *http.Request, match *RouteMatch) bool { - if r.buildOnly || r.err != nil { - return false - } - - var matchErr error - - // Match everything. - for _, m := range r.matchers { - if matched := m.Match(req, match); !matched { - if _, ok := m.(methodMatcher); ok { - matchErr = ErrMethodMismatch - continue - } - matchErr = nil - return false - } - } - - if matchErr != nil { - match.MatchErr = matchErr - return false - } - - match.MatchErr = nil - // Yay, we have a match. Let's collect some info about it. - if match.Route == nil { - match.Route = r - } - if match.Handler == nil { - match.Handler = r.handler - } - if match.Vars == nil { - match.Vars = make(map[string]string) - } - - // Set variables. - if r.regexp != nil { - r.regexp.setMatch(req, match, r) - } - return true -} - -// ---------------------------------------------------------------------------- -// Route attributes -// ---------------------------------------------------------------------------- - -// GetError returns an error resulted from building the route, if any. -func (r *Route) GetError() error { - return r.err -} - -// BuildOnly sets the route to never match: it is only used to build URLs. 
-func (r *Route) BuildOnly() *Route { - r.buildOnly = true - return r -} - -// Handler -------------------------------------------------------------------- - -// Handler sets a handler for the route. -func (r *Route) Handler(handler http.Handler) *Route { - if r.err == nil { - r.handler = handler - } - return r -} - -// HandlerFunc sets a handler function for the route. -func (r *Route) HandlerFunc(f func(http.ResponseWriter, *http.Request)) *Route { - return r.Handler(http.HandlerFunc(f)) -} - -// GetHandler returns the handler for the route, if any. -func (r *Route) GetHandler() http.Handler { - return r.handler -} - -// Name ----------------------------------------------------------------------- - -// Name sets the name for the route, used to build URLs. -// If the name was registered already it will be overwritten. -func (r *Route) Name(name string) *Route { - if r.name != "" { - r.err = fmt.Errorf("mux: route already has name %q, can't set %q", - r.name, name) - } - if r.err == nil { - r.name = name - r.getNamedRoutes()[name] = r - } - return r -} - -// GetName returns the name for the route, if any. -func (r *Route) GetName() string { - return r.name -} - -// ---------------------------------------------------------------------------- -// Matchers -// ---------------------------------------------------------------------------- - -// matcher types try to match a request. -type matcher interface { - Match(*http.Request, *RouteMatch) bool -} - -// addMatcher adds a matcher to the route. -func (r *Route) addMatcher(m matcher) *Route { - if r.err == nil { - r.matchers = append(r.matchers, m) - } - return r -} - -// addRegexpMatcher adds a host or path matcher and builder to a route. -func (r *Route) addRegexpMatcher(tpl string, matchHost, matchPrefix, matchQuery bool) error { - if r.err != nil { - return r.err - } - r.regexp = r.getRegexpGroup() - if !matchHost && !matchQuery { - if len(tpl) > 0 && tpl[0] != '/' { - return fmt.Errorf("mux: path must start with a slash, got %q", tpl) - } - if r.regexp.path != nil { - tpl = strings.TrimRight(r.regexp.path.template, "/") + tpl - } - } - rr, err := newRouteRegexp(tpl, matchHost, matchPrefix, matchQuery, r.strictSlash, r.useEncodedPath) - if err != nil { - return err - } - for _, q := range r.regexp.queries { - if err = uniqueVars(rr.varsN, q.varsN); err != nil { - return err - } - } - if matchHost { - if r.regexp.path != nil { - if err = uniqueVars(rr.varsN, r.regexp.path.varsN); err != nil { - return err - } - } - r.regexp.host = rr - } else { - if r.regexp.host != nil { - if err = uniqueVars(rr.varsN, r.regexp.host.varsN); err != nil { - return err - } - } - if matchQuery { - r.regexp.queries = append(r.regexp.queries, rr) - } else { - r.regexp.path = rr - } - } - r.addMatcher(rr) - return nil -} - -// Headers -------------------------------------------------------------------- - -// headerMatcher matches the request against header values. -type headerMatcher map[string]string - -func (m headerMatcher) Match(r *http.Request, match *RouteMatch) bool { - return matchMapWithString(m, r.Header, true) -} - -// Headers adds a matcher for request header values. -// It accepts a sequence of key/value pairs to be matched. For example: -// -// r := mux.NewRouter() -// r.Headers("Content-Type", "application/json", -// "X-Requested-With", "XMLHttpRequest") -// -// The above route will only match if both request header values match. -// If the value is an empty string, it will match any value if the key is set. 
-func (r *Route) Headers(pairs ...string) *Route { - if r.err == nil { - var headers map[string]string - headers, r.err = mapFromPairsToString(pairs...) - return r.addMatcher(headerMatcher(headers)) - } - return r -} - -// headerRegexMatcher matches the request against the route given a regex for the header -type headerRegexMatcher map[string]*regexp.Regexp - -func (m headerRegexMatcher) Match(r *http.Request, match *RouteMatch) bool { - return matchMapWithRegex(m, r.Header, true) -} - -// HeadersRegexp accepts a sequence of key/value pairs, where the value has regex -// support. For example: -// -// r := mux.NewRouter() -// r.HeadersRegexp("Content-Type", "application/(text|json)", -// "X-Requested-With", "XMLHttpRequest") -// -// The above route will only match if both the request header matches both regular expressions. -// It the value is an empty string, it will match any value if the key is set. -func (r *Route) HeadersRegexp(pairs ...string) *Route { - if r.err == nil { - var headers map[string]*regexp.Regexp - headers, r.err = mapFromPairsToRegex(pairs...) - return r.addMatcher(headerRegexMatcher(headers)) - } - return r -} - -// Host ----------------------------------------------------------------------- - -// Host adds a matcher for the URL host. -// It accepts a template with zero or more URL variables enclosed by {}. -// Variables can define an optional regexp pattern to be matched: -// -// - {name} matches anything until the next dot. -// -// - {name:pattern} matches the given regexp pattern. -// -// For example: -// -// r := mux.NewRouter() -// r.Host("www.example.com") -// r.Host("{subdomain}.domain.com") -// r.Host("{subdomain:[a-z]+}.domain.com") -// -// Variable names must be unique in a given route. They can be retrieved -// calling mux.Vars(request). -func (r *Route) Host(tpl string) *Route { - r.err = r.addRegexpMatcher(tpl, true, false, false) - return r -} - -// MatcherFunc ---------------------------------------------------------------- - -// MatcherFunc is the function signature used by custom matchers. -type MatcherFunc func(*http.Request, *RouteMatch) bool - -// Match returns the match for a given request. -func (m MatcherFunc) Match(r *http.Request, match *RouteMatch) bool { - return m(r, match) -} - -// MatcherFunc adds a custom function to be used as request matcher. -func (r *Route) MatcherFunc(f MatcherFunc) *Route { - return r.addMatcher(f) -} - -// Methods -------------------------------------------------------------------- - -// methodMatcher matches the request against HTTP methods. -type methodMatcher []string - -func (m methodMatcher) Match(r *http.Request, match *RouteMatch) bool { - return matchInArray(m, r.Method) -} - -// Methods adds a matcher for HTTP methods. -// It accepts a sequence of one or more methods to be matched, e.g.: -// "GET", "POST", "PUT". -func (r *Route) Methods(methods ...string) *Route { - for k, v := range methods { - methods[k] = strings.ToUpper(v) - } - return r.addMatcher(methodMatcher(methods)) -} - -// Path ----------------------------------------------------------------------- - -// Path adds a matcher for the URL path. -// It accepts a template with zero or more URL variables enclosed by {}. The -// template must start with a "/". -// Variables can define an optional regexp pattern to be matched: -// -// - {name} matches anything until the next slash. -// -// - {name:pattern} matches the given regexp pattern. 
-// -// For example: -// -// r := mux.NewRouter() -// r.Path("/products/").Handler(ProductsHandler) -// r.Path("/products/{key}").Handler(ProductsHandler) -// r.Path("/articles/{category}/{id:[0-9]+}"). -// Handler(ArticleHandler) -// -// Variable names must be unique in a given route. They can be retrieved -// calling mux.Vars(request). -func (r *Route) Path(tpl string) *Route { - r.err = r.addRegexpMatcher(tpl, false, false, false) - return r -} - -// PathPrefix ----------------------------------------------------------------- - -// PathPrefix adds a matcher for the URL path prefix. This matches if the given -// template is a prefix of the full URL path. See Route.Path() for details on -// the tpl argument. -// -// Note that it does not treat slashes specially ("/foobar/" will be matched by -// the prefix "/foo") so you may want to use a trailing slash here. -// -// Also note that the setting of Router.StrictSlash() has no effect on routes -// with a PathPrefix matcher. -func (r *Route) PathPrefix(tpl string) *Route { - r.err = r.addRegexpMatcher(tpl, false, true, false) - return r -} - -// Query ---------------------------------------------------------------------- - -// Queries adds a matcher for URL query values. -// It accepts a sequence of key/value pairs. Values may define variables. -// For example: -// -// r := mux.NewRouter() -// r.Queries("foo", "bar", "id", "{id:[0-9]+}") -// -// The above route will only match if the URL contains the defined queries -// values, e.g.: ?foo=bar&id=42. -// -// It the value is an empty string, it will match any value if the key is set. -// -// Variables can define an optional regexp pattern to be matched: -// -// - {name} matches anything until the next slash. -// -// - {name:pattern} matches the given regexp pattern. -func (r *Route) Queries(pairs ...string) *Route { - length := len(pairs) - if length%2 != 0 { - r.err = fmt.Errorf( - "mux: number of parameters must be multiple of 2, got %v", pairs) - return nil - } - for i := 0; i < length; i += 2 { - if r.err = r.addRegexpMatcher(pairs[i]+"="+pairs[i+1], false, false, true); r.err != nil { - return r - } - } - - return r -} - -// Schemes -------------------------------------------------------------------- - -// schemeMatcher matches the request against URL schemes. -type schemeMatcher []string - -func (m schemeMatcher) Match(r *http.Request, match *RouteMatch) bool { - return matchInArray(m, r.URL.Scheme) -} - -// Schemes adds a matcher for URL schemes. -// It accepts a sequence of schemes to be matched, e.g.: "http", "https". -func (r *Route) Schemes(schemes ...string) *Route { - for k, v := range schemes { - schemes[k] = strings.ToLower(v) - } - if r.buildScheme == "" && len(schemes) > 0 { - r.buildScheme = schemes[0] - } - return r.addMatcher(schemeMatcher(schemes)) -} - -// BuildVarsFunc -------------------------------------------------------------- - -// BuildVarsFunc is the function signature used by custom build variable -// functions (which can modify route variables before a route's URL is built). -type BuildVarsFunc func(map[string]string) map[string]string - -// BuildVarsFunc adds a custom function to be used to modify build variables -// before a route's URL is built. -func (r *Route) BuildVarsFunc(f BuildVarsFunc) *Route { - r.buildVarsFunc = f - return r -} - -// Subrouter ------------------------------------------------------------------ - -// Subrouter creates a subrouter for the route. -// -// It will test the inner routes only if the parent route matched. 
For example: -// -// r := mux.NewRouter() -// s := r.Host("www.example.com").Subrouter() -// s.HandleFunc("/products/", ProductsHandler) -// s.HandleFunc("/products/{key}", ProductHandler) -// s.HandleFunc("/articles/{category}/{id:[0-9]+}"), ArticleHandler) -// -// Here, the routes registered in the subrouter won't be tested if the host -// doesn't match. -func (r *Route) Subrouter() *Router { - router := &Router{parent: r, strictSlash: r.strictSlash} - r.addMatcher(router) - return router -} - -// ---------------------------------------------------------------------------- -// URL building -// ---------------------------------------------------------------------------- - -// URL builds a URL for the route. -// -// It accepts a sequence of key/value pairs for the route variables. For -// example, given this route: -// -// r := mux.NewRouter() -// r.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler). -// Name("article") -// -// ...a URL for it can be built using: -// -// url, err := r.Get("article").URL("category", "technology", "id", "42") -// -// ...which will return an url.URL with the following path: -// -// "/articles/technology/42" -// -// This also works for host variables: -// -// r := mux.NewRouter() -// r.Host("{subdomain}.domain.com"). -// HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler). -// Name("article") -// -// // url.String() will be "http://news.domain.com/articles/technology/42" -// url, err := r.Get("article").URL("subdomain", "news", -// "category", "technology", -// "id", "42") -// -// All variables defined in the route are required, and their values must -// conform to the corresponding patterns. -func (r *Route) URL(pairs ...string) (*url.URL, error) { - if r.err != nil { - return nil, r.err - } - if r.regexp == nil { - return nil, errors.New("mux: route doesn't have a host or path") - } - values, err := r.prepareVars(pairs...) - if err != nil { - return nil, err - } - var scheme, host, path string - queries := make([]string, 0, len(r.regexp.queries)) - if r.regexp.host != nil { - if host, err = r.regexp.host.url(values); err != nil { - return nil, err - } - scheme = "http" - if s := r.getBuildScheme(); s != "" { - scheme = s - } - } - if r.regexp.path != nil { - if path, err = r.regexp.path.url(values); err != nil { - return nil, err - } - } - for _, q := range r.regexp.queries { - var query string - if query, err = q.url(values); err != nil { - return nil, err - } - queries = append(queries, query) - } - return &url.URL{ - Scheme: scheme, - Host: host, - Path: path, - RawQuery: strings.Join(queries, "&"), - }, nil -} - -// URLHost builds the host part of the URL for a route. See Route.URL(). -// -// The route must have a host defined. -func (r *Route) URLHost(pairs ...string) (*url.URL, error) { - if r.err != nil { - return nil, r.err - } - if r.regexp == nil || r.regexp.host == nil { - return nil, errors.New("mux: route doesn't have a host") - } - values, err := r.prepareVars(pairs...) - if err != nil { - return nil, err - } - host, err := r.regexp.host.url(values) - if err != nil { - return nil, err - } - u := &url.URL{ - Scheme: "http", - Host: host, - } - if s := r.getBuildScheme(); s != "" { - u.Scheme = s - } - return u, nil -} - -// URLPath builds the path part of the URL for a route. See Route.URL(). -// -// The route must have a path defined. 
-func (r *Route) URLPath(pairs ...string) (*url.URL, error) { - if r.err != nil { - return nil, r.err - } - if r.regexp == nil || r.regexp.path == nil { - return nil, errors.New("mux: route doesn't have a path") - } - values, err := r.prepareVars(pairs...) - if err != nil { - return nil, err - } - path, err := r.regexp.path.url(values) - if err != nil { - return nil, err - } - return &url.URL{ - Path: path, - }, nil -} - -// GetPathTemplate returns the template used to build the -// route match. -// This is useful for building simple REST API documentation and for instrumentation -// against third-party services. -// An error will be returned if the route does not define a path. -func (r *Route) GetPathTemplate() (string, error) { - if r.err != nil { - return "", r.err - } - if r.regexp == nil || r.regexp.path == nil { - return "", errors.New("mux: route doesn't have a path") - } - return r.regexp.path.template, nil -} - -// GetPathRegexp returns the expanded regular expression used to match route path. -// This is useful for building simple REST API documentation and for instrumentation -// against third-party services. -// An error will be returned if the route does not define a path. -func (r *Route) GetPathRegexp() (string, error) { - if r.err != nil { - return "", r.err - } - if r.regexp == nil || r.regexp.path == nil { - return "", errors.New("mux: route does not have a path") - } - return r.regexp.path.regexp.String(), nil -} - -// GetMethods returns the methods the route matches against -// This is useful for building simple REST API documentation and for instrumentation -// against third-party services. -// An empty list will be returned if route does not have methods. -func (r *Route) GetMethods() ([]string, error) { - if r.err != nil { - return nil, r.err - } - for _, m := range r.matchers { - if methods, ok := m.(methodMatcher); ok { - return []string(methods), nil - } - } - return nil, nil -} - -// GetHostTemplate returns the template used to build the -// route match. -// This is useful for building simple REST API documentation and for instrumentation -// against third-party services. -// An error will be returned if the route does not define a host. -func (r *Route) GetHostTemplate() (string, error) { - if r.err != nil { - return "", r.err - } - if r.regexp == nil || r.regexp.host == nil { - return "", errors.New("mux: route doesn't have a host") - } - return r.regexp.host.template, nil -} - -// prepareVars converts the route variable pairs into a map. If the route has a -// BuildVarsFunc, it is invoked. -func (r *Route) prepareVars(pairs ...string) (map[string]string, error) { - m, err := mapFromPairsToString(pairs...) - if err != nil { - return nil, err - } - return r.buildVars(m), nil -} - -func (r *Route) buildVars(m map[string]string) map[string]string { - if r.parent != nil { - m = r.parent.buildVars(m) - } - if r.buildVarsFunc != nil { - m = r.buildVarsFunc(m) - } - return m -} - -// ---------------------------------------------------------------------------- -// parentRoute -// ---------------------------------------------------------------------------- - -// parentRoute allows routes to know about parent host and path definitions. 
-type parentRoute interface { - getBuildScheme() string - getNamedRoutes() map[string]*Route - getRegexpGroup() *routeRegexpGroup - buildVars(map[string]string) map[string]string -} - -func (r *Route) getBuildScheme() string { - if r.buildScheme != "" { - return r.buildScheme - } - if r.parent != nil { - return r.parent.getBuildScheme() - } - return "" -} - -// getNamedRoutes returns the map where named routes are registered. -func (r *Route) getNamedRoutes() map[string]*Route { - if r.parent == nil { - // During tests router is not always set. - r.parent = NewRouter() - } - return r.parent.getNamedRoutes() -} - -// getRegexpGroup returns regexp definitions from this route. -func (r *Route) getRegexpGroup() *routeRegexpGroup { - if r.regexp == nil { - if r.parent == nil { - // During tests router is not always set. - r.parent = NewRouter() - } - regexp := r.parent.getRegexpGroup() - if regexp == nil { - r.regexp = new(routeRegexpGroup) - } else { - // Copy. - r.regexp = &routeRegexpGroup{ - host: regexp.host, - path: regexp.path, - queries: regexp.queries, - } - } - } - return r.regexp -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/LICENSE.txt b/vendor/github.com/grpc-ecosystem/grpc-gateway/LICENSE.txt deleted file mode 100644 index 3645162..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/LICENSE.txt +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2015, Gengo, Inc. -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - * Neither the name of Gengo, Inc. nor the names of its - contributors may be used to endorse or promote products derived from this - software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
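For reference while reviewing this vendor removal: a minimal, illustrative sketch of the gorilla/mux matching and URL-building API documented in the deleted route.go above (Host/Path/Queries matchers, named routes, Route.URL reversal). This is not part of the diff; the handler, host, and variable values below are hypothetical.

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/gorilla/mux"
)

// ArticleHandler is a hypothetical handler used only for this sketch.
// mux.Vars returns the route variables extracted by the matched Route.
func ArticleHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	fmt.Fprintf(w, "category=%s id=%s", vars["category"], vars["id"])
}

func main() {
	r := mux.NewRouter()

	// Chain matchers as described in the deleted route.go doc comments:
	// host and path templates may embed {name} or {name:pattern} variables.
	r.Host("{subdomain:[a-z]+}.example.com").
		Path("/articles/{category}/{id:[0-9]+}").
		Queries("ref", "{ref}").
		Methods("GET").
		HandlerFunc(ArticleHandler).
		Name("article")

	// Reverse routing: Route.URL builds a URL from a named route's variables,
	// validating each value against its pattern.
	u, err := r.Get("article").URL(
		"subdomain", "news",
		"category", "technology",
		"id", "42",
		"ref", "home",
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(u.String()) // e.g. http://news.example.com/articles/technology/42?ref=home

	// The Router itself is an http.Handler and could be served directly.
	_ = http.ListenAndServe(":8080", r)
}
```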
diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go deleted file mode 100644 index 1232809..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/registry.go +++ /dev/null @@ -1,299 +0,0 @@ -package descriptor - -import ( - "fmt" - "path" - "path/filepath" - "strings" - - "github.com/golang/glog" - descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" - plugin "github.com/golang/protobuf/protoc-gen-go/plugin" -) - -// Registry is a registry of information extracted from plugin.CodeGeneratorRequest. -type Registry struct { - // msgs is a mapping from fully-qualified message name to descriptor - msgs map[string]*Message - - // enums is a mapping from fully-qualified enum name to descriptor - enums map[string]*Enum - - // files is a mapping from file path to descriptor - files map[string]*File - - // prefix is a prefix to be inserted to golang package paths generated from proto package names. - prefix string - - // pkgMap is a user-specified mapping from file path to proto package. - pkgMap map[string]string - - // pkgAliases is a mapping from package aliases to package paths in go which are already taken. - pkgAliases map[string]string - - // allowDeleteBody permits http delete methods to have a body - allowDeleteBody bool -} - -// NewRegistry returns a new Registry. -func NewRegistry() *Registry { - return &Registry{ - msgs: make(map[string]*Message), - enums: make(map[string]*Enum), - files: make(map[string]*File), - pkgMap: make(map[string]string), - pkgAliases: make(map[string]string), - } -} - -// Load loads definitions of services, methods, messages, enumerations and fields from "req". -func (r *Registry) Load(req *plugin.CodeGeneratorRequest) error { - for _, file := range req.GetProtoFile() { - r.loadFile(file) - } - - var targetPkg string - for _, name := range req.FileToGenerate { - target := r.files[name] - if target == nil { - return fmt.Errorf("no such file: %s", name) - } - name := packageIdentityName(target.FileDescriptorProto) - if targetPkg == "" { - targetPkg = name - } else { - if targetPkg != name { - return fmt.Errorf("inconsistent package names: %s %s", targetPkg, name) - } - } - - if err := r.loadServices(target); err != nil { - return err - } - } - return nil -} - -// loadFile loads messages, enumerations and fields from "file". -// It does not loads services and methods in "file". You need to call -// loadServices after loadFiles is called for all files to load services and methods. 
-func (r *Registry) loadFile(file *descriptor.FileDescriptorProto) { - pkg := GoPackage{ - Path: r.goPackagePath(file), - Name: defaultGoPackageName(file), - } - if err := r.ReserveGoPackageAlias(pkg.Name, pkg.Path); err != nil { - for i := 0; ; i++ { - alias := fmt.Sprintf("%s_%d", pkg.Name, i) - if err := r.ReserveGoPackageAlias(alias, pkg.Path); err == nil { - pkg.Alias = alias - break - } - } - } - f := &File{ - FileDescriptorProto: file, - GoPkg: pkg, - } - - r.files[file.GetName()] = f - r.registerMsg(f, nil, file.GetMessageType()) - r.registerEnum(f, nil, file.GetEnumType()) -} - -func (r *Registry) registerMsg(file *File, outerPath []string, msgs []*descriptor.DescriptorProto) { - for i, md := range msgs { - m := &Message{ - File: file, - Outers: outerPath, - DescriptorProto: md, - Index: i, - } - for _, fd := range md.GetField() { - m.Fields = append(m.Fields, &Field{ - Message: m, - FieldDescriptorProto: fd, - }) - } - file.Messages = append(file.Messages, m) - r.msgs[m.FQMN()] = m - glog.V(1).Infof("register name: %s", m.FQMN()) - - var outers []string - outers = append(outers, outerPath...) - outers = append(outers, m.GetName()) - r.registerMsg(file, outers, m.GetNestedType()) - r.registerEnum(file, outers, m.GetEnumType()) - } -} - -func (r *Registry) registerEnum(file *File, outerPath []string, enums []*descriptor.EnumDescriptorProto) { - for i, ed := range enums { - e := &Enum{ - File: file, - Outers: outerPath, - EnumDescriptorProto: ed, - Index: i, - } - file.Enums = append(file.Enums, e) - r.enums[e.FQEN()] = e - glog.V(1).Infof("register enum name: %s", e.FQEN()) - } -} - -// LookupMsg looks up a message type by "name". -// It tries to resolve "name" from "location" if "name" is a relative message name. -func (r *Registry) LookupMsg(location, name string) (*Message, error) { - glog.V(1).Infof("lookup %s from %s", name, location) - if strings.HasPrefix(name, ".") { - m, ok := r.msgs[name] - if !ok { - return nil, fmt.Errorf("no message found: %s", name) - } - return m, nil - } - - if !strings.HasPrefix(location, ".") { - location = fmt.Sprintf(".%s", location) - } - components := strings.Split(location, ".") - for len(components) > 0 { - fqmn := strings.Join(append(components, name), ".") - if m, ok := r.msgs[fqmn]; ok { - return m, nil - } - components = components[:len(components)-1] - } - return nil, fmt.Errorf("no message found: %s", name) -} - -// LookupEnum looks up a enum type by "name". -// It tries to resolve "name" from "location" if "name" is a relative enum name. -func (r *Registry) LookupEnum(location, name string) (*Enum, error) { - glog.V(1).Infof("lookup enum %s from %s", name, location) - if strings.HasPrefix(name, ".") { - e, ok := r.enums[name] - if !ok { - return nil, fmt.Errorf("no enum found: %s", name) - } - return e, nil - } - - if !strings.HasPrefix(location, ".") { - location = fmt.Sprintf(".%s", location) - } - components := strings.Split(location, ".") - for len(components) > 0 { - fqen := strings.Join(append(components, name), ".") - if e, ok := r.enums[fqen]; ok { - return e, nil - } - components = components[:len(components)-1] - } - return nil, fmt.Errorf("no enum found: %s", name) -} - -// LookupFile looks up a file by name. -func (r *Registry) LookupFile(name string) (*File, error) { - f, ok := r.files[name] - if !ok { - return nil, fmt.Errorf("no such file given: %s", name) - } - return f, nil -} - -// AddPkgMap adds a mapping from a .proto file to proto package name. 
-func (r *Registry) AddPkgMap(file, protoPkg string) { - r.pkgMap[file] = protoPkg -} - -// SetPrefix registeres the perfix to be added to go package paths generated from proto package names. -func (r *Registry) SetPrefix(prefix string) { - r.prefix = prefix -} - -// ReserveGoPackageAlias reserves the unique alias of go package. -// If succeeded, the alias will be never used for other packages in generated go files. -// If failed, the alias is already taken by another package, so you need to use another -// alias for the package in your go files. -func (r *Registry) ReserveGoPackageAlias(alias, pkgpath string) error { - if taken, ok := r.pkgAliases[alias]; ok { - if taken == pkgpath { - return nil - } - return fmt.Errorf("package name %s is already taken. Use another alias", alias) - } - r.pkgAliases[alias] = pkgpath - return nil -} - -// goPackagePath returns the go package path which go files generated from "f" should have. -// It respects the mapping registered by AddPkgMap if exists. Or use go_package as import path -// if it includes a slash, Otherwide, it generates a path from the file name of "f". -func (r *Registry) goPackagePath(f *descriptor.FileDescriptorProto) string { - name := f.GetName() - if pkg, ok := r.pkgMap[name]; ok { - return path.Join(r.prefix, pkg) - } - - gopkg := f.Options.GetGoPackage() - idx := strings.LastIndex(gopkg, "/") - if idx >= 0 { - return gopkg - } - - return path.Join(r.prefix, path.Dir(name)) -} - -// GetAllFQMNs returns a list of all FQMNs -func (r *Registry) GetAllFQMNs() []string { - var keys []string - for k := range r.msgs { - keys = append(keys, k) - } - return keys -} - -// GetAllFQENs returns a list of all FQENs -func (r *Registry) GetAllFQENs() []string { - var keys []string - for k := range r.enums { - keys = append(keys, k) - } - return keys -} - -// SetAllowDeleteBody controls whether http delete methods may have a -// body or fail loading if encountered. -func (r *Registry) SetAllowDeleteBody(allow bool) { - r.allowDeleteBody = allow -} - -// defaultGoPackageName returns the default go package name to be used for go files generated from "f". -// You might need to use an unique alias for the package when you import it. Use ReserveGoPackageAlias to get a unique alias. -func defaultGoPackageName(f *descriptor.FileDescriptorProto) string { - name := packageIdentityName(f) - return strings.Replace(name, ".", "_", -1) -} - -// packageIdentityName returns the identity of packages. -// protoc-gen-grpc-gateway rejects CodeGenerationRequests which contains more than one packages -// as protoc-gen-go does. 
-func packageIdentityName(f *descriptor.FileDescriptorProto) string { - if f.Options != nil && f.Options.GoPackage != nil { - gopkg := f.Options.GetGoPackage() - idx := strings.LastIndex(gopkg, "/") - if idx < 0 { - return gopkg - } - - return gopkg[idx+1:] - } - - if f.Package == nil { - base := filepath.Base(f.GetName()) - ext := filepath.Ext(base) - return strings.TrimSuffix(base, ext) - } - return f.GetPackage() -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go deleted file mode 100644 index c9dfec8..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/services.go +++ /dev/null @@ -1,266 +0,0 @@ -package descriptor - -import ( - "fmt" - "strings" - - "github.com/golang/glog" - "github.com/golang/protobuf/proto" - descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" - "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule" - options "google.golang.org/genproto/googleapis/api/annotations" -) - -// loadServices registers services and their methods from "targetFile" to "r". -// It must be called after loadFile is called for all files so that loadServices -// can resolve names of message types and their fields. -func (r *Registry) loadServices(file *File) error { - glog.V(1).Infof("Loading services from %s", file.GetName()) - var svcs []*Service - for _, sd := range file.GetService() { - glog.V(2).Infof("Registering %s", sd.GetName()) - svc := &Service{ - File: file, - ServiceDescriptorProto: sd, - } - for _, md := range sd.GetMethod() { - glog.V(2).Infof("Processing %s.%s", sd.GetName(), md.GetName()) - opts, err := extractAPIOptions(md) - if err != nil { - glog.Errorf("Failed to extract ApiMethodOptions from %s.%s: %v", svc.GetName(), md.GetName(), err) - return err - } - if opts == nil { - glog.V(1).Infof("Found non-target method: %s.%s", svc.GetName(), md.GetName()) - } - meth, err := r.newMethod(svc, md, opts) - if err != nil { - return err - } - svc.Methods = append(svc.Methods, meth) - } - if len(svc.Methods) == 0 { - continue - } - glog.V(2).Infof("Registered %s with %d method(s)", svc.GetName(), len(svc.Methods)) - svcs = append(svcs, svc) - } - file.Services = svcs - return nil -} - -func (r *Registry) newMethod(svc *Service, md *descriptor.MethodDescriptorProto, opts *options.HttpRule) (*Method, error) { - requestType, err := r.LookupMsg(svc.File.GetPackage(), md.GetInputType()) - if err != nil { - return nil, err - } - responseType, err := r.LookupMsg(svc.File.GetPackage(), md.GetOutputType()) - if err != nil { - return nil, err - } - meth := &Method{ - Service: svc, - MethodDescriptorProto: md, - RequestType: requestType, - ResponseType: responseType, - } - - newBinding := func(opts *options.HttpRule, idx int) (*Binding, error) { - var ( - httpMethod string - pathTemplate string - ) - switch { - case opts.GetGet() != "": - httpMethod = "GET" - pathTemplate = opts.GetGet() - if opts.Body != "" { - return nil, fmt.Errorf("needs request body even though http method is GET: %s", md.GetName()) - } - - case opts.GetPut() != "": - httpMethod = "PUT" - pathTemplate = opts.GetPut() - - case opts.GetPost() != "": - httpMethod = "POST" - pathTemplate = opts.GetPost() - - case opts.GetDelete() != "": - httpMethod = "DELETE" - pathTemplate = opts.GetDelete() - if opts.Body != "" && !r.allowDeleteBody { - return nil, fmt.Errorf("needs request body even though http 
method is DELETE: %s", md.GetName()) - } - - case opts.GetPatch() != "": - httpMethod = "PATCH" - pathTemplate = opts.GetPatch() - - case opts.GetCustom() != nil: - custom := opts.GetCustom() - httpMethod = custom.Kind - pathTemplate = custom.Path - - default: - glog.V(1).Infof("No pattern specified in google.api.HttpRule: %s", md.GetName()) - return nil, nil - } - - parsed, err := httprule.Parse(pathTemplate) - if err != nil { - return nil, err - } - tmpl := parsed.Compile() - - if md.GetClientStreaming() && len(tmpl.Fields) > 0 { - return nil, fmt.Errorf("cannot use path parameter in client streaming") - } - - b := &Binding{ - Method: meth, - Index: idx, - PathTmpl: tmpl, - HTTPMethod: httpMethod, - } - - for _, f := range tmpl.Fields { - param, err := r.newParam(meth, f) - if err != nil { - return nil, err - } - b.PathParams = append(b.PathParams, param) - } - - // TODO(yugui) Handle query params - - b.Body, err = r.newBody(meth, opts.Body) - if err != nil { - return nil, err - } - - return b, nil - } - b, err := newBinding(opts, 0) - if err != nil { - return nil, err - } - - if b != nil { - meth.Bindings = append(meth.Bindings, b) - } - for i, additional := range opts.GetAdditionalBindings() { - if len(additional.AdditionalBindings) > 0 { - return nil, fmt.Errorf("additional_binding in additional_binding not allowed: %s.%s", svc.GetName(), meth.GetName()) - } - b, err := newBinding(additional, i+1) - if err != nil { - return nil, err - } - meth.Bindings = append(meth.Bindings, b) - } - - return meth, nil -} - -func extractAPIOptions(meth *descriptor.MethodDescriptorProto) (*options.HttpRule, error) { - if meth.Options == nil { - return nil, nil - } - if !proto.HasExtension(meth.Options, options.E_Http) { - return nil, nil - } - ext, err := proto.GetExtension(meth.Options, options.E_Http) - if err != nil { - return nil, err - } - opts, ok := ext.(*options.HttpRule) - if !ok { - return nil, fmt.Errorf("extension is %T; want an HttpRule", ext) - } - return opts, nil -} - -func (r *Registry) newParam(meth *Method, path string) (Parameter, error) { - msg := meth.RequestType - fields, err := r.resolveFiledPath(msg, path) - if err != nil { - return Parameter{}, err - } - l := len(fields) - if l == 0 { - return Parameter{}, fmt.Errorf("invalid field access list for %s", path) - } - target := fields[l-1].Target - switch target.GetType() { - case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_GROUP: - return Parameter{}, fmt.Errorf("aggregate type %s in parameter of %s.%s: %s", target.Type, meth.Service.GetName(), meth.GetName(), path) - } - return Parameter{ - FieldPath: FieldPath(fields), - Method: meth, - Target: fields[l-1].Target, - }, nil -} - -func (r *Registry) newBody(meth *Method, path string) (*Body, error) { - msg := meth.RequestType - switch path { - case "": - return nil, nil - case "*": - return &Body{FieldPath: nil}, nil - } - fields, err := r.resolveFiledPath(msg, path) - if err != nil { - return nil, err - } - return &Body{FieldPath: FieldPath(fields)}, nil -} - -// lookupField looks up a field named "name" within "msg". -// It returns nil if no such field found. -func lookupField(msg *Message, name string) *Field { - for _, f := range msg.Fields { - if f.GetName() == name { - return f - } - } - return nil -} - -// resolveFieldPath resolves "path" into a list of fieldDescriptor, starting from "msg". 
-func (r *Registry) resolveFiledPath(msg *Message, path string) ([]FieldPathComponent, error) { - if path == "" { - return nil, nil - } - - root := msg - var result []FieldPathComponent - for i, c := range strings.Split(path, ".") { - if i > 0 { - f := result[i-1].Target - switch f.GetType() { - case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_GROUP: - var err error - msg, err = r.LookupMsg(msg.FQMN(), f.GetTypeName()) - if err != nil { - return nil, err - } - default: - return nil, fmt.Errorf("not an aggregate type: %s in %s", f.GetName(), path) - } - } - - glog.V(2).Infof("Lookup %s in %s", c, msg.FQMN()) - f := lookupField(msg, c) - if f == nil { - return nil, fmt.Errorf("no field %q found in %s", path, root.GetName()) - } - if f.GetLabel() == descriptor.FieldDescriptorProto_LABEL_REPEATED { - return nil, fmt.Errorf("repeated field not allowed in field path: %s in %s", f.GetName(), path) - } - result = append(result, FieldPathComponent{Name: c, Target: f}) - } - return result, nil -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go deleted file mode 100644 index 248538e..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor/types.go +++ /dev/null @@ -1,322 +0,0 @@ -package descriptor - -import ( - "fmt" - "strings" - - descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" - gogen "github.com/golang/protobuf/protoc-gen-go/generator" - "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule" -) - -// GoPackage represents a golang package -type GoPackage struct { - // Path is the package path to the package. - Path string - // Name is the package name of the package - Name string - // Alias is an alias of the package unique within the current invokation of grpc-gateway generator. - Alias string -} - -// Standard returns whether the import is a golang standard package. -func (p GoPackage) Standard() bool { - return !strings.Contains(p.Path, ".") -} - -// String returns a string representation of this package in the form of import line in golang. -func (p GoPackage) String() string { - if p.Alias == "" { - return fmt.Sprintf("%q", p.Path) - } - return fmt.Sprintf("%s %q", p.Alias, p.Path) -} - -// File wraps descriptor.FileDescriptorProto for richer features. -type File struct { - *descriptor.FileDescriptorProto - // GoPkg is the go package of the go file generated from this file.. - GoPkg GoPackage - // Messages is the list of messages defined in this file. - Messages []*Message - // Enums is the list of enums defined in this file. - Enums []*Enum - // Services is the list of services defined in this file. - Services []*Service -} - -// proto2 determines if the syntax of the file is proto2. -func (f *File) proto2() bool { - return f.Syntax == nil || f.GetSyntax() == "proto2" -} - -// Message describes a protocol buffer message types -type Message struct { - // File is the file where the message is defined - File *File - // Outers is a list of outer messages if this message is a nested type. - Outers []string - *descriptor.DescriptorProto - Fields []*Field - - // Index is proto path index of this message in File. - Index int -} - -// FQMN returns a fully qualified message name of this message. 
-func (m *Message) FQMN() string { - components := []string{""} - if m.File.Package != nil { - components = append(components, m.File.GetPackage()) - } - components = append(components, m.Outers...) - components = append(components, m.GetName()) - return strings.Join(components, ".") -} - -// GoType returns a go type name for the message type. -// It prefixes the type name with the package alias if -// its belonging package is not "currentPackage". -func (m *Message) GoType(currentPackage string) string { - var components []string - components = append(components, m.Outers...) - components = append(components, m.GetName()) - - name := strings.Join(components, "_") - if m.File.GoPkg.Path == currentPackage { - return name - } - pkg := m.File.GoPkg.Name - if alias := m.File.GoPkg.Alias; alias != "" { - pkg = alias - } - return fmt.Sprintf("%s.%s", pkg, name) -} - -// Enum describes a protocol buffer enum types -type Enum struct { - // File is the file where the enum is defined - File *File - // Outers is a list of outer messages if this enum is a nested type. - Outers []string - *descriptor.EnumDescriptorProto - - Index int -} - -// FQEN returns a fully qualified enum name of this enum. -func (e *Enum) FQEN() string { - components := []string{""} - if e.File.Package != nil { - components = append(components, e.File.GetPackage()) - } - components = append(components, e.Outers...) - components = append(components, e.GetName()) - return strings.Join(components, ".") -} - -// Service wraps descriptor.ServiceDescriptorProto for richer features. -type Service struct { - // File is the file where this service is defined. - File *File - *descriptor.ServiceDescriptorProto - // Methods is the list of methods defined in this service. - Methods []*Method -} - -// Method wraps descriptor.MethodDescriptorProto for richer features. -type Method struct { - // Service is the service which this method belongs to. - Service *Service - *descriptor.MethodDescriptorProto - - // RequestType is the message type of requests to this method. - RequestType *Message - // ResponseType is the message type of responses from this method. - ResponseType *Message - Bindings []*Binding -} - -// Binding describes how an HTTP endpoint is bound to a gRPC method. -type Binding struct { - // Method is the method which the endpoint is bound to. - Method *Method - // Index is a zero-origin index of the binding in the target method - Index int - // PathTmpl is path template where this method is mapped to. - PathTmpl httprule.Template - // HTTPMethod is the HTTP method which this method is mapped to. - HTTPMethod string - // PathParams is the list of parameters provided in HTTP request paths. - PathParams []Parameter - // Body describes parameters provided in HTTP request body. - Body *Body -} - -// ExplicitParams returns a list of explicitly bound parameters of "b", -// i.e. a union of field path for body and field paths for path parameters. -func (b *Binding) ExplicitParams() []string { - var result []string - if b.Body != nil { - result = append(result, b.Body.FieldPath.String()) - } - for _, p := range b.PathParams { - result = append(result, p.FieldPath.String()) - } - return result -} - -// Field wraps descriptor.FieldDescriptorProto for richer features. -type Field struct { - // Message is the message type which this field belongs to. - Message *Message - // FieldMessage is the message type of the field. 
- FieldMessage *Message - *descriptor.FieldDescriptorProto -} - -// Parameter is a parameter provided in http requests -type Parameter struct { - // FieldPath is a path to a proto field which this parameter is mapped to. - FieldPath - // Target is the proto field which this parameter is mapped to. - Target *Field - // Method is the method which this parameter is used for. - Method *Method -} - -// ConvertFuncExpr returns a go expression of a converter function. -// The converter function converts a string into a value for the parameter. -func (p Parameter) ConvertFuncExpr() (string, error) { - tbl := proto3ConvertFuncs - if p.Target.Message.File.proto2() { - tbl = proto2ConvertFuncs - } - typ := p.Target.GetType() - conv, ok := tbl[typ] - if !ok { - return "", fmt.Errorf("unsupported field type %s of parameter %s in %s.%s", typ, p.FieldPath, p.Method.Service.GetName(), p.Method.GetName()) - } - return conv, nil -} - -// Body describes a http requtest body to be sent to the method. -type Body struct { - // FieldPath is a path to a proto field which the request body is mapped to. - // The request body is mapped to the request type itself if FieldPath is empty. - FieldPath FieldPath -} - -// RHS returns a right-hand-side expression in go to be used to initialize method request object. -// It starts with "msgExpr", which is the go expression of the method request object. -func (b Body) RHS(msgExpr string) string { - return b.FieldPath.RHS(msgExpr) -} - -// FieldPath is a path to a field from a request message. -type FieldPath []FieldPathComponent - -// String returns a string representation of the field path. -func (p FieldPath) String() string { - var components []string - for _, c := range p { - components = append(components, c.Name) - } - return strings.Join(components, ".") -} - -// IsNestedProto3 indicates whether the FieldPath is a nested Proto3 path. -func (p FieldPath) IsNestedProto3() bool { - if len(p) > 1 && !p[0].Target.Message.File.proto2() { - return true - } - return false -} - -// RHS is a right-hand-side expression in go to be used to assign a value to the target field. -// It starts with "msgExpr", which is the go expression of the method request object. -func (p FieldPath) RHS(msgExpr string) string { - l := len(p) - if l == 0 { - return msgExpr - } - components := []string{msgExpr} - for i, c := range p { - if i == l-1 { - components = append(components, c.RHS()) - continue - } - components = append(components, c.LHS()) - } - return strings.Join(components, ".") -} - -// FieldPathComponent is a path component in FieldPath -type FieldPathComponent struct { - // Name is a name of the proto field which this component corresponds to. - // TODO(yugui) is this necessary? - Name string - // Target is the proto field which this component corresponds to. - Target *Field -} - -// RHS returns a right-hand-side expression in go for this field. -func (c FieldPathComponent) RHS() string { - return gogen.CamelCase(c.Name) -} - -// LHS returns a left-hand-side expression in go for this field. 
-func (c FieldPathComponent) LHS() string { - if c.Target.Message.File.proto2() { - return fmt.Sprintf("Get%s()", gogen.CamelCase(c.Name)) - } - return gogen.CamelCase(c.Name) -} - -var ( - proto3ConvertFuncs = map[descriptor.FieldDescriptorProto_Type]string{ - descriptor.FieldDescriptorProto_TYPE_DOUBLE: "runtime.Float64", - descriptor.FieldDescriptorProto_TYPE_FLOAT: "runtime.Float32", - descriptor.FieldDescriptorProto_TYPE_INT64: "runtime.Int64", - descriptor.FieldDescriptorProto_TYPE_UINT64: "runtime.Uint64", - descriptor.FieldDescriptorProto_TYPE_INT32: "runtime.Int32", - descriptor.FieldDescriptorProto_TYPE_FIXED64: "runtime.Uint64", - descriptor.FieldDescriptorProto_TYPE_FIXED32: "runtime.Uint32", - descriptor.FieldDescriptorProto_TYPE_BOOL: "runtime.Bool", - descriptor.FieldDescriptorProto_TYPE_STRING: "runtime.String", - // FieldDescriptorProto_TYPE_GROUP - // FieldDescriptorProto_TYPE_MESSAGE - // FieldDescriptorProto_TYPE_BYTES - // TODO(yugui) Handle bytes - descriptor.FieldDescriptorProto_TYPE_UINT32: "runtime.Uint32", - // FieldDescriptorProto_TYPE_ENUM - // TODO(yugui) Handle Enum - descriptor.FieldDescriptorProto_TYPE_SFIXED32: "runtime.Int32", - descriptor.FieldDescriptorProto_TYPE_SFIXED64: "runtime.Int64", - descriptor.FieldDescriptorProto_TYPE_SINT32: "runtime.Int32", - descriptor.FieldDescriptorProto_TYPE_SINT64: "runtime.Int64", - } - - proto2ConvertFuncs = map[descriptor.FieldDescriptorProto_Type]string{ - descriptor.FieldDescriptorProto_TYPE_DOUBLE: "runtime.Float64P", - descriptor.FieldDescriptorProto_TYPE_FLOAT: "runtime.Float32P", - descriptor.FieldDescriptorProto_TYPE_INT64: "runtime.Int64P", - descriptor.FieldDescriptorProto_TYPE_UINT64: "runtime.Uint64P", - descriptor.FieldDescriptorProto_TYPE_INT32: "runtime.Int32P", - descriptor.FieldDescriptorProto_TYPE_FIXED64: "runtime.Uint64P", - descriptor.FieldDescriptorProto_TYPE_FIXED32: "runtime.Uint32P", - descriptor.FieldDescriptorProto_TYPE_BOOL: "runtime.BoolP", - descriptor.FieldDescriptorProto_TYPE_STRING: "runtime.StringP", - // FieldDescriptorProto_TYPE_GROUP - // FieldDescriptorProto_TYPE_MESSAGE - // FieldDescriptorProto_TYPE_BYTES - // TODO(yugui) Handle bytes - descriptor.FieldDescriptorProto_TYPE_UINT32: "runtime.Uint32P", - // FieldDescriptorProto_TYPE_ENUM - // TODO(yugui) Handle Enum - descriptor.FieldDescriptorProto_TYPE_SFIXED32: "runtime.Int32P", - descriptor.FieldDescriptorProto_TYPE_SFIXED64: "runtime.Int64P", - descriptor.FieldDescriptorProto_TYPE_SINT32: "runtime.Int32P", - descriptor.FieldDescriptorProto_TYPE_SINT64: "runtime.Int64P", - } -) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/compile.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/compile.go deleted file mode 100644 index 437039a..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/compile.go +++ /dev/null @@ -1,117 +0,0 @@ -package httprule - -import ( - "github.com/grpc-ecosystem/grpc-gateway/utilities" -) - -const ( - opcodeVersion = 1 -) - -// Template is a compiled representation of path templates. -type Template struct { - // Version is the version number of the format. - Version int - // OpCodes is a sequence of operations. - OpCodes []int - // Pool is a constant pool - Pool []string - // Verb is a VERB part in the template. - Verb string - // Fields is a list of field paths bound in this template. 
- Fields []string - // Original template (example: /v1/a_bit_of_everything) - Template string -} - -// Compiler compiles utilities representation of path templates into marshallable operations. -// They can be unmarshalled by runtime.NewPattern. -type Compiler interface { - Compile() Template -} - -type op struct { - // code is the opcode of the operation - code utilities.OpCode - - // str is a string operand of the code. - // num is ignored if str is not empty. - str string - - // num is a numeric operand of the code. - num int -} - -func (w wildcard) compile() []op { - return []op{ - {code: utilities.OpPush}, - } -} - -func (w deepWildcard) compile() []op { - return []op{ - {code: utilities.OpPushM}, - } -} - -func (l literal) compile() []op { - return []op{ - { - code: utilities.OpLitPush, - str: string(l), - }, - } -} - -func (v variable) compile() []op { - var ops []op - for _, s := range v.segments { - ops = append(ops, s.compile()...) - } - ops = append(ops, op{ - code: utilities.OpConcatN, - num: len(v.segments), - }, op{ - code: utilities.OpCapture, - str: v.path, - }) - - return ops -} - -func (t template) Compile() Template { - var rawOps []op - for _, s := range t.segments { - rawOps = append(rawOps, s.compile()...) - } - - var ( - ops []int - pool []string - fields []string - ) - consts := make(map[string]int) - for _, op := range rawOps { - ops = append(ops, int(op.code)) - if op.str == "" { - ops = append(ops, op.num) - } else { - if _, ok := consts[op.str]; !ok { - consts[op.str] = len(pool) - pool = append(pool, op.str) - } - ops = append(ops, consts[op.str]) - } - if op.code == utilities.OpCapture { - fields = append(fields, op.str) - } - } - return Template{ - Version: opcodeVersion, - OpCodes: ops, - Pool: pool, - Verb: t.verb, - Fields: fields, - Template: t.template, - } -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/parse.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/parse.go deleted file mode 100644 index 3be7426..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/parse.go +++ /dev/null @@ -1,351 +0,0 @@ -package httprule - -import ( - "fmt" - "strings" - - "github.com/golang/glog" -) - -// InvalidTemplateError indicates that the path template is not valid. 
-type InvalidTemplateError struct { - tmpl string - msg string -} - -func (e InvalidTemplateError) Error() string { - return fmt.Sprintf("%s: %s", e.msg, e.tmpl) -} - -// Parse parses the string representation of path template -func Parse(tmpl string) (Compiler, error) { - if !strings.HasPrefix(tmpl, "/") { - return template{}, InvalidTemplateError{tmpl: tmpl, msg: "no leading /"} - } - tokens, verb := tokenize(tmpl[1:]) - - p := parser{tokens: tokens} - segs, err := p.topLevelSegments() - if err != nil { - return template{}, InvalidTemplateError{tmpl: tmpl, msg: err.Error()} - } - - return template{ - segments: segs, - verb: verb, - template: tmpl, - }, nil -} - -func tokenize(path string) (tokens []string, verb string) { - if path == "" { - return []string{eof}, "" - } - - const ( - init = iota - field - nested - ) - var ( - st = init - ) - for path != "" { - var idx int - switch st { - case init: - idx = strings.IndexAny(path, "/{") - case field: - idx = strings.IndexAny(path, ".=}") - case nested: - idx = strings.IndexAny(path, "/}") - } - if idx < 0 { - tokens = append(tokens, path) - break - } - switch r := path[idx]; r { - case '/', '.': - case '{': - st = field - case '=': - st = nested - case '}': - st = init - } - if idx == 0 { - tokens = append(tokens, path[idx:idx+1]) - } else { - tokens = append(tokens, path[:idx], path[idx:idx+1]) - } - path = path[idx+1:] - } - - l := len(tokens) - t := tokens[l-1] - if idx := strings.LastIndex(t, ":"); idx == 0 { - tokens, verb = tokens[:l-1], t[1:] - } else if idx > 0 { - tokens[l-1], verb = t[:idx], t[idx+1:] - } - tokens = append(tokens, eof) - return tokens, verb -} - -// parser is a parser of the template syntax defined in github.com/googleapis/googleapis/google/api/http.proto. -type parser struct { - tokens []string - accepted []string -} - -// topLevelSegments is the target of this parser. 
-func (p *parser) topLevelSegments() ([]segment, error) { - glog.V(1).Infof("Parsing %q", p.tokens) - segs, err := p.segments() - if err != nil { - return nil, err - } - glog.V(2).Infof("accept segments: %q; %q", p.accepted, p.tokens) - if _, err := p.accept(typeEOF); err != nil { - return nil, fmt.Errorf("unexpected token %q after segments %q", p.tokens[0], strings.Join(p.accepted, "")) - } - glog.V(2).Infof("accept eof: %q; %q", p.accepted, p.tokens) - return segs, nil -} - -func (p *parser) segments() ([]segment, error) { - s, err := p.segment() - if err != nil { - return nil, err - } - glog.V(2).Infof("accept segment: %q; %q", p.accepted, p.tokens) - - segs := []segment{s} - for { - if _, err := p.accept("/"); err != nil { - return segs, nil - } - s, err := p.segment() - if err != nil { - return segs, err - } - segs = append(segs, s) - glog.V(2).Infof("accept segment: %q; %q", p.accepted, p.tokens) - } -} - -func (p *parser) segment() (segment, error) { - if _, err := p.accept("*"); err == nil { - return wildcard{}, nil - } - if _, err := p.accept("**"); err == nil { - return deepWildcard{}, nil - } - if l, err := p.literal(); err == nil { - return l, nil - } - - v, err := p.variable() - if err != nil { - return nil, fmt.Errorf("segment neither wildcards, literal or variable: %v", err) - } - return v, err -} - -func (p *parser) literal() (segment, error) { - lit, err := p.accept(typeLiteral) - if err != nil { - return nil, err - } - return literal(lit), nil -} - -func (p *parser) variable() (segment, error) { - if _, err := p.accept("{"); err != nil { - return nil, err - } - - path, err := p.fieldPath() - if err != nil { - return nil, err - } - - var segs []segment - if _, err := p.accept("="); err == nil { - segs, err = p.segments() - if err != nil { - return nil, fmt.Errorf("invalid segment in variable %q: %v", path, err) - } - } else { - segs = []segment{wildcard{}} - } - - if _, err := p.accept("}"); err != nil { - return nil, fmt.Errorf("unterminated variable segment: %s", path) - } - return variable{ - path: path, - segments: segs, - }, nil -} - -func (p *parser) fieldPath() (string, error) { - c, err := p.accept(typeIdent) - if err != nil { - return "", err - } - components := []string{c} - for { - if _, err = p.accept("."); err != nil { - return strings.Join(components, "."), nil - } - c, err := p.accept(typeIdent) - if err != nil { - return "", fmt.Errorf("invalid field path component: %v", err) - } - components = append(components, c) - } -} - -// A termType is a type of terminal symbols. -type termType string - -// These constants define some of valid values of termType. -// They improve readability of parse functions. -// -// You can also use "/", "*", "**", "." or "=" as valid values. -const ( - typeIdent = termType("ident") - typeLiteral = termType("literal") - typeEOF = termType("$") -) - -const ( - // eof is the terminal symbol which always appears at the end of token sequence. - eof = "\u0000" -) - -// accept tries to accept a token in "p". -// This function consumes a token and returns it if it matches to the specified "term". -// If it doesn't match, the function does not consume any tokens and return an error. 
-func (p *parser) accept(term termType) (string, error) { - t := p.tokens[0] - switch term { - case "/", "*", "**", ".", "=", "{", "}": - if t != string(term) { - return "", fmt.Errorf("expected %q but got %q", term, t) - } - case typeEOF: - if t != eof { - return "", fmt.Errorf("expected EOF but got %q", t) - } - case typeIdent: - if err := expectIdent(t); err != nil { - return "", err - } - case typeLiteral: - if err := expectPChars(t); err != nil { - return "", err - } - default: - return "", fmt.Errorf("unknown termType %q", term) - } - p.tokens = p.tokens[1:] - p.accepted = append(p.accepted, t) - return t, nil -} - -// expectPChars determines if "t" consists of only pchars defined in RFC3986. -// -// https://www.ietf.org/rfc/rfc3986.txt, P.49 -// pchar = unreserved / pct-encoded / sub-delims / ":" / "@" -// unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" -// sub-delims = "!" / "$" / "&" / "'" / "(" / ")" -// / "*" / "+" / "," / ";" / "=" -// pct-encoded = "%" HEXDIG HEXDIG -func expectPChars(t string) error { - const ( - init = iota - pct1 - pct2 - ) - st := init - for _, r := range t { - if st != init { - if !isHexDigit(r) { - return fmt.Errorf("invalid hexdigit: %c(%U)", r, r) - } - switch st { - case pct1: - st = pct2 - case pct2: - st = init - } - continue - } - - // unreserved - switch { - case 'A' <= r && r <= 'Z': - continue - case 'a' <= r && r <= 'z': - continue - case '0' <= r && r <= '9': - continue - } - switch r { - case '-', '.', '_', '~': - // unreserved - case '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=': - // sub-delims - case ':', '@': - // rest of pchar - case '%': - // pct-encoded - st = pct1 - default: - return fmt.Errorf("invalid character in path segment: %q(%U)", r, r) - } - } - if st != init { - return fmt.Errorf("invalid percent-encoding in %q", t) - } - return nil -} - -// expectIdent determines if "ident" is a valid identifier in .proto schema ([[:alpha:]_][[:alphanum:]_]*). 
-func expectIdent(ident string) error { - if ident == "" { - return fmt.Errorf("empty identifier") - } - for pos, r := range ident { - switch { - case '0' <= r && r <= '9': - if pos == 0 { - return fmt.Errorf("identifier starting with digit: %s", ident) - } - continue - case 'A' <= r && r <= 'Z': - continue - case 'a' <= r && r <= 'z': - continue - case r == '_': - continue - default: - return fmt.Errorf("invalid character %q(%U) in identifier: %s", r, r, ident) - } - } - return nil -} - -func isHexDigit(r rune) bool { - switch { - case '0' <= r && r <= '9': - return true - case 'A' <= r && r <= 'F': - return true - case 'a' <= r && r <= 'f': - return true - } - return false -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/types.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/types.go deleted file mode 100644 index 5a814a0..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule/types.go +++ /dev/null @@ -1,60 +0,0 @@ -package httprule - -import ( - "fmt" - "strings" -) - -type template struct { - segments []segment - verb string - template string -} - -type segment interface { - fmt.Stringer - compile() (ops []op) -} - -type wildcard struct{} - -type deepWildcard struct{} - -type literal string - -type variable struct { - path string - segments []segment -} - -func (wildcard) String() string { - return "*" -} - -func (deepWildcard) String() string { - return "**" -} - -func (l literal) String() string { - return string(l) -} - -func (v variable) String() string { - var segs []string - for _, s := range v.segments { - segs = append(segs, s.String()) - } - return fmt.Sprintf("{%s=%s}", v.path, strings.Join(segs, "/")) -} - -func (t template) String() string { - var segs []string - for _, s := range t.segments { - segs = append(segs, s.String()) - } - str := strings.Join(segs, "/") - if t.verb != "" { - str = fmt.Sprintf("%s:%s", str, t.verb) - } - return "/" + str -} diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/doc.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/doc.go deleted file mode 100644 index cf79a4d..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package utilities provides members for internal use in grpc-gateway. -package utilities diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go deleted file mode 100644 index 28ad946..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/pattern.go +++ /dev/null @@ -1,22 +0,0 @@ -package utilities - -// An OpCode is a opcode of compiled path patterns. -type OpCode int - -// These constants are the valid values of OpCode. -const ( - // OpNop does nothing - OpNop = OpCode(iota) - // OpPush pushes a component to stack - OpPush - // OpLitPush pushes a component to stack if it matches to the literal - OpLitPush - // OpPushM concatenates the remaining components and pushes it to stack - OpPushM - // OpConcatN pops N items from stack, concatenates them and pushes it back to stack - OpConcatN - // OpCapture pops an item and binds it to the variable - OpCapture - // OpEnd is the least postive invalid opcode. 
- OpEnd -) diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/trie.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/trie.go deleted file mode 100644 index c2b7b30..0000000 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/utilities/trie.go +++ /dev/null @@ -1,177 +0,0 @@ -package utilities - -import ( - "sort" -) - -// DoubleArray is a Double Array implementation of trie on sequences of strings. -type DoubleArray struct { - // Encoding keeps an encoding from string to int - Encoding map[string]int - // Base is the base array of Double Array - Base []int - // Check is the check array of Double Array - Check []int -} - -// NewDoubleArray builds a DoubleArray from a set of sequences of strings. -func NewDoubleArray(seqs [][]string) *DoubleArray { - da := &DoubleArray{Encoding: make(map[string]int)} - if len(seqs) == 0 { - return da - } - - encoded := registerTokens(da, seqs) - sort.Sort(byLex(encoded)) - - root := node{row: -1, col: -1, left: 0, right: len(encoded)} - addSeqs(da, encoded, 0, root) - - for i := len(da.Base); i > 0; i-- { - if da.Check[i-1] != 0 { - da.Base = da.Base[:i] - da.Check = da.Check[:i] - break - } - } - return da -} - -func registerTokens(da *DoubleArray, seqs [][]string) [][]int { - var result [][]int - for _, seq := range seqs { - var encoded []int - for _, token := range seq { - if _, ok := da.Encoding[token]; !ok { - da.Encoding[token] = len(da.Encoding) - } - encoded = append(encoded, da.Encoding[token]) - } - result = append(result, encoded) - } - for i := range result { - result[i] = append(result[i], len(da.Encoding)) - } - return result -} - -type node struct { - row, col int - left, right int -} - -func (n node) value(seqs [][]int) int { - return seqs[n.row][n.col] -} - -func (n node) children(seqs [][]int) []*node { - var result []*node - lastVal := int(-1) - last := new(node) - for i := n.left; i < n.right; i++ { - if lastVal == seqs[i][n.col+1] { - continue - } - last.right = i - last = &node{ - row: i, - col: n.col + 1, - left: i, - } - result = append(result, last) - } - last.right = n.right - return result -} - -func addSeqs(da *DoubleArray, seqs [][]int, pos int, n node) { - ensureSize(da, pos) - - children := n.children(seqs) - var i int - for i = 1; ; i++ { - ok := func() bool { - for _, child := range children { - code := child.value(seqs) - j := i + code - ensureSize(da, j) - if da.Check[j] != 0 { - return false - } - } - return true - }() - if ok { - break - } - } - da.Base[pos] = i - for _, child := range children { - code := child.value(seqs) - j := i + code - da.Check[j] = pos + 1 - } - terminator := len(da.Encoding) - for _, child := range children { - code := child.value(seqs) - if code == terminator { - continue - } - j := i + code - addSeqs(da, seqs, j, *child) - } -} - -func ensureSize(da *DoubleArray, i int) { - for i >= len(da.Base) { - da.Base = append(da.Base, make([]int, len(da.Base)+1)...) - da.Check = append(da.Check, make([]int, len(da.Check)+1)...) - } -} - -type byLex [][]int - -func (l byLex) Len() int { return len(l) } -func (l byLex) Swap(i, j int) { l[i], l[j] = l[j], l[i] } -func (l byLex) Less(i, j int) bool { - si := l[i] - sj := l[j] - var k int - for k = 0; k < len(si) && k < len(sj); k++ { - if si[k] < sj[k] { - return true - } - if si[k] > sj[k] { - return false - } - } - if k < len(sj) { - return true - } - return false -} - -// HasCommonPrefix determines if any sequence in the DoubleArray is a prefix of the given sequence. 
-func (da *DoubleArray) HasCommonPrefix(seq []string) bool { - if len(da.Base) == 0 { - return false - } - - var i int - for _, t := range seq { - code, ok := da.Encoding[t] - if !ok { - break - } - j := da.Base[i] + code - if len(da.Check) <= j || da.Check[j] != i+1 { - break - } - i = j - } - j := da.Base[i] + len(da.Encoding) - if len(da.Check) <= j || da.Check[j] != i+1 { - return false - } - return true -} diff --git a/vendor/github.com/huandu/xstrings/.gitignore b/vendor/github.com/huandu/xstrings/.gitignore deleted file mode 100644 index daf913b..0000000 --- a/vendor/github.com/huandu/xstrings/.gitignore +++ /dev/null @@ -1,24 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof diff --git a/vendor/github.com/huandu/xstrings/CONTRIBUTING.md b/vendor/github.com/huandu/xstrings/CONTRIBUTING.md deleted file mode 100644 index d7b4b8d..0000000 --- a/vendor/github.com/huandu/xstrings/CONTRIBUTING.md +++ /dev/null @@ -1,23 +0,0 @@ -# Contributing # - -Thanks for your contribution in advance. No matter what you will contribute to this project, pull request or bug report or feature discussion, it's always highly appreciated. - -## New API or feature ## - -I want to speak more about how to add new functions to this package. - -Package `xstring` is a collection of useful string functions which should be implemented in Go. It's a bit subject to say which function should be included and which should not. I set up following rules in order to make it clear and as objective as possible. - -* Rule 1: Only string algorithm, which takes string as input, can be included. -* Rule 2: If a function has been implemented in package `string`, it must not be included. -* Rule 3: If a function is not language neutral, it must not be included. -* Rule 4: If a function is a part of standard library in other languages, it can be included. -* Rule 5: If a function is quite useful in some famous framework or library, it can be included. - -New function must be discussed in project issues before submitting any code. If a pull request with new functions is sent without any ref issue, it will be rejected. - -## Pull request ## - -Pull request is always welcome. Just make sure you have run `go fmt` and all test cases passed before submit. - -If the pull request is to add a new API or feature, don't forget to update README.md and add new API in function list. diff --git a/vendor/github.com/huandu/xstrings/LICENSE b/vendor/github.com/huandu/xstrings/LICENSE deleted file mode 100644 index 2701772..0000000 --- a/vendor/github.com/huandu/xstrings/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Huan Du - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/vendor/github.com/huandu/xstrings/README.md b/vendor/github.com/huandu/xstrings/README.md deleted file mode 100644 index 7f68f49..0000000 --- a/vendor/github.com/huandu/xstrings/README.md +++ /dev/null @@ -1,114 +0,0 @@ -# xstrings # - -[![Build Status](https://travis-ci.org/huandu/xstrings.png?branch=master)](https://travis-ci.org/huandu/xstrings) -[![GoDoc](https://godoc.org/github.com/huandu/xstrings?status.svg)](https://godoc.org/github.com/huandu/xstrings) - -Go package [xstrings](https://godoc.org/github.com/huandu/xstrings) is a collection of string functions, which are widely used in other languages but absent in Go package [strings](http://golang.org/pkg/strings). - -All functions are well tested and carefully tuned for performance. - -## Propose a new function ## - -Please review [contributing guideline](CONTRIBUTING.md) and [create new issue](https://github.com/huandu/xstrings/issues) to state why it should be included. - -## Install ## - -Use `go get` to install this library. - - go get github.com/huandu/xstrings - -## API document ## - -See [GoDoc](https://godoc.org/github.com/huandu/xstrings) for full document. - -## Function list ## - -Go functions have a unique naming style. One, who has experience in other language but new in Go, may have difficulties to find out right string function to use. - -Here is a list of functions in [strings](http://golang.org/pkg/strings) and [xstrings](https://godoc.org/github.com/huandu/xstrings) with enough extra information about how to map these functions to their friends in other languages. Hope this list could be helpful for fresh gophers. 
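> Editor's note: for context on the vendored `xstrings` package being dropped in this diff, here is a minimal usage sketch. The function signatures and the expected outputs are taken directly from the package's own doc comments later in this diff (convert.go, format.go, translate.go); the snippet assumes the module `github.com/huandu/xstrings` is fetched from upstream rather than from `vendor/`.

```go
package main

import (
	"fmt"

	"github.com/huandu/xstrings"
)

func main() {
	// Expected values below come from the doc-comment samples of the
	// removed package itself.
	fmt.Println(xstrings.ToSnakeCase("HTTPServer"))            // "http_server"
	fmt.Println(xstrings.Translate("hello", "aeiou", "12345")) // "h2ll4"
	fmt.Println(xstrings.Center("hello", 10, "123"))           // "12hello123"
	fmt.Println(xstrings.Successor("1999zzz"))                 // "2000aaa"
}
```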
- -### Package `xstrings` functions ### - -*Keep this table sorted by Function in ascending order.* - -| Function | Friends | # | -| -------- | ------- | --- | -| [Center](https://godoc.org/github.com/huandu/xstrings#Center) | `str.center` in Python; `String#center` in Ruby | [#30](https://github.com/huandu/xstrings/issues/30) | -| [Count](https://godoc.org/github.com/huandu/xstrings#Count) | `String#count` in Ruby | [#16](https://github.com/huandu/xstrings/issues/16) | -| [Delete](https://godoc.org/github.com/huandu/xstrings#Delete) | `String#delete` in Ruby | [#17](https://github.com/huandu/xstrings/issues/17) | -| [ExpandTabs](https://godoc.org/github.com/huandu/xstrings#ExpandTabs) | `str.expandtabs` in Python | [#27](https://github.com/huandu/xstrings/issues/27) | -| [FirstRuneToLower](https://godoc.org/github.com/huandu/xstrings#FirstRuneToLower) | `lcfirst` in PHP or Perl | [#15](https://github.com/huandu/xstrings/issues/15) | -| [FirstRuneToUpper](https://godoc.org/github.com/huandu/xstrings#FirstRuneToUpper) | `String#capitalize` in Ruby; `ucfirst` in PHP or Perl | [#15](https://github.com/huandu/xstrings/issues/15) | -| [Insert](https://godoc.org/github.com/huandu/xstrings#Insert) | `String#insert` in Ruby | [#18](https://github.com/huandu/xstrings/issues/18) | -| [LastPartition](https://godoc.org/github.com/huandu/xstrings#LastPartition) | `str.rpartition` in Python; `String#rpartition` in Ruby | [#19](https://github.com/huandu/xstrings/issues/19) | -| [LeftJustify](https://godoc.org/github.com/huandu/xstrings#LeftJustify) | `str.ljust` in Python; `String#ljust` in Ruby | [#28](https://github.com/huandu/xstrings/issues/28) | -| [Len](https://godoc.org/github.com/huandu/xstrings#Len) | `mb_strlen` in PHP | [#23](https://github.com/huandu/xstrings/issues/23) | -| [Partition](https://godoc.org/github.com/huandu/xstrings#Partition) | `str.partition` in Python; `String#partition` in Ruby | [#10](https://github.com/huandu/xstrings/issues/10) | -| [Reverse](https://godoc.org/github.com/huandu/xstrings#Reverse) | `String#reverse` in Ruby; `strrev` in PHP; `reverse` in Perl | [#7](https://github.com/huandu/xstrings/issues/7) | -| [RightJustify](https://godoc.org/github.com/huandu/xstrings#RightJustify) | `str.rjust` in Python; `String#rjust` in Ruby | [#29](https://github.com/huandu/xstrings/issues/29) | -| [RuneWidth](https://godoc.org/github.com/huandu/xstrings#RuneWidth) | - | [#27](https://github.com/huandu/xstrings/issues/27) | -| [Scrub](https://godoc.org/github.com/huandu/xstrings#Scrub) | `String#scrub` in Ruby | [#20](https://github.com/huandu/xstrings/issues/20) | -| [Shuffle](https://godoc.org/github.com/huandu/xstrings#Shuffle) | `str_shuffle` in PHP | [#13](https://github.com/huandu/xstrings/issues/13) | -| [ShuffleSource](https://godoc.org/github.com/huandu/xstrings#ShuffleSource) | `str_shuffle` in PHP | [#13](https://github.com/huandu/xstrings/issues/13) | -| [Slice](https://godoc.org/github.com/huandu/xstrings#Slice) | `mb_substr` in PHP | [#9](https://github.com/huandu/xstrings/issues/9) | -| [Squeeze](https://godoc.org/github.com/huandu/xstrings#Squeeze) | `String#squeeze` in Ruby | [#11](https://github.com/huandu/xstrings/issues/11) | -| [Successor](https://godoc.org/github.com/huandu/xstrings#Successor) | `String#succ` or `String#next` in Ruby | [#22](https://github.com/huandu/xstrings/issues/22) | -| [SwapCase](https://godoc.org/github.com/huandu/xstrings#SwapCase) | `str.swapcase` in Python; `String#swapcase` in Ruby | 
[#12](https://github.com/huandu/xstrings/issues/12) | -| [ToCamelCase](https://godoc.org/github.com/huandu/xstrings#ToCamelCase) | `String#camelize` in RoR | [#1](https://github.com/huandu/xstrings/issues/1) | -| [ToSnakeCase](https://godoc.org/github.com/huandu/xstrings#ToSnakeCase) | `String#underscore` in RoR | [#1](https://github.com/huandu/xstrings/issues/1) | -| [Translate](https://godoc.org/github.com/huandu/xstrings#Translate) | `str.translate` in Python; `String#tr` in Ruby; `strtr` in PHP; `tr///` in Perl | [#21](https://github.com/huandu/xstrings/issues/21) | -| [Width](https://godoc.org/github.com/huandu/xstrings#Width) | `mb_strwidth` in PHP | [#26](https://github.com/huandu/xstrings/issues/26) | -| [WordCount](https://godoc.org/github.com/huandu/xstrings#WordCount) | `str_word_count` in PHP | [#14](https://github.com/huandu/xstrings/issues/14) | -| [WordSplit](https://godoc.org/github.com/huandu/xstrings#WordSplit) | - | [#14](https://github.com/huandu/xstrings/issues/14) | - -### Package `strings` functions ### - -*Keep this table sorted by Function in ascending order.* - -| Function | Friends | -| -------- | ------- | -| [Contains](http://golang.org/pkg/strings/#Contains) | `String#include?` in Ruby | -| [ContainsAny](http://golang.org/pkg/strings/#ContainsAny) | - | -| [ContainsRune](http://golang.org/pkg/strings/#ContainsRune) | - | -| [Count](http://golang.org/pkg/strings/#Count) | `str.count` in Python; `substr_count` in PHP | -| [EqualFold](http://golang.org/pkg/strings/#EqualFold) | `stricmp` in PHP; `String#casecmp` in Ruby | -| [Fields](http://golang.org/pkg/strings/#Fields) | `str.split` in Python; `split` in Perl; `String#split` in Ruby | -| [FieldsFunc](http://golang.org/pkg/strings/#FieldsFunc) | - | -| [HasPrefix](http://golang.org/pkg/strings/#HasPrefix) | `str.startswith` in Python; `String#start_with?` in Ruby | -| [HasSuffix](http://golang.org/pkg/strings/#HasSuffix) | `str.endswith` in Python; `String#end_with?` in Ruby | -| [Index](http://golang.org/pkg/strings/#Index) | `str.index` in Python; `String#index` in Ruby; `strpos` in PHP; `index` in Perl | -| [IndexAny](http://golang.org/pkg/strings/#IndexAny) | - | -| [IndexByte](http://golang.org/pkg/strings/#IndexByte) | - | -| [IndexFunc](http://golang.org/pkg/strings/#IndexFunc) | - | -| [IndexRune](http://golang.org/pkg/strings/#IndexRune) | - | -| [Join](http://golang.org/pkg/strings/#Join) | `str.join` in Python; `Array#join` in Ruby; `implode` in PHP; `join` in Perl | -| [LastIndex](http://golang.org/pkg/strings/#LastIndex) | `str.rindex` in Python; `String#rindex`; `strrpos` in PHP; `rindex` in Perl | -| [LastIndexAny](http://golang.org/pkg/strings/#LastIndexAny) | - | -| [LastIndexFunc](http://golang.org/pkg/strings/#LastIndexFunc) | - | -| [Map](http://golang.org/pkg/strings/#Map) | `String#each_codepoint` in Ruby | -| [Repeat](http://golang.org/pkg/strings/#Repeat) | operator `*` in Python and Ruby; `str_repeat` in PHP | -| [Replace](http://golang.org/pkg/strings/#Replace) | `str.replace` in Python; `String#sub` in Ruby; `str_replace` in PHP | -| [Split](http://golang.org/pkg/strings/#Split) | `str.split` in Python; `String#split` in Ruby; `explode` in PHP; `split` in Perl | -| [SplitAfter](http://golang.org/pkg/strings/#SplitAfter) | - | -| [SplitAfterN](http://golang.org/pkg/strings/#SplitAfterN) | - | -| [SplitN](http://golang.org/pkg/strings/#SplitN) | `str.split` in Python; `String#split` in Ruby; `explode` in PHP; `split` in Perl | -| [Title](http://golang.org/pkg/strings/#Title) | 
`str.title` in Python | -| [ToLower](http://golang.org/pkg/strings/#ToLower) | `str.lower` in Python; `String#downcase` in Ruby; `strtolower` in PHP; `lc` in Perl | -| [ToLowerSpecial](http://golang.org/pkg/strings/#ToLowerSpecial) | - | -| [ToTitle](http://golang.org/pkg/strings/#ToTitle) | - | -| [ToTitleSpecial](http://golang.org/pkg/strings/#ToTitleSpecial) | - | -| [ToUpper](http://golang.org/pkg/strings/#ToUpper) | `str.upper` in Python; `String#upcase` in Ruby; `strtoupper` in PHP; `uc` in Perl | -| [ToUpperSpecial](http://golang.org/pkg/strings/#ToUpperSpecial) | - | -| [Trim](http://golang.org/pkg/strings/#Trim) | `str.strip` in Python; `String#strip` in Ruby; `trim` in PHP | -| [TrimFunc](http://golang.org/pkg/strings/#TrimFunc) | - | -| [TrimLeft](http://golang.org/pkg/strings/#TrimLeft) | `str.lstrip` in Python; `String#lstrip` in Ruby; `ltrim` in PHP | -| [TrimLeftFunc](http://golang.org/pkg/strings/#TrimLeftFunc) | - | -| [TrimPrefix](http://golang.org/pkg/strings/#TrimPrefix) | - | -| [TrimRight](http://golang.org/pkg/strings/#TrimRight) | `str.rstrip` in Python; `String#rstrip` in Ruby; `rtrim` in PHP | -| [TrimRightFunc](http://golang.org/pkg/strings/#TrimRightFunc) | - | -| [TrimSpace](http://golang.org/pkg/strings/#TrimSpace) | `str.strip` in Python; `String#strip` in Ruby; `trim` in PHP | -| [TrimSuffix](http://golang.org/pkg/strings/#TrimSuffix) | `String#chomp` in Ruby; `chomp` in Perl | - -## License ## - -This library is licensed under MIT license. See LICENSE for details. diff --git a/vendor/github.com/huandu/xstrings/common.go b/vendor/github.com/huandu/xstrings/common.go deleted file mode 100644 index dbfaa2d..0000000 --- a/vendor/github.com/huandu/xstrings/common.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2015 Huan Du. All rights reserved. -// Licensed under the MIT license that can be found in the LICENSE file. - -package xstrings - -import ( - "bytes" -) - -const _BUFFER_INIT_GROW_SIZE_MAX = 2048 - -// Lazy initialize a buffer. -func allocBuffer(orig, cur string) *bytes.Buffer { - output := &bytes.Buffer{} - maxSize := len(orig) * 4 - - // Avoid to reserve too much memory at once. - if maxSize > _BUFFER_INIT_GROW_SIZE_MAX { - maxSize = _BUFFER_INIT_GROW_SIZE_MAX - } - - output.Grow(maxSize) - output.WriteString(orig[:len(orig)-len(cur)]) - return output -} diff --git a/vendor/github.com/huandu/xstrings/convert.go b/vendor/github.com/huandu/xstrings/convert.go deleted file mode 100644 index 78ca34d..0000000 --- a/vendor/github.com/huandu/xstrings/convert.go +++ /dev/null @@ -1,357 +0,0 @@ -// Copyright 2015 Huan Du. All rights reserved. -// Licensed under the MIT license that can be found in the LICENSE file. - -package xstrings - -import ( - "bytes" - "math/rand" - "unicode" - "unicode/utf8" -) - -// ToCamelCase can convert all lower case characters behind underscores -// to upper case character. -// Underscore character will be removed in result except following cases. -// * More than 1 underscore. -// "a__b" => "A_B" -// * At the beginning of string. -// "_a" => "_A" -// * At the end of string. -// "ab_" => "Ab_" -func ToCamelCase(str string) string { - if len(str) == 0 { - return "" - } - - buf := &bytes.Buffer{} - var r0, r1 rune - var size int - - // leading '_' will appear in output. 
- for len(str) > 0 { - r0, size = utf8.DecodeRuneInString(str) - str = str[size:] - - if r0 != '_' { - break - } - - buf.WriteRune(r0) - } - - if len(str) == 0 { - return buf.String() - } - - buf.WriteRune(unicode.ToUpper(r0)) - r0, size = utf8.DecodeRuneInString(str) - str = str[size:] - - for len(str) > 0 { - r1 = r0 - r0, size = utf8.DecodeRuneInString(str) - str = str[size:] - - if r1 == '_' && r0 != '_' { - r0 = unicode.ToUpper(r0) - } else { - buf.WriteRune(r1) - } - } - - buf.WriteRune(r0) - return buf.String() -} - -// ToSnakeCase can convert all upper case characters in a string to -// underscore format. -// -// Some samples. -// "FirstName" => "first_name" -// "HTTPServer" => "http_server" -// "NoHTTPS" => "no_https" -// "GO_PATH" => "go_path" -// "GO PATH" => "go_path" // space is converted to underscore. -// "GO-PATH" => "go_path" // hyphen is converted to underscore. -func ToSnakeCase(str string) string { - if len(str) == 0 { - return "" - } - - buf := &bytes.Buffer{} - var prev, r0, r1 rune - var size int - - r0 = '_' - - for len(str) > 0 { - prev = r0 - r0, size = utf8.DecodeRuneInString(str) - str = str[size:] - - switch { - case r0 == utf8.RuneError: - buf.WriteByte(byte(str[0])) - - case unicode.IsUpper(r0): - if prev != '_' { - buf.WriteRune('_') - } - - buf.WriteRune(unicode.ToLower(r0)) - - if len(str) == 0 { - break - } - - r0, size = utf8.DecodeRuneInString(str) - str = str[size:] - - if !unicode.IsUpper(r0) { - buf.WriteRune(r0) - break - } - - // find next non-upper-case character and insert `_` properly. - // it's designed to convert `HTTPServer` to `http_server`. - // if there are more than 2 adjacent upper case characters in a word, - // treat them as an abbreviation plus a normal word. - for len(str) > 0 { - r1 = r0 - r0, size = utf8.DecodeRuneInString(str) - str = str[size:] - - if r0 == utf8.RuneError { - buf.WriteRune(unicode.ToLower(r1)) - buf.WriteByte(byte(str[0])) - break - } - - if !unicode.IsUpper(r0) { - if r0 == '_' || r0 == ' ' || r0 == '-' { - r0 = '_' - - buf.WriteRune(unicode.ToLower(r1)) - } else { - buf.WriteRune('_') - buf.WriteRune(unicode.ToLower(r1)) - buf.WriteRune(r0) - } - - break - } - - buf.WriteRune(unicode.ToLower(r1)) - } - - if len(str) == 0 || r0 == '_' { - buf.WriteRune(unicode.ToLower(r0)) - break - } - - default: - if r0 == ' ' || r0 == '-' { - r0 = '_' - } - - buf.WriteRune(r0) - } - } - - return buf.String() -} - -// SwapCase will swap characters case from upper to lower or lower to upper. -func SwapCase(str string) string { - var r rune - var size int - - buf := &bytes.Buffer{} - - for len(str) > 0 { - r, size = utf8.DecodeRuneInString(str) - - switch { - case unicode.IsUpper(r): - buf.WriteRune(unicode.ToLower(r)) - - case unicode.IsLower(r): - buf.WriteRune(unicode.ToUpper(r)) - - default: - buf.WriteRune(r) - } - - str = str[size:] - } - - return buf.String() -} - -// FirstRuneToUpper converts first rune to upper case if necessary. -func FirstRuneToUpper(str string) string { - if str == "" { - return str - } - - r, size := utf8.DecodeRuneInString(str) - - if !unicode.IsLower(r) { - return str - } - - buf := &bytes.Buffer{} - buf.WriteRune(unicode.ToUpper(r)) - buf.WriteString(str[size:]) - return buf.String() -} - -// FirstRuneToLower converts first rune to lower case if necessary. 
-func FirstRuneToLower(str string) string { - if str == "" { - return str - } - - r, size := utf8.DecodeRuneInString(str) - - if !unicode.IsUpper(r) { - return str - } - - buf := &bytes.Buffer{} - buf.WriteRune(unicode.ToLower(r)) - buf.WriteString(str[size:]) - return buf.String() -} - -// Shuffle randomizes runes in a string and returns the result. -// It uses default random source in `math/rand`. -func Shuffle(str string) string { - if str == "" { - return str - } - - runes := []rune(str) - index := 0 - - for i := len(runes) - 1; i > 0; i-- { - index = rand.Intn(i + 1) - - if i != index { - runes[i], runes[index] = runes[index], runes[i] - } - } - - return string(runes) -} - -// ShuffleSource randomizes runes in a string with given random source. -func ShuffleSource(str string, src rand.Source) string { - if str == "" { - return str - } - - runes := []rune(str) - index := 0 - r := rand.New(src) - - for i := len(runes) - 1; i > 0; i-- { - index = r.Intn(i + 1) - - if i != index { - runes[i], runes[index] = runes[index], runes[i] - } - } - - return string(runes) -} - -// Successor returns the successor to string. -// -// If there is one alphanumeric rune is found in string, increase the rune by 1. -// If increment generates a "carry", the rune to the left of it is incremented. -// This process repeats until there is no carry, adding an additional rune if necessary. -// -// If there is no alphanumeric rune, the rightmost rune will be increased by 1 -// regardless whether the result is a valid rune or not. -// -// Only following characters are alphanumeric. -// * a - z -// * A - Z -// * 0 - 9 -// -// Samples (borrowed from ruby's String#succ document): -// "abcd" => "abce" -// "THX1138" => "THX1139" -// "<>" => "<>" -// "1999zzz" => "2000aaa" -// "ZZZ9999" => "AAAA0000" -// "***" => "**+" -func Successor(str string) string { - if str == "" { - return str - } - - var r rune - var i int - carry := ' ' - runes := []rune(str) - l := len(runes) - lastAlphanumeric := l - - for i = l - 1; i >= 0; i-- { - r = runes[i] - - if ('a' <= r && r <= 'y') || - ('A' <= r && r <= 'Y') || - ('0' <= r && r <= '8') { - runes[i]++ - carry = ' ' - lastAlphanumeric = i - break - } - - switch r { - case 'z': - runes[i] = 'a' - carry = 'a' - lastAlphanumeric = i - - case 'Z': - runes[i] = 'A' - carry = 'A' - lastAlphanumeric = i - - case '9': - runes[i] = '0' - carry = '0' - lastAlphanumeric = i - } - } - - // Needs to add one character for carry. - if i < 0 && carry != ' ' { - buf := &bytes.Buffer{} - buf.Grow(l + 4) // Reserve enough space for write. - - if lastAlphanumeric != 0 { - buf.WriteString(str[:lastAlphanumeric]) - } - - buf.WriteRune(carry) - - for _, r = range runes[lastAlphanumeric:] { - buf.WriteRune(r) - } - - return buf.String() - } - - // No alphanumeric character. Simply increase last rune's value. - if lastAlphanumeric == l { - runes[l-1]++ - } - - return string(runes) -} diff --git a/vendor/github.com/huandu/xstrings/count.go b/vendor/github.com/huandu/xstrings/count.go deleted file mode 100644 index abf3214..0000000 --- a/vendor/github.com/huandu/xstrings/count.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2015 Huan Du. All rights reserved. -// Licensed under the MIT license that can be found in the LICENSE file. - -package xstrings - -import ( - "unicode" - "unicode/utf8" -) - -// Get str's utf8 rune length. -func Len(str string) int { - return utf8.RuneCountInString(str) -} - -// Count number of words in a string. 
-// -// Word is defined as a locale dependent string containing alphabetic characters, -// which may also contain but not start with `'` and `-` characters. -func WordCount(str string) int { - var r rune - var size, n int - - inWord := false - - for len(str) > 0 { - r, size = utf8.DecodeRuneInString(str) - - switch { - case isAlphabet(r): - if !inWord { - inWord = true - n++ - } - - case inWord && (r == '\'' || r == '-'): - // Still in word. - - default: - inWord = false - } - - str = str[size:] - } - - return n -} - -const minCJKCharacter = '\u3400' - -// Checks r is a letter but not CJK character. -func isAlphabet(r rune) bool { - if !unicode.IsLetter(r) { - return false - } - - switch { - // Quick check for non-CJK character. - case r < minCJKCharacter: - return true - - // Common CJK characters. - case r >= '\u4E00' && r <= '\u9FCC': - return false - - // Rare CJK characters. - case r >= '\u3400' && r <= '\u4D85': - return false - - // Rare and historic CJK characters. - case r >= '\U00020000' && r <= '\U0002B81D': - return false - } - - return true -} - -// Width returns string width in monotype font. -// Multi-byte characters are usually twice the width of single byte characters. -// -// Algorithm comes from `mb_strwidth` in PHP. -// http://php.net/manual/en/function.mb-strwidth.php -func Width(str string) int { - var r rune - var size, n int - - for len(str) > 0 { - r, size = utf8.DecodeRuneInString(str) - n += RuneWidth(r) - str = str[size:] - } - - return n -} - -// RuneWidth returns character width in monotype font. -// Multi-byte characters are usually twice the width of single byte characters. -// -// Algorithm comes from `mb_strwidth` in PHP. -// http://php.net/manual/en/function.mb-strwidth.php -func RuneWidth(r rune) int { - switch { - case r == utf8.RuneError || r < '\x20': - return 0 - - case '\x20' <= r && r < '\u2000': - return 1 - - case '\u2000' <= r && r < '\uFF61': - return 2 - - case '\uFF61' <= r && r < '\uFFA0': - return 1 - - case '\uFFA0' <= r: - return 2 - } - - return 0 -} diff --git a/vendor/github.com/huandu/xstrings/doc.go b/vendor/github.com/huandu/xstrings/doc.go deleted file mode 100644 index fab0ba3..0000000 --- a/vendor/github.com/huandu/xstrings/doc.go +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright 2015 Huan Du. All rights reserved. -// Licensed under the MIT license that can be found in the LICENSE file. - -// Package `xstrings` is to provide string algorithms which are useful but not included in `strings` package. -// See project home page for details. https://github.com/huandu/xstrings -// -// Package `xstrings` assumes all strings are encoded in utf8. -package xstrings diff --git a/vendor/github.com/huandu/xstrings/format.go b/vendor/github.com/huandu/xstrings/format.go deleted file mode 100644 index 2d02df1..0000000 --- a/vendor/github.com/huandu/xstrings/format.go +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright 2015 Huan Du. All rights reserved. -// Licensed under the MIT license that can be found in the LICENSE file. - -package xstrings - -import ( - "bytes" - "unicode/utf8" -) - -// ExpandTabs can expand tabs ('\t') rune in str to one or more spaces dpending on -// current column and tabSize. -// The column number is reset to zero after each newline ('\n') occurring in the str. -// -// ExpandTabs uses RuneWidth to decide rune's width. -// For example, CJK characters will be treated as two characters. -// -// If tabSize <= 0, ExpandTabs panics with error. 
-// -// Samples: -// ExpandTabs("a\tbc\tdef\tghij\tk", 4) => "a bc def ghij k" -// ExpandTabs("abcdefg\thij\nk\tl", 4) => "abcdefg hij\nk l" -// ExpandTabs("z中\t文\tw", 4) => "z中 文 w" -func ExpandTabs(str string, tabSize int) string { - if tabSize <= 0 { - panic("tab size must be positive") - } - - var r rune - var i, size, column, expand int - var output *bytes.Buffer - - orig := str - - for len(str) > 0 { - r, size = utf8.DecodeRuneInString(str) - - if r == '\t' { - expand = tabSize - column%tabSize - - if output == nil { - output = allocBuffer(orig, str) - } - - for i = 0; i < expand; i++ { - output.WriteByte(byte(' ')) - } - - column += expand - } else { - if r == '\n' { - column = 0 - } else { - column += RuneWidth(r) - } - - if output != nil { - output.WriteRune(r) - } - } - - str = str[size:] - } - - if output == nil { - return orig - } - - return output.String() -} - -// LeftJustify returns a string with pad string at right side if str's rune length is smaller than length. -// If str's rune length is larger than length, str itself will be returned. -// -// If pad is an empty string, str will be returned. -// -// Samples: -// LeftJustify("hello", 4, " ") => "hello" -// LeftJustify("hello", 10, " ") => "hello " -// LeftJustify("hello", 10, "123") => "hello12312" -func LeftJustify(str string, length int, pad string) string { - l := Len(str) - - if l >= length || pad == "" { - return str - } - - remains := length - l - padLen := Len(pad) - - output := &bytes.Buffer{} - output.Grow(len(str) + (remains/padLen+1)*len(pad)) - output.WriteString(str) - writePadString(output, pad, padLen, remains) - return output.String() -} - -// RightJustify returns a string with pad string at left side if str's rune length is smaller than length. -// If str's rune length is larger than length, str itself will be returned. -// -// If pad is an empty string, str will be returned. -// -// Samples: -// RightJustify("hello", 4, " ") => "hello" -// RightJustify("hello", 10, " ") => " hello" -// RightJustify("hello", 10, "123") => "12312hello" -func RightJustify(str string, length int, pad string) string { - l := Len(str) - - if l >= length || pad == "" { - return str - } - - remains := length - l - padLen := Len(pad) - - output := &bytes.Buffer{} - output.Grow(len(str) + (remains/padLen+1)*len(pad)) - writePadString(output, pad, padLen, remains) - output.WriteString(str) - return output.String() -} - -// Center returns a string with pad string at both side if str's rune length is smaller than length. -// If str's rune length is larger than length, str itself will be returned. -// -// If pad is an empty string, str will be returned. 
-// -// Samples: -// Center("hello", 4, " ") => "hello" -// Center("hello", 10, " ") => " hello " -// Center("hello", 10, "123") => "12hello123" -func Center(str string, length int, pad string) string { - l := Len(str) - - if l >= length || pad == "" { - return str - } - - remains := length - l - padLen := Len(pad) - - output := &bytes.Buffer{} - output.Grow(len(str) + (remains/padLen+1)*len(pad)) - writePadString(output, pad, padLen, remains/2) - output.WriteString(str) - writePadString(output, pad, padLen, (remains+1)/2) - return output.String() -} - -func writePadString(output *bytes.Buffer, pad string, padLen, remains int) { - var r rune - var size int - - repeats := remains / padLen - - for i := 0; i < repeats; i++ { - output.WriteString(pad) - } - - remains = remains % padLen - - if remains != 0 { - for i := 0; i < remains; i++ { - r, size = utf8.DecodeRuneInString(pad) - output.WriteRune(r) - pad = pad[size:] - } - } -} diff --git a/vendor/github.com/huandu/xstrings/manipulate.go b/vendor/github.com/huandu/xstrings/manipulate.go deleted file mode 100644 index 4114f96..0000000 --- a/vendor/github.com/huandu/xstrings/manipulate.go +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright 2015 Huan Du. All rights reserved. -// Licensed under the MIT license that can be found in the LICENSE file. - -package xstrings - -import ( - "bytes" - "strings" - "unicode/utf8" -) - -// Reverse a utf8 encoded string. -func Reverse(str string) string { - var size int - - tail := len(str) - buf := make([]byte, tail) - s := buf - - for len(str) > 0 { - _, size = utf8.DecodeRuneInString(str) - tail -= size - s = append(s[:tail], []byte(str[:size])...) - str = str[size:] - } - - return string(buf) -} - -// Slice a string by rune. -// -// Start must satisfy 0 <= start <= rune length. -// -// End can be positive, zero or negative. -// If end >= 0, start and end must satisfy start <= end <= rune length. -// If end < 0, it means slice to the end of string. -// -// Otherwise, Slice will panic as out of range. -func Slice(str string, start, end int) string { - var size, startPos, endPos int - - origin := str - - if start < 0 || end > len(str) || (end >= 0 && start > end) { - panic("out of range") - } - - if end >= 0 { - end -= start - } - - for start > 0 && len(str) > 0 { - _, size = utf8.DecodeRuneInString(str) - start-- - startPos += size - str = str[size:] - } - - if end < 0 { - return origin[startPos:] - } - - endPos = startPos - - for end > 0 && len(str) > 0 { - _, size = utf8.DecodeRuneInString(str) - end-- - endPos += size - str = str[size:] - } - - if len(str) == 0 && (start > 0 || end > 0) { - panic("out of range") - } - - return origin[startPos:endPos] -} - -// Partition splits a string by sep into three parts. -// The return value is a slice of strings with head, match and tail. -// -// If str contains sep, for example "hello" and "l", Partition returns -// "he", "l", "lo" -// -// If str doesn't contain sep, for example "hello" and "x", Partition returns -// "hello", "", "" -func Partition(str, sep string) (head, match, tail string) { - index := strings.Index(str, sep) - - if index == -1 { - head = str - return - } - - head = str[:index] - match = str[index : index+len(sep)] - tail = str[index+len(sep):] - return -} - -// LastPartition splits a string by last instance of sep into three parts. -// The return value is a slice of strings with head, match and tail. 
-// -// If str contains sep, for example "hello" and "l", LastPartition returns -// "hel", "l", "o" -// -// If str doesn't contain sep, for example "hello" and "x", LastPartition returns -// "", "", "hello" -func LastPartition(str, sep string) (head, match, tail string) { - index := strings.LastIndex(str, sep) - - if index == -1 { - tail = str - return - } - - head = str[:index] - match = str[index : index+len(sep)] - tail = str[index+len(sep):] - return -} - -// Insert src into dst at given rune index. -// Index is counted by runes instead of bytes. -// -// If index is out of range of dst, panic with out of range. -func Insert(dst, src string, index int) string { - return Slice(dst, 0, index) + src + Slice(dst, index, -1) -} - -// Scrubs invalid utf8 bytes with repl string. -// Adjacent invalid bytes are replaced only once. -func Scrub(str, repl string) string { - var buf *bytes.Buffer - var r rune - var size, pos int - var hasError bool - - origin := str - - for len(str) > 0 { - r, size = utf8.DecodeRuneInString(str) - - if r == utf8.RuneError { - if !hasError { - if buf == nil { - buf = &bytes.Buffer{} - } - - buf.WriteString(origin[:pos]) - hasError = true - } - } else if hasError { - hasError = false - buf.WriteString(repl) - - origin = origin[pos:] - pos = 0 - } - - pos += size - str = str[size:] - } - - if buf != nil { - buf.WriteString(origin) - return buf.String() - } - - // No invalid byte. - return origin -} - -// Splits a string into words. Returns a slice of words. -// If there is no word in a string, return nil. -// -// Word is defined as a locale dependent string containing alphabetic characters, -// which may also contain but not start with `'` and `-` characters. -func WordSplit(str string) []string { - var word string - var words []string - var r rune - var size, pos int - - inWord := false - - for len(str) > 0 { - r, size = utf8.DecodeRuneInString(str) - - switch { - case isAlphabet(r): - if !inWord { - inWord = true - word = str - pos = 0 - } - - case inWord && (r == '\'' || r == '-'): - // Still in word. - - default: - if inWord { - inWord = false - words = append(words, word[:pos]) - } - } - - pos += size - str = str[size:] - } - - if inWord { - words = append(words, word[:pos]) - } - - return words -} diff --git a/vendor/github.com/huandu/xstrings/translate.go b/vendor/github.com/huandu/xstrings/translate.go deleted file mode 100644 index bf6df38..0000000 --- a/vendor/github.com/huandu/xstrings/translate.go +++ /dev/null @@ -1,545 +0,0 @@ -// Copyright 2015 Huan Du. All rights reserved. -// Licensed under the MIT license that can be found in the LICENSE file. - -package xstrings - -import ( - "bytes" - "unicode" - "unicode/utf8" -) - -type runeRangeMap struct { - FromLo rune // Lower bound of range map. - FromHi rune // An inclusive higher bound of range map. - ToLo rune - ToHi rune -} - -type runeDict struct { - Dict [unicode.MaxASCII + 1]rune -} - -type runeMap map[rune]rune - -// Translator can translate string with pre-compiled from and to patterns. -// If a from/to pattern pair needs to be used more than once, it's recommended -// to create a Translator and reuse it. -type Translator struct { - quickDict *runeDict // A quick dictionary to look up rune by index. Only availabe for latin runes. - runeMap runeMap // Rune map for translation. - ranges []*runeRangeMap // Ranges of runes. - mappedRune rune // If mappedRune >= 0, all matched runes are translated to the mappedRune. - reverted bool // If to pattern is empty, all matched characters will be deleted. 
- hasPattern bool -} - -// NewTranslator creates new Translator through a from/to pattern pair. -func NewTranslator(from, to string) *Translator { - tr := &Translator{} - - if from == "" { - return tr - } - - reverted := from[0] == '^' - deletion := len(to) == 0 - - if reverted { - from = from[1:] - } - - var fromStart, fromEnd, fromRangeStep rune - var toStart, toEnd, toRangeStep rune - var fromRangeSize, toRangeSize rune - var singleRunes []rune - - // Update the to rune range. - updateRange := func() { - // No more rune to read in the to rune pattern. - if toEnd == utf8.RuneError { - return - } - - if toRangeStep == 0 { - to, toStart, toEnd, toRangeStep = nextRuneRange(to, toEnd) - return - } - - // Current range is not empty. Consume 1 rune from start. - if toStart != toEnd { - toStart += toRangeStep - return - } - - // No more rune. Repeat the last rune. - if to == "" { - toEnd = utf8.RuneError - return - } - - // Both start and end are used. Read two more runes from the to pattern. - to, toStart, toEnd, toRangeStep = nextRuneRange(to, utf8.RuneError) - } - - if deletion { - toStart = utf8.RuneError - toEnd = utf8.RuneError - } else { - // If from pattern is reverted, only the last rune in the to pattern will be used. - if reverted { - var size int - - for len(to) > 0 { - toStart, size = utf8.DecodeRuneInString(to) - to = to[size:] - } - - toEnd = utf8.RuneError - } else { - to, toStart, toEnd, toRangeStep = nextRuneRange(to, utf8.RuneError) - } - } - - fromEnd = utf8.RuneError - - for len(from) > 0 { - from, fromStart, fromEnd, fromRangeStep = nextRuneRange(from, fromEnd) - - // fromStart is a single character. Just map it with a rune in the to pattern. - if fromRangeStep == 0 { - singleRunes = tr.addRune(fromStart, toStart, singleRunes) - updateRange() - continue - } - - for toEnd != utf8.RuneError && fromStart != fromEnd { - // If mapped rune is a single character instead of a range, simply shift first - // rune in the range. - if toRangeStep == 0 { - singleRunes = tr.addRune(fromStart, toStart, singleRunes) - updateRange() - fromStart += fromRangeStep - continue - } - - fromRangeSize = (fromEnd - fromStart) * fromRangeStep - toRangeSize = (toEnd - toStart) * toRangeStep - - // Not enough runes in the to pattern. Need to read more. - if fromRangeSize > toRangeSize { - fromStart, toStart = tr.addRuneRange(fromStart, fromStart+toRangeSize*fromRangeStep, toStart, toEnd, singleRunes) - fromStart += fromRangeStep - updateRange() - - // Edge case: If fromRangeSize == toRangeSize + 1, the last fromStart value needs be considered - // as a single rune. - if fromStart == fromEnd { - singleRunes = tr.addRune(fromStart, toStart, singleRunes) - updateRange() - } - - continue - } - - fromStart, toStart = tr.addRuneRange(fromStart, fromEnd, toStart, toStart+fromRangeSize*toRangeStep, singleRunes) - updateRange() - break - } - - if fromStart == fromEnd { - fromEnd = utf8.RuneError - continue - } - - fromStart, toStart = tr.addRuneRange(fromStart, fromEnd, toStart, toStart, singleRunes) - fromEnd = utf8.RuneError - } - - if fromEnd != utf8.RuneError { - singleRunes = tr.addRune(fromEnd, toStart, singleRunes) - } - - tr.reverted = reverted - tr.mappedRune = -1 - tr.hasPattern = true - - // Translate RuneError only if in deletion or reverted mode. 
- if deletion || reverted { - tr.mappedRune = toStart - } - - return tr -} - -func (tr *Translator) addRune(from, to rune, singleRunes []rune) []rune { - if from <= unicode.MaxASCII { - if tr.quickDict == nil { - tr.quickDict = &runeDict{} - } - - tr.quickDict.Dict[from] = to - } else { - if tr.runeMap == nil { - tr.runeMap = make(runeMap) - } - - tr.runeMap[from] = to - } - - singleRunes = append(singleRunes, from) - return singleRunes -} - -func (tr *Translator) addRuneRange(fromLo, fromHi, toLo, toHi rune, singleRunes []rune) (rune, rune) { - var r rune - var rrm *runeRangeMap - - if fromLo < fromHi { - rrm = &runeRangeMap{ - FromLo: fromLo, - FromHi: fromHi, - ToLo: toLo, - ToHi: toHi, - } - } else { - rrm = &runeRangeMap{ - FromLo: fromHi, - FromHi: fromLo, - ToLo: toHi, - ToHi: toLo, - } - } - - // If there is any single rune conflicts with this rune range, clear single rune record. - for _, r = range singleRunes { - if rrm.FromLo <= r && r <= rrm.FromHi { - if r <= unicode.MaxASCII { - tr.quickDict.Dict[r] = 0 - } else { - delete(tr.runeMap, r) - } - } - } - - tr.ranges = append(tr.ranges, rrm) - return fromHi, toHi -} - -func nextRuneRange(str string, last rune) (remaining string, start, end rune, rangeStep rune) { - var r rune - var size int - - remaining = str - escaping := false - isRange := false - - for len(remaining) > 0 { - r, size = utf8.DecodeRuneInString(remaining) - remaining = remaining[size:] - - // Parse special characters. - if !escaping { - if r == '\\' { - escaping = true - continue - } - - if r == '-' { - // Ignore slash at beginning of string. - if last == utf8.RuneError { - continue - } - - start = last - isRange = true - continue - } - } - - escaping = false - - if last != utf8.RuneError { - // This is a range which start and end are the same. - // Considier it as a normal character. - if isRange && last == r { - isRange = false - continue - } - - start = last - end = r - - if isRange { - if start < end { - rangeStep = 1 - } else { - rangeStep = -1 - } - } - - return - } - - last = r - } - - start = last - end = utf8.RuneError - return -} - -// Translate str with a from/to pattern pair. -// -// See comment in Translate function for usage and samples. -func (tr *Translator) Translate(str string) string { - if !tr.hasPattern || str == "" { - return str - } - - var r rune - var size int - var needTr bool - - orig := str - - var output *bytes.Buffer - - for len(str) > 0 { - r, size = utf8.DecodeRuneInString(str) - r, needTr = tr.TranslateRune(r) - - if needTr && output == nil { - output = allocBuffer(orig, str) - } - - if r != utf8.RuneError && output != nil { - output.WriteRune(r) - } - - str = str[size:] - } - - // No character is translated. - if output == nil { - return orig - } - - return output.String() -} - -// TranslateRune return translated rune and true if r matches the from pattern. -// If r doesn't match the pattern, original r is returned and translated is false. 
-func (tr *Translator) TranslateRune(r rune) (result rune, translated bool) { - switch { - case tr.quickDict != nil: - if r <= unicode.MaxASCII { - result = tr.quickDict.Dict[r] - - if result != 0 { - translated = true - - if tr.mappedRune >= 0 { - result = tr.mappedRune - } - - break - } - } - - fallthrough - - case tr.runeMap != nil: - var ok bool - - if result, ok = tr.runeMap[r]; ok { - translated = true - - if tr.mappedRune >= 0 { - result = tr.mappedRune - } - - break - } - - fallthrough - - default: - var rrm *runeRangeMap - ranges := tr.ranges - - for i := len(ranges) - 1; i >= 0; i-- { - rrm = ranges[i] - - if rrm.FromLo <= r && r <= rrm.FromHi { - translated = true - - if tr.mappedRune >= 0 { - result = tr.mappedRune - break - } - - if rrm.ToLo < rrm.ToHi { - result = rrm.ToLo + r - rrm.FromLo - } else if rrm.ToLo > rrm.ToHi { - // ToHi can be smaller than ToLo if range is from higher to lower. - result = rrm.ToLo - r + rrm.FromLo - } else { - result = rrm.ToLo - } - - break - } - } - } - - if tr.reverted { - if !translated { - result = tr.mappedRune - } - - translated = !translated - } - - if !translated { - result = r - } - - return -} - -// HasPattern returns true if Translator has one pattern at least. -func (tr *Translator) HasPattern() bool { - return tr.hasPattern -} - -// Translate str with the characters defined in from replaced by characters defined in to. -// -// From and to are patterns representing a set of characters. Pattern is defined as following. -// -// * Special characters -// * '-' means a range of runes, e.g. -// * "a-z" means all characters from 'a' to 'z' inclusive; -// * "z-a" means all characters from 'z' to 'a' inclusive. -// * '^' as first character means a set of all runes excepted listed, e.g. -// * "^a-z" means all characters except 'a' to 'z' inclusive. -// * '\' escapes special characters. -// * Normal character represents itself, e.g. "abc" is a set including 'a', 'b' and 'c'. -// -// Translate will try to find a 1:1 mapping from from to to. -// If to is smaller than from, last rune in to will be used to map "out of range" characters in from. -// -// Note that '^' only works in the from pattern. It will be considered as a normal character in the to pattern. -// -// If the to pattern is an empty string, Translate works exactly the same as Delete. -// -// Samples: -// Translate("hello", "aeiou", "12345") => "h2ll4" -// Translate("hello", "a-z", "A-Z") => "HELLO" -// Translate("hello", "z-a", "a-z") => "svool" -// Translate("hello", "aeiou", "*") => "h*ll*" -// Translate("hello", "^l", "*") => "**ll*" -// Translate("hello ^ world", `\^lo`, "*") => "he*** * w*r*d" -func Translate(str, from, to string) string { - tr := NewTranslator(from, to) - return tr.Translate(str) -} - -// Delete runes in str matching the pattern. -// Pattern is defined in Translate function. -// -// Samples: -// Delete("hello", "aeiou") => "hll" -// Delete("hello", "a-k") => "llo" -// Delete("hello", "^a-k") => "he" -func Delete(str, pattern string) string { - tr := NewTranslator(pattern, "") - return tr.Translate(str) -} - -// Count how many runes in str match the pattern. -// Pattern is defined in Translate function. 
-// -// Samples: -// Count("hello", "aeiou") => 3 -// Count("hello", "a-k") => 3 -// Count("hello", "^a-k") => 2 -func Count(str, pattern string) int { - if pattern == "" || str == "" { - return 0 - } - - var r rune - var size int - var matched bool - - tr := NewTranslator(pattern, "") - cnt := 0 - - for len(str) > 0 { - r, size = utf8.DecodeRuneInString(str) - str = str[size:] - - if _, matched = tr.TranslateRune(r); matched { - cnt++ - } - } - - return cnt -} - -// Squeeze deletes adjacent repeated runes in str. -// If pattern is not empty, only runes matching the pattern will be squeezed. -// -// Samples: -// Squeeze("hello", "") => "helo" -// Squeeze("hello", "m-z") => "hello" -func Squeeze(str, pattern string) string { - var last, r rune - var size int - var skipSqueeze, matched bool - var tr *Translator - var output *bytes.Buffer - - orig := str - last = -1 - - if len(pattern) > 0 { - tr = NewTranslator(pattern, "") - } - - for len(str) > 0 { - r, size = utf8.DecodeRuneInString(str) - - // Need to squeeze the str. - if last == r && !skipSqueeze { - if tr != nil { - if _, matched = tr.TranslateRune(r); !matched { - skipSqueeze = true - } - } - - if output == nil { - output = allocBuffer(orig, str) - } - - if skipSqueeze { - output.WriteRune(r) - } - } else { - if output != nil { - output.WriteRune(r) - } - - last = r - } - - str = str[size:] - } - - if output == nil { - return orig - } - - return output.String() -} diff --git a/vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md b/vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md deleted file mode 100644 index 469b449..0000000 --- a/vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,46 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. - -## Our Standards - -Examples of behavior that contributes to creating a positive environment include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 
- -## Scope - -This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at i@dario.im. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] - -[homepage]: http://contributor-covenant.org -[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/imdario/mergo/LICENSE b/vendor/github.com/imdario/mergo/LICENSE deleted file mode 100644 index 6866802..0000000 --- a/vendor/github.com/imdario/mergo/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2013 Dario Castañé. All rights reserved. -Copyright (c) 2012 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/imdario/mergo/README.md b/vendor/github.com/imdario/mergo/README.md deleted file mode 100644 index b131069..0000000 --- a/vendor/github.com/imdario/mergo/README.md +++ /dev/null @@ -1,141 +0,0 @@ -# Mergo - -A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements. 
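
For readers scanning this removal, here is a minimal sketch of the default-filling use case that tagline refers to, written against the `mergo.Merge` API documented in the rest of this README (the `Config` type and its field names are hypothetical, introduced only for illustration):

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

// Config is a hypothetical configuration struct used only for this example.
type Config struct {
	Host string
	Port int
}

func main() {
	// User-supplied config; Port is left at its zero value.
	cfg := Config{Host: "localhost"}
	// Defaults to fall back to for any zero-value field.
	defaults := Config{Host: "0.0.0.0", Port: 8080}

	// Merge fills only the zero-value fields of cfg from defaults,
	// so Host stays "localhost" and Port becomes 8080.
	if err := mergo.Merge(&cfg, defaults); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("%+v\n", cfg) // {Host:localhost Port:8080}
}
```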
- -Also a lovely [comune](http://en.wikipedia.org/wiki/Mergo) (municipality) in the Province of Ancona in the Italian region Marche. - -![Mergo dall'alto](http://www.comune.mergo.an.it/Siti/Mergo/Immagini/Foto/mergo_dall_alto.jpg) - -## Status - -It is ready for production use. It works fine after extensive use in the wild. - -[![Build Status][1]][2] -[![GoDoc][3]][4] -[![GoCard][5]][6] - -[1]: https://travis-ci.org/imdario/mergo.png -[2]: https://travis-ci.org/imdario/mergo -[3]: https://godoc.org/github.com/imdario/mergo?status.svg -[4]: https://godoc.org/github.com/imdario/mergo -[5]: https://goreportcard.com/badge/imdario/mergo -[6]: https://goreportcard.com/report/github.com/imdario/mergo - -### Important note - -Mergo is intended to assign **only** zero value fields on destination with source value. Since April 6th it works like this. Before it didn't work properly, causing some random overwrites. After some issues and PRs I found it didn't merge as I designed it. Thanks to [imdario/mergo#8](https://github.com/imdario/mergo/pull/8) overwriting functions were added and the wrong behavior was clearly detected. - -If you were using Mergo **before** April 6th 2015, please check your project works as intended after updating your local copy with ```go get -u github.com/imdario/mergo```. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause (I hope it won't!) in existing projects after the change (release 0.2.0). - -### Mergo in the wild - -- [docker/docker](https://github.com/docker/docker/) -- [kubernetes/kubernetes](https://github.com/kubernetes/kubernetes) -- [imdario/zas](https://github.com/imdario/zas) -- [soniah/dnsmadeeasy](https://github.com/soniah/dnsmadeeasy) -- [EagerIO/Stout](https://github.com/EagerIO/Stout) -- [lynndylanhurley/defsynth-api](https://github.com/lynndylanhurley/defsynth-api) -- [russross/canvasassignments](https://github.com/russross/canvasassignments) -- [rdegges/cryptly-api](https://github.com/rdegges/cryptly-api) -- [casualjim/exeggutor](https://github.com/casualjim/exeggutor) -- [divshot/gitling](https://github.com/divshot/gitling) -- [RWJMurphy/gorl](https://github.com/RWJMurphy/gorl) -- [andrerocker/deploy42](https://github.com/andrerocker/deploy42) -- [elwinar/rambler](https://github.com/elwinar/rambler) -- [tmaiaroto/gopartman](https://github.com/tmaiaroto/gopartman) -- [jfbus/impressionist](https://github.com/jfbus/impressionist) -- [Jmeyering/zealot](https://github.com/Jmeyering/zealot) -- [godep-migrator/rigger-host](https://github.com/godep-migrator/rigger-host) -- [Dronevery/MultiwaySwitch-Go](https://github.com/Dronevery/MultiwaySwitch-Go) -- [thoas/picfit](https://github.com/thoas/picfit) -- [mantasmatelis/whooplist-server](https://github.com/mantasmatelis/whooplist-server) -- [jnuthong/item_search](https://github.com/jnuthong/item_search) -- [Iris Web Framework](https://github.com/kataras/iris) - -## Installation - - go get github.com/imdario/mergo - - // use in your .go code - import ( - "github.com/imdario/mergo" - ) - -## Usage - -You can only merge same-type structs with exported fields initialized as zero value of their type and same-types maps. Mergo won't merge unexported (private) fields but will do recursively any exported one. Also maps will be merged recursively except for structs inside maps (because they are not addressable using Go reflection). - -```go -if err := mergo.Merge(&dst, src); err != nil { - // ... -} -``` - -Also, you can merge overwriting values using MergeWithOverwrite. 
- -```go -if err := mergo.MergeWithOverwrite(&dst, src); err != nil { - // ... -} -``` - -Additionally, you can map a map[string]interface{} to a struct (and otherwise, from struct to map), following the same restrictions as in Merge(). Keys are capitalized to find each corresponding exported field. - -```go -if err := mergo.Map(&dst, srcMap); err != nil { - // ... -} -``` - -Warning: if you map a struct to map, it won't do it recursively. Don't expect Mergo to map struct members of your struct as map[string]interface{}. They will be just assigned as values. - -More information and examples in [godoc documentation](http://godoc.org/github.com/imdario/mergo). - -### Nice example - -```go -package main - -import ( - "fmt" - "github.com/imdario/mergo" -) - -type Foo struct { - A string - B int64 -} - -func main() { - src := Foo{ - A: "one", - B: 2, - } - - dest := Foo{ - A: "two", - } - - mergo.Merge(&dest, src) - - fmt.Println(dest) - // Will print - // {two 2} -} -``` - -Note: if test are failing due missing package, please execute: - - go get gopkg.in/yaml.v1 - -## Contact me - -If I can help you, you have an idea or you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): [@im_dario](https://twitter.com/im_dario) - -## About - -Written by [Dario Castañé](http://dario.im). - -## License - -[BSD 3-Clause](http://opensource.org/licenses/BSD-3-Clause) license, as [Go language](http://golang.org/LICENSE). diff --git a/vendor/github.com/imdario/mergo/doc.go b/vendor/github.com/imdario/mergo/doc.go deleted file mode 100644 index 6e9aa7b..0000000 --- a/vendor/github.com/imdario/mergo/doc.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2013 Dario Castañé. All rights reserved. -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package mergo merges same-type structs and maps by setting default values in zero-value fields. - -Mergo won't merge unexported (private) fields but will do recursively any exported one. It also won't merge structs inside maps (because they are not addressable using Go reflection). - -Usage - -From my own work-in-progress project: - - type networkConfig struct { - Protocol string - Address string - ServerType string `json: "server_type"` - Port uint16 - } - - type FssnConfig struct { - Network networkConfig - } - - var fssnDefault = FssnConfig { - networkConfig { - "tcp", - "127.0.0.1", - "http", - 31560, - }, - } - - // Inside a function [...] - - if err := mergo.Merge(&config, fssnDefault); err != nil { - log.Fatal(err) - } - - // More code [...] - -*/ -package mergo diff --git a/vendor/github.com/imdario/mergo/map.go b/vendor/github.com/imdario/mergo/map.go deleted file mode 100644 index 9900256..0000000 --- a/vendor/github.com/imdario/mergo/map.go +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2014 Dario Castañé. All rights reserved. -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Based on src/pkg/reflect/deepequal.go from official -// golang's stdlib. 
- -package mergo - -import ( - "fmt" - "reflect" - "unicode" - "unicode/utf8" -) - -func changeInitialCase(s string, mapper func(rune) rune) string { - if s == "" { - return s - } - r, n := utf8.DecodeRuneInString(s) - return string(mapper(r)) + s[n:] -} - -func isExported(field reflect.StructField) bool { - r, _ := utf8.DecodeRuneInString(field.Name) - return r >= 'A' && r <= 'Z' -} - -// Traverses recursively both values, assigning src's fields values to dst. -// The map argument tracks comparisons that have already been seen, which allows -// short circuiting on recursive types. -func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, overwrite bool) (err error) { - if dst.CanAddr() { - addr := dst.UnsafeAddr() - h := 17 * addr - seen := visited[h] - typ := dst.Type() - for p := seen; p != nil; p = p.next { - if p.ptr == addr && p.typ == typ { - return nil - } - } - // Remember, remember... - visited[h] = &visit{addr, typ, seen} - } - zeroValue := reflect.Value{} - switch dst.Kind() { - case reflect.Map: - dstMap := dst.Interface().(map[string]interface{}) - for i, n := 0, src.NumField(); i < n; i++ { - srcType := src.Type() - field := srcType.Field(i) - if !isExported(field) { - continue - } - fieldName := field.Name - fieldName = changeInitialCase(fieldName, unicode.ToLower) - if v, ok := dstMap[fieldName]; !ok || (isEmptyValue(reflect.ValueOf(v)) || overwrite) { - dstMap[fieldName] = src.Field(i).Interface() - } - } - case reflect.Ptr: - if dst.IsNil() { - v := reflect.New(dst.Type().Elem()) - dst.Set(v) - } - dst = dst.Elem() - fallthrough - case reflect.Struct: - srcMap := src.Interface().(map[string]interface{}) - for key := range srcMap { - srcValue := srcMap[key] - fieldName := changeInitialCase(key, unicode.ToUpper) - dstElement := dst.FieldByName(fieldName) - if dstElement == zeroValue { - // We discard it because the field doesn't exist. - continue - } - srcElement := reflect.ValueOf(srcValue) - dstKind := dstElement.Kind() - srcKind := srcElement.Kind() - if srcKind == reflect.Ptr && dstKind != reflect.Ptr { - srcElement = srcElement.Elem() - srcKind = reflect.TypeOf(srcElement.Interface()).Kind() - } else if dstKind == reflect.Ptr { - // Can this work? I guess it can't. - if srcKind != reflect.Ptr && srcElement.CanAddr() { - srcPtr := srcElement.Addr() - srcElement = reflect.ValueOf(srcPtr) - srcKind = reflect.Ptr - } - } - - if !srcElement.IsValid() { - continue - } - if srcKind == dstKind { - if err = deepMerge(dstElement, srcElement, visited, depth+1, overwrite); err != nil { - return - } - } else if dstKind == reflect.Interface && dstElement.Kind() == reflect.Interface { - if err = deepMerge(dstElement, srcElement, visited, depth+1, overwrite); err != nil { - return - } - } else if srcKind == reflect.Map { - if err = deepMap(dstElement, srcElement, visited, depth+1, overwrite); err != nil { - return - } - } else { - return fmt.Errorf("type mismatch on %s field: found %v, expected %v", fieldName, srcKind, dstKind) - } - } - } - return -} - -// Map sets fields' values in dst from src. -// src can be a map with string keys or a struct. dst must be the opposite: -// if src is a map, dst must be a valid pointer to struct. If src is a struct, -// dst must be map[string]interface{}. -// It won't merge unexported (private) fields and will do recursively -// any exported field. -// If dst is a map, keys will be src fields' names in lower camel case. -// Missing key in src that doesn't match a field in dst will be skipped. 
This -// doesn't apply if dst is a map. -// This is separated method from Merge because it is cleaner and it keeps sane -// semantics: merging equal types, mapping different (restricted) types. -func Map(dst, src interface{}) error { - return _map(dst, src, false) -} - -// MapWithOverwrite will do the same as Map except that non-empty dst attributes will be overriden by -// non-empty src attribute values. -func MapWithOverwrite(dst, src interface{}) error { - return _map(dst, src, true) -} - -func _map(dst, src interface{}, overwrite bool) error { - var ( - vDst, vSrc reflect.Value - err error - ) - if vDst, vSrc, err = resolveValues(dst, src); err != nil { - return err - } - // To be friction-less, we redirect equal-type arguments - // to deepMerge. Only because arguments can be anything. - if vSrc.Kind() == vDst.Kind() { - return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, overwrite) - } - switch vSrc.Kind() { - case reflect.Struct: - if vDst.Kind() != reflect.Map { - return ErrExpectedMapAsDestination - } - case reflect.Map: - if vDst.Kind() != reflect.Struct { - return ErrExpectedStructAsDestination - } - default: - return ErrNotSupported - } - return deepMap(vDst, vSrc, make(map[uintptr]*visit), 0, overwrite) -} diff --git a/vendor/github.com/imdario/mergo/merge.go b/vendor/github.com/imdario/mergo/merge.go deleted file mode 100644 index 052b9fe..0000000 --- a/vendor/github.com/imdario/mergo/merge.go +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2013 Dario Castañé. All rights reserved. -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Based on src/pkg/reflect/deepequal.go from official -// golang's stdlib. - -package mergo - -import ( - "reflect" -) - -func hasExportedField(dst reflect.Value) (exported bool) { - for i, n := 0, dst.NumField(); i < n; i++ { - field := dst.Type().Field(i) - if field.Anonymous { - exported = exported || hasExportedField(dst.Field(i)) - } else { - exported = exported || len(field.PkgPath) == 0 - } - } - return -} - -// Traverses recursively both values, assigning src's fields values to dst. -// The map argument tracks comparisons that have already been seen, which allows -// short circuiting on recursive types. -func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, overwrite bool) (err error) { - if !src.IsValid() { - return - } - if dst.CanAddr() { - addr := dst.UnsafeAddr() - h := 17 * addr - seen := visited[h] - typ := dst.Type() - for p := seen; p != nil; p = p.next { - if p.ptr == addr && p.typ == typ { - return nil - } - } - // Remember, remember... 
- visited[h] = &visit{addr, typ, seen} - } - switch dst.Kind() { - case reflect.Struct: - if hasExportedField(dst) { - for i, n := 0, dst.NumField(); i < n; i++ { - if err = deepMerge(dst.Field(i), src.Field(i), visited, depth+1, overwrite); err != nil { - return - } - } - } else { - if dst.CanSet() && !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) { - dst.Set(src) - } - } - case reflect.Map: - if len(src.MapKeys()) == 0 && !src.IsNil() && len(dst.MapKeys()) == 0 { - dst.Set(reflect.MakeMap(dst.Type())) - return - } - for _, key := range src.MapKeys() { - srcElement := src.MapIndex(key) - if !srcElement.IsValid() { - continue - } - dstElement := dst.MapIndex(key) - switch srcElement.Kind() { - case reflect.Chan, reflect.Func, reflect.Map, reflect.Ptr, reflect.Interface, reflect.Slice: - if srcElement.IsNil() { - continue - } - fallthrough - default: - if !srcElement.CanInterface() { - continue - } - switch reflect.TypeOf(srcElement.Interface()).Kind() { - case reflect.Struct: - fallthrough - case reflect.Ptr: - fallthrough - case reflect.Map: - if err = deepMerge(dstElement, srcElement, visited, depth+1, overwrite); err != nil { - return - } - } - } - if dstElement.IsValid() && reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map { - continue - } - - if !isEmptyValue(srcElement) && (overwrite || (!dstElement.IsValid() || isEmptyValue(dst))) { - if dst.IsNil() { - dst.Set(reflect.MakeMap(dst.Type())) - } - dst.SetMapIndex(key, srcElement) - } - } - case reflect.Ptr: - fallthrough - case reflect.Interface: - if src.Kind() != reflect.Interface { - if dst.IsNil() || overwrite { - if dst.CanSet() && (overwrite || isEmptyValue(dst)) { - dst.Set(src) - } - } else if src.Kind() == reflect.Ptr { - if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, overwrite); err != nil { - return - } - } else if dst.Elem().Type() == src.Type() { - if err = deepMerge(dst.Elem(), src, visited, depth+1, overwrite); err != nil { - return - } - } else { - return ErrDifferentArgumentsTypes - } - break - } - if src.IsNil() { - break - } else if dst.IsNil() || overwrite { - if dst.CanSet() && (overwrite || isEmptyValue(dst)) { - dst.Set(src) - } - } else if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, overwrite); err != nil { - return - } - default: - if dst.CanSet() && !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) { - dst.Set(src) - } - } - return -} - -// Merge will fill any empty for value type attributes on the dst struct using corresponding -// src attributes if they themselves are not empty. dst and src must be valid same-type structs -// and dst must be a pointer to struct. -// It won't merge unexported (private) fields and will do recursively any exported field. -func Merge(dst, src interface{}) error { - return merge(dst, src, false) -} - -// MergeWithOverwrite will do the same as Merge except that non-empty dst attributes will be overriden by -// non-empty src attribute values. 
-func MergeWithOverwrite(dst, src interface{}) error { - return merge(dst, src, true) -} - -func merge(dst, src interface{}, overwrite bool) error { - var ( - vDst, vSrc reflect.Value - err error - ) - if vDst, vSrc, err = resolveValues(dst, src); err != nil { - return err - } - if vDst.Type() != vSrc.Type() { - return ErrDifferentArgumentsTypes - } - return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, overwrite) -} diff --git a/vendor/github.com/imdario/mergo/mergo.go b/vendor/github.com/imdario/mergo/mergo.go deleted file mode 100644 index 79ccdf5..0000000 --- a/vendor/github.com/imdario/mergo/mergo.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2013 Dario Castañé. All rights reserved. -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Based on src/pkg/reflect/deepequal.go from official -// golang's stdlib. - -package mergo - -import ( - "errors" - "reflect" -) - -// Errors reported by Mergo when it finds invalid arguments. -var ( - ErrNilArguments = errors.New("src and dst must not be nil") - ErrDifferentArgumentsTypes = errors.New("src and dst must be of same type") - ErrNotSupported = errors.New("only structs and maps are supported") - ErrExpectedMapAsDestination = errors.New("dst was expected to be a map") - ErrExpectedStructAsDestination = errors.New("dst was expected to be a struct") -) - -// During deepMerge, must keep track of checks that are -// in progress. The comparison algorithm assumes that all -// checks in progress are true when it reencounters them. -// Visited are stored in a map indexed by 17 * a1 + a2; -type visit struct { - ptr uintptr - typ reflect.Type - next *visit -} - -// From src/pkg/encoding/json. -func isEmptyValue(v reflect.Value) bool { - switch v.Kind() { - case reflect.Array, reflect.Map, reflect.Slice, reflect.String: - return v.Len() == 0 - case reflect.Bool: - return !v.Bool() - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.Interface, reflect.Ptr, reflect.Func: - return v.IsNil() - } - return false -} - -func resolveValues(dst, src interface{}) (vDst, vSrc reflect.Value, err error) { - if dst == nil || src == nil { - err = ErrNilArguments - return - } - vDst = reflect.ValueOf(dst).Elem() - if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map { - err = ErrNotSupported - return - } - vSrc = reflect.ValueOf(src) - // We check if vSrc is a pointer to dereference it. - if vSrc.Kind() == reflect.Ptr { - vSrc = vSrc.Elem() - } - return -} - -// Traverses recursively both values, assigning src's fields values to dst. -// The map argument tracks comparisons that have already been seen, which allows -// short circuiting on recursive types. -func deeper(dst, src reflect.Value, visited map[uintptr]*visit, depth int) (err error) { - if dst.CanAddr() { - addr := dst.UnsafeAddr() - h := 17 * addr - seen := visited[h] - typ := dst.Type() - for p := seen; p != nil; p = p.next { - if p.ptr == addr && p.typ == typ { - return nil - } - } - // Remember, remember... 
- visited[h] = &visit{addr, typ, seen} - } - return // TODO refactor -} diff --git a/vendor/github.com/satori/go.uuid/LICENSE b/vendor/github.com/satori/go.uuid/LICENSE deleted file mode 100644 index 488357b..0000000 --- a/vendor/github.com/satori/go.uuid/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (C) 2013-2016 by Maxim Bublis - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/satori/go.uuid/README.md b/vendor/github.com/satori/go.uuid/README.md deleted file mode 100644 index b6aad1c..0000000 --- a/vendor/github.com/satori/go.uuid/README.md +++ /dev/null @@ -1,65 +0,0 @@ -# UUID package for Go language - -[![Build Status](https://travis-ci.org/satori/go.uuid.png?branch=master)](https://travis-ci.org/satori/go.uuid) -[![Coverage Status](https://coveralls.io/repos/github/satori/go.uuid/badge.svg?branch=master)](https://coveralls.io/github/satori/go.uuid) -[![GoDoc](http://godoc.org/github.com/satori/go.uuid?status.png)](http://godoc.org/github.com/satori/go.uuid) - -This package provides pure Go implementation of Universally Unique Identifier (UUID). Supported both creation and parsing of UUIDs. - -With 100% test coverage and benchmarks out of box. - -Supported versions: -* Version 1, based on timestamp and MAC address (RFC 4122) -* Version 2, based on timestamp, MAC address and POSIX UID/GID (DCE 1.1) -* Version 3, based on MD5 hashing (RFC 4122) -* Version 4, based on random numbers (RFC 4122) -* Version 5, based on SHA-1 hashing (RFC 4122) - -## Installation - -Use the `go` command: - - $ go get github.com/satori/go.uuid - -## Requirements - -UUID package requires Go >= 1.2. - -## Example - -```go -package main - -import ( - "fmt" - "github.com/satori/go.uuid" -) - -func main() { - // Creating UUID Version 4 - u1 := uuid.NewV4() - fmt.Printf("UUIDv4: %s\n", u1) - - // Parsing UUID from string input - u2, err := uuid.FromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - if err != nil { - fmt.Printf("Something gone wrong: %s", err) - } - fmt.Printf("Successfully parsed: %s", u2) -} -``` - -## Documentation - -[Documentation](http://godoc.org/github.com/satori/go.uuid) is hosted at GoDoc project. - -## Links -* [RFC 4122](http://tools.ietf.org/html/rfc4122) -* [DCE 1.1: Authentication and Security Services](http://pubs.opengroup.org/onlinepubs/9696989899/chap5.htm#tagcjh_08_02_01_01) - -## Copyright - -Copyright (C) 2013-2016 by Maxim Bublis . - -UUID package released under MIT License. -See [LICENSE](https://github.com/satori/go.uuid/blob/master/LICENSE) for details. 
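
As a complement to the README example above (which covers v4 creation and parsing), here is a minimal sketch of the namespace-based constructors listed under "Supported versions", using only identifiers defined in the `uuid.go` file removed below (`NewV3`, `NewV5`, `NamespaceDNS`, `Equal`):

```go
package main

import (
	"fmt"

	uuid "github.com/satori/go.uuid"
)

func main() {
	// Version 5 (SHA-1) and version 3 (MD5) UUIDs are deterministic:
	// hashing the same namespace UUID and name always yields the same value.
	u5 := uuid.NewV5(uuid.NamespaceDNS, "example.org")
	u3 := uuid.NewV3(uuid.NamespaceDNS, "example.org")
	fmt.Printf("UUIDv5: %s\n", u5)
	fmt.Printf("UUIDv3: %s\n", u3)

	// Repeating the call with the same inputs reproduces the same UUID.
	fmt.Println(uuid.Equal(u5, uuid.NewV5(uuid.NamespaceDNS, "example.org"))) // true
}
```

Both constructors are deleted along with the rest of the vendored package in the hunks that follow.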
diff --git a/vendor/github.com/satori/go.uuid/uuid.go b/vendor/github.com/satori/go.uuid/uuid.go deleted file mode 100644 index 9c7fbaa..0000000 --- a/vendor/github.com/satori/go.uuid/uuid.go +++ /dev/null @@ -1,488 +0,0 @@ -// Copyright (C) 2013-2015 by Maxim Bublis -// -// Permission is hereby granted, free of charge, to any person obtaining -// a copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to -// permit persons to whom the Software is furnished to do so, subject to -// the following conditions: -// -// The above copyright notice and this permission notice shall be -// included in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -// Package uuid provides implementation of Universally Unique Identifier (UUID). -// Supported versions are 1, 3, 4 and 5 (as specified in RFC 4122) and -// version 2 (as specified in DCE 1.1). -package uuid - -import ( - "bytes" - "crypto/md5" - "crypto/rand" - "crypto/sha1" - "database/sql/driver" - "encoding/binary" - "encoding/hex" - "fmt" - "hash" - "net" - "os" - "sync" - "time" -) - -// UUID layout variants. -const ( - VariantNCS = iota - VariantRFC4122 - VariantMicrosoft - VariantFuture -) - -// UUID DCE domains. -const ( - DomainPerson = iota - DomainGroup - DomainOrg -) - -// Difference in 100-nanosecond intervals between -// UUID epoch (October 15, 1582) and Unix epoch (January 1, 1970). -const epochStart = 122192928000000000 - -// Used in string method conversion -const dash byte = '-' - -// UUID v1/v2 storage. -var ( - storageMutex sync.Mutex - storageOnce sync.Once - epochFunc = unixTimeFunc - clockSequence uint16 - lastTime uint64 - hardwareAddr [6]byte - posixUID = uint32(os.Getuid()) - posixGID = uint32(os.Getgid()) -) - -// String parse helpers. -var ( - urnPrefix = []byte("urn:uuid:") - byteGroups = []int{8, 4, 4, 4, 12} -) - -func initClockSequence() { - buf := make([]byte, 2) - safeRandom(buf) - clockSequence = binary.BigEndian.Uint16(buf) -} - -func initHardwareAddr() { - interfaces, err := net.Interfaces() - if err == nil { - for _, iface := range interfaces { - if len(iface.HardwareAddr) >= 6 { - copy(hardwareAddr[:], iface.HardwareAddr) - return - } - } - } - - // Initialize hardwareAddr randomly in case - // of real network interfaces absence - safeRandom(hardwareAddr[:]) - - // Set multicast bit as recommended in RFC 4122 - hardwareAddr[0] |= 0x01 -} - -func initStorage() { - initClockSequence() - initHardwareAddr() -} - -func safeRandom(dest []byte) { - if _, err := rand.Read(dest); err != nil { - panic(err) - } -} - -// Returns difference in 100-nanosecond intervals between -// UUID epoch (October 15, 1582) and current time. -// This is default epoch calculation function. 
-func unixTimeFunc() uint64 { - return epochStart + uint64(time.Now().UnixNano()/100) -} - -// UUID representation compliant with specification -// described in RFC 4122. -type UUID [16]byte - -// NullUUID can be used with the standard sql package to represent a -// UUID value that can be NULL in the database -type NullUUID struct { - UUID UUID - Valid bool -} - -// The nil UUID is special form of UUID that is specified to have all -// 128 bits set to zero. -var Nil = UUID{} - -// Predefined namespace UUIDs. -var ( - NamespaceDNS, _ = FromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - NamespaceURL, _ = FromString("6ba7b811-9dad-11d1-80b4-00c04fd430c8") - NamespaceOID, _ = FromString("6ba7b812-9dad-11d1-80b4-00c04fd430c8") - NamespaceX500, _ = FromString("6ba7b814-9dad-11d1-80b4-00c04fd430c8") -) - -// And returns result of binary AND of two UUIDs. -func And(u1 UUID, u2 UUID) UUID { - u := UUID{} - for i := 0; i < 16; i++ { - u[i] = u1[i] & u2[i] - } - return u -} - -// Or returns result of binary OR of two UUIDs. -func Or(u1 UUID, u2 UUID) UUID { - u := UUID{} - for i := 0; i < 16; i++ { - u[i] = u1[i] | u2[i] - } - return u -} - -// Equal returns true if u1 and u2 equals, otherwise returns false. -func Equal(u1 UUID, u2 UUID) bool { - return bytes.Equal(u1[:], u2[:]) -} - -// Version returns algorithm version used to generate UUID. -func (u UUID) Version() uint { - return uint(u[6] >> 4) -} - -// Variant returns UUID layout variant. -func (u UUID) Variant() uint { - switch { - case (u[8] & 0x80) == 0x00: - return VariantNCS - case (u[8]&0xc0)|0x80 == 0x80: - return VariantRFC4122 - case (u[8]&0xe0)|0xc0 == 0xc0: - return VariantMicrosoft - } - return VariantFuture -} - -// Bytes returns bytes slice representation of UUID. -func (u UUID) Bytes() []byte { - return u[:] -} - -// Returns canonical string representation of UUID: -// xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx. -func (u UUID) String() string { - buf := make([]byte, 36) - - hex.Encode(buf[0:8], u[0:4]) - buf[8] = dash - hex.Encode(buf[9:13], u[4:6]) - buf[13] = dash - hex.Encode(buf[14:18], u[6:8]) - buf[18] = dash - hex.Encode(buf[19:23], u[8:10]) - buf[23] = dash - hex.Encode(buf[24:], u[10:]) - - return string(buf) -} - -// SetVersion sets version bits. -func (u *UUID) SetVersion(v byte) { - u[6] = (u[6] & 0x0f) | (v << 4) -} - -// SetVariant sets variant bits as described in RFC 4122. -func (u *UUID) SetVariant() { - u[8] = (u[8] & 0xbf) | 0x80 -} - -// MarshalText implements the encoding.TextMarshaler interface. -// The encoding is the same as returned by String. -func (u UUID) MarshalText() (text []byte, err error) { - text = []byte(u.String()) - return -} - -// UnmarshalText implements the encoding.TextUnmarshaler interface. 
-// Following formats are supported: -// "6ba7b810-9dad-11d1-80b4-00c04fd430c8", -// "{6ba7b810-9dad-11d1-80b4-00c04fd430c8}", -// "urn:uuid:6ba7b810-9dad-11d1-80b4-00c04fd430c8" -func (u *UUID) UnmarshalText(text []byte) (err error) { - if len(text) < 32 { - err = fmt.Errorf("uuid: UUID string too short: %s", text) - return - } - - t := text[:] - braced := false - - if bytes.Equal(t[:9], urnPrefix) { - t = t[9:] - } else if t[0] == '{' { - braced = true - t = t[1:] - } - - b := u[:] - - for i, byteGroup := range byteGroups { - if i > 0 && t[0] == '-' { - t = t[1:] - } else if i > 0 && t[0] != '-' { - err = fmt.Errorf("uuid: invalid string format") - return - } - - if i == 2 { - if !bytes.Contains([]byte("012345"), []byte{t[0]}) { - err = fmt.Errorf("uuid: invalid version number: %s", t[0]) - return - } - } - - if len(t) < byteGroup { - err = fmt.Errorf("uuid: UUID string too short: %s", text) - return - } - - if i == 4 && len(t) > byteGroup && - ((braced && t[byteGroup] != '}') || len(t[byteGroup:]) > 1 || !braced) { - err = fmt.Errorf("uuid: UUID string too long: %s", t) - return - } - - _, err = hex.Decode(b[:byteGroup/2], t[:byteGroup]) - - if err != nil { - return - } - - t = t[byteGroup:] - b = b[byteGroup/2:] - } - - return -} - -// MarshalBinary implements the encoding.BinaryMarshaler interface. -func (u UUID) MarshalBinary() (data []byte, err error) { - data = u.Bytes() - return -} - -// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface. -// It will return error if the slice isn't 16 bytes long. -func (u *UUID) UnmarshalBinary(data []byte) (err error) { - if len(data) != 16 { - err = fmt.Errorf("uuid: UUID must be exactly 16 bytes long, got %d bytes", len(data)) - return - } - copy(u[:], data) - - return -} - -// Value implements the driver.Valuer interface. -func (u UUID) Value() (driver.Value, error) { - return u.String(), nil -} - -// Scan implements the sql.Scanner interface. -// A 16-byte slice is handled by UnmarshalBinary, while -// a longer byte slice or a string is handled by UnmarshalText. -func (u *UUID) Scan(src interface{}) error { - switch src := src.(type) { - case []byte: - if len(src) == 16 { - return u.UnmarshalBinary(src) - } - return u.UnmarshalText(src) - - case string: - return u.UnmarshalText([]byte(src)) - } - - return fmt.Errorf("uuid: cannot convert %T to UUID", src) -} - -// Value implements the driver.Valuer interface. -func (u NullUUID) Value() (driver.Value, error) { - if !u.Valid { - return nil, nil - } - // Delegate to UUID Value function - return u.UUID.Value() -} - -// Scan implements the sql.Scanner interface. -func (u *NullUUID) Scan(src interface{}) error { - if src == nil { - u.UUID, u.Valid = Nil, false - return nil - } - - // Delegate to UUID Scan function - u.Valid = true - return u.UUID.Scan(src) -} - -// FromBytes returns UUID converted from raw byte slice input. -// It will return error if the slice isn't 16 bytes long. -func FromBytes(input []byte) (u UUID, err error) { - err = u.UnmarshalBinary(input) - return -} - -// FromBytesOrNil returns UUID converted from raw byte slice input. -// Same behavior as FromBytes, but returns a Nil UUID on error. -func FromBytesOrNil(input []byte) UUID { - uuid, err := FromBytes(input) - if err != nil { - return Nil - } - return uuid -} - -// FromString returns UUID parsed from string input. -// Input is expected in a form accepted by UnmarshalText. 
-func FromString(input string) (u UUID, err error) { - err = u.UnmarshalText([]byte(input)) - return -} - -// FromStringOrNil returns UUID parsed from string input. -// Same behavior as FromString, but returns a Nil UUID on error. -func FromStringOrNil(input string) UUID { - uuid, err := FromString(input) - if err != nil { - return Nil - } - return uuid -} - -// Returns UUID v1/v2 storage state. -// Returns epoch timestamp, clock sequence, and hardware address. -func getStorage() (uint64, uint16, []byte) { - storageOnce.Do(initStorage) - - storageMutex.Lock() - defer storageMutex.Unlock() - - timeNow := epochFunc() - // Clock changed backwards since last UUID generation. - // Should increase clock sequence. - if timeNow <= lastTime { - clockSequence++ - } - lastTime = timeNow - - return timeNow, clockSequence, hardwareAddr[:] -} - -// NewV1 returns UUID based on current timestamp and MAC address. -func NewV1() UUID { - u := UUID{} - - timeNow, clockSeq, hardwareAddr := getStorage() - - binary.BigEndian.PutUint32(u[0:], uint32(timeNow)) - binary.BigEndian.PutUint16(u[4:], uint16(timeNow>>32)) - binary.BigEndian.PutUint16(u[6:], uint16(timeNow>>48)) - binary.BigEndian.PutUint16(u[8:], clockSeq) - - copy(u[10:], hardwareAddr) - - u.SetVersion(1) - u.SetVariant() - - return u -} - -// NewV2 returns DCE Security UUID based on POSIX UID/GID. -func NewV2(domain byte) UUID { - u := UUID{} - - timeNow, clockSeq, hardwareAddr := getStorage() - - switch domain { - case DomainPerson: - binary.BigEndian.PutUint32(u[0:], posixUID) - case DomainGroup: - binary.BigEndian.PutUint32(u[0:], posixGID) - } - - binary.BigEndian.PutUint16(u[4:], uint16(timeNow>>32)) - binary.BigEndian.PutUint16(u[6:], uint16(timeNow>>48)) - binary.BigEndian.PutUint16(u[8:], clockSeq) - u[9] = domain - - copy(u[10:], hardwareAddr) - - u.SetVersion(2) - u.SetVariant() - - return u -} - -// NewV3 returns UUID based on MD5 hash of namespace UUID and name. -func NewV3(ns UUID, name string) UUID { - u := newFromHash(md5.New(), ns, name) - u.SetVersion(3) - u.SetVariant() - - return u -} - -// NewV4 returns random generated UUID. -func NewV4() UUID { - u := UUID{} - safeRandom(u[:]) - u.SetVersion(4) - u.SetVariant() - - return u -} - -// NewV5 returns UUID based on SHA-1 hash of namespace UUID and name. -func NewV5(ns UUID, name string) UUID { - u := newFromHash(sha1.New(), ns, name) - u.SetVersion(5) - u.SetVariant() - - return u -} - -// Returns UUID based on hashing of namespace UUID and name. -func newFromHash(h hash.Hash, ns UUID, name string) UUID { - u := UUID{} - h.Write(ns[:]) - h.Write([]byte(name)) - copy(u[:], h.Sum(nil)) - - return u -} diff --git a/vendor/golang.org/x/crypto/AUTHORS b/vendor/golang.org/x/crypto/AUTHORS deleted file mode 100644 index 15167cd..0000000 --- a/vendor/golang.org/x/crypto/AUTHORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code refers to The Go Authors for copyright purposes. -# The master list of authors is in the main Go distribution, -# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/golang.org/x/crypto/CONTRIBUTORS b/vendor/golang.org/x/crypto/CONTRIBUTORS deleted file mode 100644 index 1c4577e..0000000 --- a/vendor/golang.org/x/crypto/CONTRIBUTORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code was written by the Go contributors. -# The master list of contributors is in the main Go distribution, -# visible at http://tip.golang.org/CONTRIBUTORS. 
diff --git a/vendor/golang.org/x/crypto/LICENSE b/vendor/golang.org/x/crypto/LICENSE deleted file mode 100644 index 6a66aea..0000000 --- a/vendor/golang.org/x/crypto/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/crypto/PATENTS b/vendor/golang.org/x/crypto/PATENTS deleted file mode 100644 index 7330990..0000000 --- a/vendor/golang.org/x/crypto/PATENTS +++ /dev/null @@ -1,22 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/crypto/pbkdf2/pbkdf2.go b/vendor/golang.org/x/crypto/pbkdf2/pbkdf2.go deleted file mode 100644 index 593f653..0000000 --- a/vendor/golang.org/x/crypto/pbkdf2/pbkdf2.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package pbkdf2 implements the key derivation function PBKDF2 as defined in RFC -2898 / PKCS #5 v2.0. - -A key derivation function is useful when encrypting data based on a password -or any other not-fully-random data. It uses a pseudorandom function to derive -a secure encryption key based on the password. - -While v2.0 of the standard defines only one pseudorandom function to use, -HMAC-SHA1, the drafted v2.1 specification allows use of all five FIPS Approved -Hash Functions SHA-1, SHA-224, SHA-256, SHA-384 and SHA-512 for HMAC. To -choose, you can pass the `New` functions from the different SHA packages to -pbkdf2.Key. -*/ -package pbkdf2 // import "golang.org/x/crypto/pbkdf2" - -import ( - "crypto/hmac" - "hash" -) - -// Key derives a key from the password, salt and iteration count, returning a -// []byte of length keylen that can be used as cryptographic key. The key is -// derived based on the method described as PBKDF2 with the HMAC variant using -// the supplied hash function. -// -// For example, to use a HMAC-SHA-1 based PBKDF2 key derivation function, you -// can get a derived key for e.g. AES-256 (which needs a 32-byte key) by -// doing: -// -// dk := pbkdf2.Key([]byte("some password"), salt, 4096, 32, sha1.New) -// -// Remember to get a good random salt. At least 8 bytes is recommended by the -// RFC. -// -// Using a higher iteration count will increase the cost of an exhaustive -// search but will also make derivation proportionally slower. -func Key(password, salt []byte, iter, keyLen int, h func() hash.Hash) []byte { - prf := hmac.New(h, password) - hashLen := prf.Size() - numBlocks := (keyLen + hashLen - 1) / hashLen - - var buf [4]byte - dk := make([]byte, 0, numBlocks*hashLen) - U := make([]byte, hashLen) - for block := 1; block <= numBlocks; block++ { - // N.B.: || means concatenation, ^ means XOR - // for each block T_i = U_1 ^ U_2 ^ ... ^ U_iter - // U_1 = PRF(password, salt || uint(i)) - prf.Reset() - prf.Write(salt) - buf[0] = byte(block >> 24) - buf[1] = byte(block >> 16) - buf[2] = byte(block >> 8) - buf[3] = byte(block) - prf.Write(buf[:4]) - dk = prf.Sum(dk) - T := dk[len(dk)-hashLen:] - copy(U, T) - - // U_n = PRF(password, U_(n-1)) - for n := 2; n <= iter; n++ { - prf.Reset() - prf.Write(U) - U = U[:0] - U = prf.Sum(U) - for x := range U { - T[x] ^= U[x] - } - } - } - return dk[:keyLen] -} diff --git a/vendor/golang.org/x/crypto/scrypt/scrypt.go b/vendor/golang.org/x/crypto/scrypt/scrypt.go deleted file mode 100644 index 7455395..0000000 --- a/vendor/golang.org/x/crypto/scrypt/scrypt.go +++ /dev/null @@ -1,243 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package scrypt implements the scrypt key derivation function as defined in -// Colin Percival's paper "Stronger Key Derivation via Sequential Memory-Hard -// Functions" (http://www.tarsnap.com/scrypt/scrypt.pdf). -package scrypt // import "golang.org/x/crypto/scrypt" - -import ( - "crypto/sha256" - "errors" - - "golang.org/x/crypto/pbkdf2" -) - -const maxInt = int(^uint(0) >> 1) - -// blockCopy copies n numbers from src into dst. -func blockCopy(dst, src []uint32, n int) { - copy(dst, src[:n]) -} - -// blockXOR XORs numbers from dst with n numbers from src. 
-func blockXOR(dst, src []uint32, n int) { - for i, v := range src[:n] { - dst[i] ^= v - } -} - -// salsaXOR applies Salsa20/8 to the XOR of 16 numbers from tmp and in, -// and puts the result into both both tmp and out. -func salsaXOR(tmp *[16]uint32, in, out []uint32) { - w0 := tmp[0] ^ in[0] - w1 := tmp[1] ^ in[1] - w2 := tmp[2] ^ in[2] - w3 := tmp[3] ^ in[3] - w4 := tmp[4] ^ in[4] - w5 := tmp[5] ^ in[5] - w6 := tmp[6] ^ in[6] - w7 := tmp[7] ^ in[7] - w8 := tmp[8] ^ in[8] - w9 := tmp[9] ^ in[9] - w10 := tmp[10] ^ in[10] - w11 := tmp[11] ^ in[11] - w12 := tmp[12] ^ in[12] - w13 := tmp[13] ^ in[13] - w14 := tmp[14] ^ in[14] - w15 := tmp[15] ^ in[15] - - x0, x1, x2, x3, x4, x5, x6, x7, x8 := w0, w1, w2, w3, w4, w5, w6, w7, w8 - x9, x10, x11, x12, x13, x14, x15 := w9, w10, w11, w12, w13, w14, w15 - - for i := 0; i < 8; i += 2 { - u := x0 + x12 - x4 ^= u<<7 | u>>(32-7) - u = x4 + x0 - x8 ^= u<<9 | u>>(32-9) - u = x8 + x4 - x12 ^= u<<13 | u>>(32-13) - u = x12 + x8 - x0 ^= u<<18 | u>>(32-18) - - u = x5 + x1 - x9 ^= u<<7 | u>>(32-7) - u = x9 + x5 - x13 ^= u<<9 | u>>(32-9) - u = x13 + x9 - x1 ^= u<<13 | u>>(32-13) - u = x1 + x13 - x5 ^= u<<18 | u>>(32-18) - - u = x10 + x6 - x14 ^= u<<7 | u>>(32-7) - u = x14 + x10 - x2 ^= u<<9 | u>>(32-9) - u = x2 + x14 - x6 ^= u<<13 | u>>(32-13) - u = x6 + x2 - x10 ^= u<<18 | u>>(32-18) - - u = x15 + x11 - x3 ^= u<<7 | u>>(32-7) - u = x3 + x15 - x7 ^= u<<9 | u>>(32-9) - u = x7 + x3 - x11 ^= u<<13 | u>>(32-13) - u = x11 + x7 - x15 ^= u<<18 | u>>(32-18) - - u = x0 + x3 - x1 ^= u<<7 | u>>(32-7) - u = x1 + x0 - x2 ^= u<<9 | u>>(32-9) - u = x2 + x1 - x3 ^= u<<13 | u>>(32-13) - u = x3 + x2 - x0 ^= u<<18 | u>>(32-18) - - u = x5 + x4 - x6 ^= u<<7 | u>>(32-7) - u = x6 + x5 - x7 ^= u<<9 | u>>(32-9) - u = x7 + x6 - x4 ^= u<<13 | u>>(32-13) - u = x4 + x7 - x5 ^= u<<18 | u>>(32-18) - - u = x10 + x9 - x11 ^= u<<7 | u>>(32-7) - u = x11 + x10 - x8 ^= u<<9 | u>>(32-9) - u = x8 + x11 - x9 ^= u<<13 | u>>(32-13) - u = x9 + x8 - x10 ^= u<<18 | u>>(32-18) - - u = x15 + x14 - x12 ^= u<<7 | u>>(32-7) - u = x12 + x15 - x13 ^= u<<9 | u>>(32-9) - u = x13 + x12 - x14 ^= u<<13 | u>>(32-13) - u = x14 + x13 - x15 ^= u<<18 | u>>(32-18) - } - x0 += w0 - x1 += w1 - x2 += w2 - x3 += w3 - x4 += w4 - x5 += w5 - x6 += w6 - x7 += w7 - x8 += w8 - x9 += w9 - x10 += w10 - x11 += w11 - x12 += w12 - x13 += w13 - x14 += w14 - x15 += w15 - - out[0], tmp[0] = x0, x0 - out[1], tmp[1] = x1, x1 - out[2], tmp[2] = x2, x2 - out[3], tmp[3] = x3, x3 - out[4], tmp[4] = x4, x4 - out[5], tmp[5] = x5, x5 - out[6], tmp[6] = x6, x6 - out[7], tmp[7] = x7, x7 - out[8], tmp[8] = x8, x8 - out[9], tmp[9] = x9, x9 - out[10], tmp[10] = x10, x10 - out[11], tmp[11] = x11, x11 - out[12], tmp[12] = x12, x12 - out[13], tmp[13] = x13, x13 - out[14], tmp[14] = x14, x14 - out[15], tmp[15] = x15, x15 -} - -func blockMix(tmp *[16]uint32, in, out []uint32, r int) { - blockCopy(tmp[:], in[(2*r-1)*16:], 16) - for i := 0; i < 2*r; i += 2 { - salsaXOR(tmp, in[i*16:], out[i*8:]) - salsaXOR(tmp, in[i*16+16:], out[i*8+r*16:]) - } -} - -func integer(b []uint32, r int) uint64 { - j := (2*r - 1) * 16 - return uint64(b[j]) | uint64(b[j+1])<<32 -} - -func smix(b []byte, r, N int, v, xy []uint32) { - var tmp [16]uint32 - x := xy - y := xy[32*r:] - - j := 0 - for i := 0; i < 32*r; i++ { - x[i] = uint32(b[j]) | uint32(b[j+1])<<8 | uint32(b[j+2])<<16 | uint32(b[j+3])<<24 - j += 4 - } - for i := 0; i < N; i += 2 { - blockCopy(v[i*(32*r):], x, 32*r) - blockMix(&tmp, x, y, r) - - blockCopy(v[(i+1)*(32*r):], y, 32*r) - blockMix(&tmp, y, x, r) - } - for i := 
0; i < N; i += 2 { - j := int(integer(x, r) & uint64(N-1)) - blockXOR(x, v[j*(32*r):], 32*r) - blockMix(&tmp, x, y, r) - - j = int(integer(y, r) & uint64(N-1)) - blockXOR(y, v[j*(32*r):], 32*r) - blockMix(&tmp, y, x, r) - } - j = 0 - for _, v := range x[:32*r] { - b[j+0] = byte(v >> 0) - b[j+1] = byte(v >> 8) - b[j+2] = byte(v >> 16) - b[j+3] = byte(v >> 24) - j += 4 - } -} - -// Key derives a key from the password, salt, and cost parameters, returning -// a byte slice of length keyLen that can be used as cryptographic key. -// -// N is a CPU/memory cost parameter, which must be a power of two greater than 1. -// r and p must satisfy r * p < 2³⁰. If the parameters do not satisfy the -// limits, the function returns a nil byte slice and an error. -// -// For example, you can get a derived key for e.g. AES-256 (which needs a -// 32-byte key) by doing: -// -// dk, err := scrypt.Key([]byte("some password"), salt, 16384, 8, 1, 32) -// -// The recommended parameters for interactive logins as of 2009 are N=16384, -// r=8, p=1. They should be increased as memory latency and CPU parallelism -// increases. Remember to get a good random salt. -func Key(password, salt []byte, N, r, p, keyLen int) ([]byte, error) { - if N <= 1 || N&(N-1) != 0 { - return nil, errors.New("scrypt: N must be > 1 and a power of 2") - } - if uint64(r)*uint64(p) >= 1<<30 || r > maxInt/128/p || r > maxInt/256 || N > maxInt/128/r { - return nil, errors.New("scrypt: parameters are too large") - } - - xy := make([]uint32, 64*r) - v := make([]uint32, 32*N*r) - b := pbkdf2.Key(password, salt, 1, p*128*r, sha256.New) - - for i := 0; i < p; i++ { - smix(b[i*128*r:], r, N, v, xy) - } - - return pbkdf2.Key(password, b, 1, keyLen, sha256.New), nil -} diff --git a/vendor/google.golang.org/genproto/LICENSE b/vendor/google.golang.org/genproto/LICENSE deleted file mode 100644 index d645695..0000000 --- a/vendor/google.golang.org/genproto/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/google.golang.org/genproto/googleapis/api/annotations/annotations.pb.go b/vendor/google.golang.org/genproto/googleapis/api/annotations/annotations.pb.go deleted file mode 100644 index b37ccee..0000000 --- a/vendor/google.golang.org/genproto/googleapis/api/annotations/annotations.pb.go +++ /dev/null @@ -1,65 +0,0 @@ -// Code generated by protoc-gen-go. -// source: google/api/annotations.proto -// DO NOT EDIT! - -/* -Package annotations is a generated protocol buffer package. - -It is generated from these files: - google/api/annotations.proto - google/api/http.proto - -It has these top-level messages: - Http - HttpRule - CustomHttpPattern -*/ -package annotations - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" - -// Reference imports to suppress errors if they are not otherwise used. 
-var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -var E_Http = &proto.ExtensionDesc{ - ExtendedType: (*google_protobuf.MethodOptions)(nil), - ExtensionType: (*HttpRule)(nil), - Field: 72295728, - Name: "google.api.http", - Tag: "bytes,72295728,opt,name=http", - Filename: "google/api/annotations.proto", -} - -func init() { - proto.RegisterExtension(E_Http) -} - -func init() { proto.RegisterFile("google/api/annotations.proto", fileDescriptor0) } - -var fileDescriptor0 = []byte{ - // 208 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x49, 0xcf, 0xcf, 0x4f, - 0xcf, 0x49, 0xd5, 0x4f, 0x2c, 0xc8, 0xd4, 0x4f, 0xcc, 0xcb, 0xcb, 0x2f, 0x49, 0x2c, 0xc9, 0xcc, - 0xcf, 0x2b, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x82, 0xc8, 0xea, 0x25, 0x16, 0x64, - 0x4a, 0x89, 0x22, 0xa9, 0xcc, 0x28, 0x29, 0x29, 0x80, 0x28, 0x91, 0x52, 0x80, 0x0a, 0x83, 0x79, - 0x49, 0xa5, 0x69, 0xfa, 0x29, 0xa9, 0xc5, 0xc9, 0x45, 0x99, 0x05, 0x25, 0xf9, 0x45, 0x10, 0x15, - 0x56, 0xde, 0x5c, 0x2c, 0x20, 0xf5, 0x42, 0x72, 0x7a, 0x50, 0xd3, 0x60, 0x4a, 0xf5, 0x7c, 0x53, - 0x4b, 0x32, 0xf2, 0x53, 0xfc, 0x0b, 0xc0, 0x56, 0x4a, 0x6c, 0x38, 0xb5, 0x47, 0x49, 0x81, 0x51, - 0x83, 0xdb, 0x48, 0x44, 0x0f, 0x61, 0xad, 0x9e, 0x47, 0x49, 0x49, 0x41, 0x50, 0x69, 0x4e, 0x6a, - 0x10, 0xd8, 0x10, 0xa7, 0x3c, 0x2e, 0xbe, 0xe4, 0xfc, 0x5c, 0x24, 0x05, 0x4e, 0x02, 0x8e, 0x08, - 0x67, 0x07, 0x80, 0x4c, 0x0e, 0x60, 0x8c, 0x72, 0x84, 0xca, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, - 0xeb, 0xe5, 0x17, 0xa5, 0xeb, 0xa7, 0xa7, 0xe6, 0x81, 0xed, 0xd5, 0x87, 0x48, 0x25, 0x16, 0x64, - 0x16, 0xa3, 0x7b, 0xda, 0x1a, 0x89, 0xbd, 0x88, 0x89, 0xc5, 0xdd, 0x31, 0xc0, 0x33, 0x89, 0x0d, - 0xac, 0xc9, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xe3, 0x29, 0x19, 0x62, 0x28, 0x01, 0x00, 0x00, -} diff --git a/vendor/google.golang.org/genproto/googleapis/api/annotations/http.pb.go b/vendor/google.golang.org/genproto/googleapis/api/annotations/http.pb.go deleted file mode 100644 index 583ecf6..0000000 --- a/vendor/google.golang.org/genproto/googleapis/api/annotations/http.pb.go +++ /dev/null @@ -1,567 +0,0 @@ -// Code generated by protoc-gen-go. -// source: google/api/http.proto -// DO NOT EDIT! - -package annotations - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// Defines the HTTP configuration for a service. It contains a list of -// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method -// to one or more HTTP REST API methods. -type Http struct { - // A list of HTTP configuration rules that apply to individual API methods. - // - // **NOTE:** All service configuration rules follow "last one wins" order. 
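
The E_Http extension registered above is what code generators (this plugin, grpc-gateway, and similar tools) read to discover a method's HTTP mapping. Below is a minimal sketch of that lookup, assuming the legacy github.com/golang/protobuf extension API that this vendored snapshot targets; httpRuleFor is a hypothetical helper written for illustration, not part of the repository.

package example

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/protoc-gen-go/descriptor"
	"google.golang.org/genproto/googleapis/api/annotations"
)

// httpRuleFor returns the (google.api.http) rule attached to a method,
// or nil if the method carries no such option.
func httpRuleFor(m *descriptor.MethodDescriptorProto) (*annotations.HttpRule, error) {
	opts := m.GetOptions()
	if opts == nil || !proto.HasExtension(opts, annotations.E_Http) {
		return nil, nil
	}
	ext, err := proto.GetExtension(opts, annotations.E_Http)
	if err != nil {
		return nil, err
	}
	rule, ok := ext.(*annotations.HttpRule)
	if !ok {
		return nil, fmt.Errorf("unexpected extension type %T", ext)
	}
	return rule, nil
}
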
- Rules []*HttpRule `protobuf:"bytes,1,rep,name=rules" json:"rules,omitempty"` -} - -func (m *Http) Reset() { *m = Http{} } -func (m *Http) String() string { return proto.CompactTextString(m) } -func (*Http) ProtoMessage() {} -func (*Http) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{0} } - -func (m *Http) GetRules() []*HttpRule { - if m != nil { - return m.Rules - } - return nil -} - -// `HttpRule` defines the mapping of an RPC method to one or more HTTP -// REST APIs. The mapping determines what portions of the request -// message are populated from the path, query parameters, or body of -// the HTTP request. The mapping is typically specified as an -// `google.api.http` annotation, see "google/api/annotations.proto" -// for details. -// -// The mapping consists of a field specifying the path template and -// method kind. The path template can refer to fields in the request -// message, as in the example below which describes a REST GET -// operation on a resource collection of messages: -// -// -// service Messaging { -// rpc GetMessage(GetMessageRequest) returns (Message) { -// option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; -// } -// } -// message GetMessageRequest { -// message SubMessage { -// string subfield = 1; -// } -// string message_id = 1; // mapped to the URL -// SubMessage sub = 2; // `sub.subfield` is url-mapped -// } -// message Message { -// string text = 1; // content of the resource -// } -// -// The same http annotation can alternatively be expressed inside the -// `GRPC API Configuration` YAML file. -// -// http: -// rules: -// - selector: .Messaging.GetMessage -// get: /v1/messages/{message_id}/{sub.subfield} -// -// This definition enables an automatic, bidrectional mapping of HTTP -// JSON to RPC. Example: -// -// HTTP | RPC -// -----|----- -// `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` -// -// In general, not only fields but also field paths can be referenced -// from a path pattern. Fields mapped to the path pattern cannot be -// repeated and must have a primitive (non-message) type. -// -// Any fields in the request message which are not bound by the path -// pattern automatically become (optional) HTTP query -// parameters. Assume the following definition of the request message: -// -// -// message GetMessageRequest { -// message SubMessage { -// string subfield = 1; -// } -// string message_id = 1; // mapped to the URL -// int64 revision = 2; // becomes a parameter -// SubMessage sub = 3; // `sub.subfield` becomes a parameter -// } -// -// -// This enables a HTTP JSON to RPC mapping as below: -// -// HTTP | RPC -// -----|----- -// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` -// -// Note that fields which are mapped to HTTP parameters must have a -// primitive type or a repeated primitive type. Message types are not -// allowed. In the case of a repeated type, the parameter can be -// repeated in the URL, as in `...?param=A¶m=B`. -// -// For HTTP method kinds which allow a request body, the `body` field -// specifies the mapping. 
Consider a REST update method on the -// message resource collection: -// -// -// service Messaging { -// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { -// option (google.api.http) = { -// put: "/v1/messages/{message_id}" -// body: "message" -// }; -// } -// } -// message UpdateMessageRequest { -// string message_id = 1; // mapped to the URL -// Message message = 2; // mapped to the body -// } -// -// -// The following HTTP JSON to RPC mapping is enabled, where the -// representation of the JSON in the request body is determined by -// protos JSON encoding: -// -// HTTP | RPC -// -----|----- -// `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` -// -// The special name `*` can be used in the body mapping to define that -// every field not bound by the path template should be mapped to the -// request body. This enables the following alternative definition of -// the update method: -// -// service Messaging { -// rpc UpdateMessage(Message) returns (Message) { -// option (google.api.http) = { -// put: "/v1/messages/{message_id}" -// body: "*" -// }; -// } -// } -// message Message { -// string message_id = 1; -// string text = 2; -// } -// -// -// The following HTTP JSON to RPC mapping is enabled: -// -// HTTP | RPC -// -----|----- -// `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` -// -// Note that when using `*` in the body mapping, it is not possible to -// have HTTP parameters, as all fields not bound by the path end in -// the body. This makes this option more rarely used in practice of -// defining REST APIs. The common usage of `*` is in custom methods -// which don't use the URL at all for transferring data. -// -// It is possible to define multiple HTTP methods for one RPC by using -// the `additional_bindings` option. Example: -// -// service Messaging { -// rpc GetMessage(GetMessageRequest) returns (Message) { -// option (google.api.http) = { -// get: "/v1/messages/{message_id}" -// additional_bindings { -// get: "/v1/users/{user_id}/messages/{message_id}" -// } -// }; -// } -// } -// message GetMessageRequest { -// string message_id = 1; -// string user_id = 2; -// } -// -// -// This enables the following two alternative HTTP JSON to RPC -// mappings: -// -// HTTP | RPC -// -----|----- -// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` -// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` -// -// # Rules for HTTP mapping -// -// The rules for mapping HTTP path, query parameters, and body fields -// to the request message are as follows: -// -// 1. The `body` field specifies either `*` or a field path, or is -// omitted. If omitted, it assumes there is no HTTP body. -// 2. Leaf fields (recursive expansion of nested messages in the -// request) can be classified into three types: -// (a) Matched in the URL template. -// (b) Covered by body (if body is `*`, everything except (a) fields; -// else everything under the body field) -// (c) All other fields. -// 3. URL query parameters found in the HTTP request are mapped to (c) fields. -// 4. Any body sent with an HTTP request can contain only (b) fields. -// -// The syntax of the path template is as follows: -// -// Template = "/" Segments [ Verb ] ; -// Segments = Segment { "/" Segment } ; -// Segment = "*" | "**" | LITERAL | Variable ; -// Variable = "{" FieldPath [ "=" Segments ] "}" ; -// FieldPath = IDENT { "." 
IDENT } ; -// Verb = ":" LITERAL ; -// -// The syntax `*` matches a single path segment. It follows the semantics of -// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String -// Expansion. -// -// The syntax `**` matches zero or more path segments. It follows the semantics -// of [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.3 Reserved -// Expansion. NOTE: it must be the last segment in the path except the Verb. -// -// The syntax `LITERAL` matches literal text in the URL path. -// -// The syntax `Variable` matches the entire path as specified by its template; -// this nested template must not contain further variables. If a variable -// matches a single path segment, its template may be omitted, e.g. `{var}` -// is equivalent to `{var=*}`. -// -// NOTE: the field paths in variables and in the `body` must not refer to -// repeated fields or map fields. -// -// Use CustomHttpPattern to specify any HTTP method that is not included in the -// `pattern` field, such as HEAD, or "*" to leave the HTTP method unspecified for -// a given URL path rule. The wild-card rule is useful for services that provide -// content to Web (HTML) clients. -type HttpRule struct { - // Selects methods to which this rule applies. - // - // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. - Selector string `protobuf:"bytes,1,opt,name=selector" json:"selector,omitempty"` - // Determines the URL pattern is matched by this rules. This pattern can be - // used with any of the {get|put|post|delete|patch} methods. A custom method - // can be defined using the 'custom' field. - // - // Types that are valid to be assigned to Pattern: - // *HttpRule_Get - // *HttpRule_Put - // *HttpRule_Post - // *HttpRule_Delete - // *HttpRule_Patch - // *HttpRule_Custom - Pattern isHttpRule_Pattern `protobuf_oneof:"pattern"` - // The name of the request field whose value is mapped to the HTTP body, or - // `*` for mapping all fields not captured by the path pattern to the HTTP - // body. NOTE: the referred field must not be a repeated field and must be - // present at the top-level of request message type. - Body string `protobuf:"bytes,7,opt,name=body" json:"body,omitempty"` - // Additional HTTP bindings for the selector. Nested bindings must - // not contain an `additional_bindings` field themselves (that is, - // the nesting may only be one level deep). 
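
As a concrete illustration of the template grammar above, the sketch below matches a request path against a simplified template and extracts single-segment `{var}` bindings. It deliberately ignores `**`, verbs, and nested `{var=...}` templates, and it is a hypothetical toy matcher, not the one used by grpc-gateway.

package example

import "strings"

// matchTemplate matches path against a simplified google.api.http template:
// LITERAL segments, "*" wildcards, and single-segment "{var}" variables only.
func matchTemplate(template, path string) (map[string]string, bool) {
	tsegs := strings.Split(strings.Trim(template, "/"), "/")
	psegs := strings.Split(strings.Trim(path, "/"), "/")
	if len(tsegs) != len(psegs) {
		return nil, false
	}
	vars := make(map[string]string)
	for i, t := range tsegs {
		switch {
		case strings.HasPrefix(t, "{") && strings.HasSuffix(t, "}"):
			vars[strings.Trim(t, "{}")] = psegs[i] // "{var}" is "{var=*}"
		case t == "*":
			// wildcard: matches any single path segment
		case t != psegs[i]:
			return nil, false
		}
	}
	return vars, true
}

For the template `/v1/messages/{message_id}` and the request path `/v1/messages/123456`, this yields `message_id = "123456"`, the same binding the mapping tables above show being passed to `GetMessage`.
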
- AdditionalBindings []*HttpRule `protobuf:"bytes,11,rep,name=additional_bindings,json=additionalBindings" json:"additional_bindings,omitempty"` -} - -func (m *HttpRule) Reset() { *m = HttpRule{} } -func (m *HttpRule) String() string { return proto.CompactTextString(m) } -func (*HttpRule) ProtoMessage() {} -func (*HttpRule) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{1} } - -type isHttpRule_Pattern interface { - isHttpRule_Pattern() -} - -type HttpRule_Get struct { - Get string `protobuf:"bytes,2,opt,name=get,oneof"` -} -type HttpRule_Put struct { - Put string `protobuf:"bytes,3,opt,name=put,oneof"` -} -type HttpRule_Post struct { - Post string `protobuf:"bytes,4,opt,name=post,oneof"` -} -type HttpRule_Delete struct { - Delete string `protobuf:"bytes,5,opt,name=delete,oneof"` -} -type HttpRule_Patch struct { - Patch string `protobuf:"bytes,6,opt,name=patch,oneof"` -} -type HttpRule_Custom struct { - Custom *CustomHttpPattern `protobuf:"bytes,8,opt,name=custom,oneof"` -} - -func (*HttpRule_Get) isHttpRule_Pattern() {} -func (*HttpRule_Put) isHttpRule_Pattern() {} -func (*HttpRule_Post) isHttpRule_Pattern() {} -func (*HttpRule_Delete) isHttpRule_Pattern() {} -func (*HttpRule_Patch) isHttpRule_Pattern() {} -func (*HttpRule_Custom) isHttpRule_Pattern() {} - -func (m *HttpRule) GetPattern() isHttpRule_Pattern { - if m != nil { - return m.Pattern - } - return nil -} - -func (m *HttpRule) GetSelector() string { - if m != nil { - return m.Selector - } - return "" -} - -func (m *HttpRule) GetGet() string { - if x, ok := m.GetPattern().(*HttpRule_Get); ok { - return x.Get - } - return "" -} - -func (m *HttpRule) GetPut() string { - if x, ok := m.GetPattern().(*HttpRule_Put); ok { - return x.Put - } - return "" -} - -func (m *HttpRule) GetPost() string { - if x, ok := m.GetPattern().(*HttpRule_Post); ok { - return x.Post - } - return "" -} - -func (m *HttpRule) GetDelete() string { - if x, ok := m.GetPattern().(*HttpRule_Delete); ok { - return x.Delete - } - return "" -} - -func (m *HttpRule) GetPatch() string { - if x, ok := m.GetPattern().(*HttpRule_Patch); ok { - return x.Patch - } - return "" -} - -func (m *HttpRule) GetCustom() *CustomHttpPattern { - if x, ok := m.GetPattern().(*HttpRule_Custom); ok { - return x.Custom - } - return nil -} - -func (m *HttpRule) GetBody() string { - if m != nil { - return m.Body - } - return "" -} - -func (m *HttpRule) GetAdditionalBindings() []*HttpRule { - if m != nil { - return m.AdditionalBindings - } - return nil -} - -// XXX_OneofFuncs is for the internal use of the proto package. 
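
Because `Pattern` is a oneof, exactly one of the wrapper types above is set at a time, so a type switch recovers the HTTP verb and path template, including the `custom` case. The helper below is a hypothetical illustration, not part of this package; it assumes the same genproto annotations import as the earlier sketch.

package example

import (
	"google.golang.org/genproto/googleapis/api/annotations"
)

// verbAndPath returns the HTTP method and path template selected by the
// rule's Pattern oneof; custom patterns report their own verb (e.g. HEAD).
func verbAndPath(r *annotations.HttpRule) (verb, path string) {
	switch p := r.GetPattern().(type) {
	case *annotations.HttpRule_Get:
		return "GET", p.Get
	case *annotations.HttpRule_Put:
		return "PUT", p.Put
	case *annotations.HttpRule_Post:
		return "POST", p.Post
	case *annotations.HttpRule_Delete:
		return "DELETE", p.Delete
	case *annotations.HttpRule_Patch:
		return "PATCH", p.Patch
	case *annotations.HttpRule_Custom:
		return p.Custom.GetKind(), p.Custom.GetPath()
	default:
		return "", "" // no pattern set on this rule
	}
}

Nested rules returned by GetAdditionalBindings() can be fed through the same helper, since the doc comment above guarantees the nesting is at most one level deep.
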
-func (*HttpRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { - return _HttpRule_OneofMarshaler, _HttpRule_OneofUnmarshaler, _HttpRule_OneofSizer, []interface{}{ - (*HttpRule_Get)(nil), - (*HttpRule_Put)(nil), - (*HttpRule_Post)(nil), - (*HttpRule_Delete)(nil), - (*HttpRule_Patch)(nil), - (*HttpRule_Custom)(nil), - } -} - -func _HttpRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { - m := msg.(*HttpRule) - // pattern - switch x := m.Pattern.(type) { - case *HttpRule_Get: - b.EncodeVarint(2<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Get) - case *HttpRule_Put: - b.EncodeVarint(3<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Put) - case *HttpRule_Post: - b.EncodeVarint(4<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Post) - case *HttpRule_Delete: - b.EncodeVarint(5<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Delete) - case *HttpRule_Patch: - b.EncodeVarint(6<<3 | proto.WireBytes) - b.EncodeStringBytes(x.Patch) - case *HttpRule_Custom: - b.EncodeVarint(8<<3 | proto.WireBytes) - if err := b.EncodeMessage(x.Custom); err != nil { - return err - } - case nil: - default: - return fmt.Errorf("HttpRule.Pattern has unexpected type %T", x) - } - return nil -} - -func _HttpRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { - m := msg.(*HttpRule) - switch tag { - case 2: // pattern.get - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Pattern = &HttpRule_Get{x} - return true, err - case 3: // pattern.put - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Pattern = &HttpRule_Put{x} - return true, err - case 4: // pattern.post - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Pattern = &HttpRule_Post{x} - return true, err - case 5: // pattern.delete - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Pattern = &HttpRule_Delete{x} - return true, err - case 6: // pattern.patch - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - x, err := b.DecodeStringBytes() - m.Pattern = &HttpRule_Patch{x} - return true, err - case 8: // pattern.custom - if wire != proto.WireBytes { - return true, proto.ErrInternalBadWireType - } - msg := new(CustomHttpPattern) - err := b.DecodeMessage(msg) - m.Pattern = &HttpRule_Custom{msg} - return true, err - default: - return false, nil - } -} - -func _HttpRule_OneofSizer(msg proto.Message) (n int) { - m := msg.(*HttpRule) - // pattern - switch x := m.Pattern.(type) { - case *HttpRule_Get: - n += proto.SizeVarint(2<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Get))) - n += len(x.Get) - case *HttpRule_Put: - n += proto.SizeVarint(3<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Put))) - n += len(x.Put) - case *HttpRule_Post: - n += proto.SizeVarint(4<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Post))) - n += len(x.Post) - case *HttpRule_Delete: - n += proto.SizeVarint(5<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Delete))) - n += len(x.Delete) - case *HttpRule_Patch: - n += proto.SizeVarint(6<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(len(x.Patch))) - n += len(x.Patch) - case *HttpRule_Custom: - s := proto.Size(x.Custom) - n += 
proto.SizeVarint(8<<3 | proto.WireBytes) - n += proto.SizeVarint(uint64(s)) - n += s - case nil: - default: - panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) - } - return n -} - -// A custom pattern is used for defining custom HTTP verb. -type CustomHttpPattern struct { - // The name of this custom HTTP verb. - Kind string `protobuf:"bytes,1,opt,name=kind" json:"kind,omitempty"` - // The path matched by this custom verb. - Path string `protobuf:"bytes,2,opt,name=path" json:"path,omitempty"` -} - -func (m *CustomHttpPattern) Reset() { *m = CustomHttpPattern{} } -func (m *CustomHttpPattern) String() string { return proto.CompactTextString(m) } -func (*CustomHttpPattern) ProtoMessage() {} -func (*CustomHttpPattern) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{2} } - -func (m *CustomHttpPattern) GetKind() string { - if m != nil { - return m.Kind - } - return "" -} - -func (m *CustomHttpPattern) GetPath() string { - if m != nil { - return m.Path - } - return "" -} - -func init() { - proto.RegisterType((*Http)(nil), "google.api.Http") - proto.RegisterType((*HttpRule)(nil), "google.api.HttpRule") - proto.RegisterType((*CustomHttpPattern)(nil), "google.api.CustomHttpPattern") -} - -func init() { proto.RegisterFile("google/api/http.proto", fileDescriptor1) } - -var fileDescriptor1 = []byte{ - // 359 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xcf, 0x6a, 0xe3, 0x30, - 0x10, 0xc6, 0xd7, 0x89, 0xe3, 0x24, 0x13, 0x58, 0x58, 0x6d, 0x76, 0x11, 0x85, 0x42, 0xc8, 0x29, - 0xf4, 0x60, 0x43, 0x7a, 0xe8, 0x21, 0xa7, 0xb8, 0x94, 0xa6, 0xb7, 0xe0, 0x63, 0x2f, 0x45, 0xb1, - 0x85, 0xa2, 0xd6, 0x91, 0x84, 0x3d, 0x3e, 0xf4, 0x75, 0xfa, 0x0e, 0x7d, 0xb7, 0x1e, 0x8b, 0xfe, - 0xa4, 0x09, 0x14, 0x7a, 0x9b, 0xef, 0x37, 0x9f, 0x34, 0xa3, 0x19, 0xc1, 0x3f, 0xa1, 0xb5, 0xa8, - 0x79, 0xc6, 0x8c, 0xcc, 0xf6, 0x88, 0x26, 0x35, 0x8d, 0x46, 0x4d, 0xc0, 0xe3, 0x94, 0x19, 0x39, - 0x5f, 0x42, 0xbc, 0x41, 0x34, 0xe4, 0x0a, 0x06, 0x4d, 0x57, 0xf3, 0x96, 0x46, 0xb3, 0xfe, 0x62, - 0xb2, 0x9c, 0xa6, 0x27, 0x4f, 0x6a, 0x0d, 0x45, 0x57, 0xf3, 0xc2, 0x5b, 0xe6, 0xef, 0x3d, 0x18, - 0x1d, 0x19, 0xb9, 0x80, 0x51, 0xcb, 0x6b, 0x5e, 0xa2, 0x6e, 0x68, 0x34, 0x8b, 0x16, 0xe3, 0xe2, - 0x4b, 0x13, 0x02, 0x7d, 0xc1, 0x91, 0xf6, 0x2c, 0xde, 0xfc, 0x2a, 0xac, 0xb0, 0xcc, 0x74, 0x48, - 0xfb, 0x47, 0x66, 0x3a, 0x24, 0x53, 0x88, 0x8d, 0x6e, 0x91, 0xc6, 0x01, 0x3a, 0x45, 0x28, 0x24, - 0x15, 0xaf, 0x39, 0x72, 0x3a, 0x08, 0x3c, 0x68, 0xf2, 0x1f, 0x06, 0x86, 0x61, 0xb9, 0xa7, 0x49, - 0x48, 0x78, 0x49, 0x6e, 0x20, 0x29, 0xbb, 0x16, 0xf5, 0x81, 0x8e, 0x66, 0xd1, 0x62, 0xb2, 0xbc, - 0x3c, 0x7f, 0xc5, 0xad, 0xcb, 0xd8, 0xbe, 0xb7, 0x0c, 0x91, 0x37, 0xca, 0x5e, 0xe8, 0xed, 0x84, - 0x40, 0xbc, 0xd3, 0xd5, 0x2b, 0x1d, 0xba, 0x07, 0xb8, 0x98, 0xdc, 0xc1, 0x5f, 0x56, 0x55, 0x12, - 0xa5, 0x56, 0xac, 0x7e, 0xda, 0x49, 0x55, 0x49, 0x25, 0x5a, 0x3a, 0xf9, 0x61, 0x3e, 0xe4, 0x74, - 0x20, 0x0f, 0xfe, 0x7c, 0x0c, 0x43, 0xe3, 0xeb, 0xcd, 0x57, 0xf0, 0xe7, 0x5b, 0x13, 0xb6, 0xf4, - 0x8b, 0x54, 0x55, 0x98, 0x9d, 0x8b, 0x2d, 0x33, 0x0c, 0xf7, 0x7e, 0x70, 0x85, 0x8b, 0xf3, 0x67, - 0xf8, 0x5d, 0xea, 0xc3, 0x59, 0xd9, 0x7c, 0xec, 0xae, 0xb1, 0x1b, 0xdd, 0x46, 0x8f, 0xeb, 0x90, - 0x10, 0xba, 0x66, 0x4a, 0xa4, 0xba, 0x11, 0x99, 0xe0, 0xca, 0xed, 0x3b, 0xf3, 0x29, 0x66, 0x64, - 0xeb, 0x7e, 0x02, 0x53, 0x4a, 0x23, 0xb3, 0x6d, 0xb6, 0xab, 0xb3, 0xf8, 0x23, 0x8a, 0xde, 0x7a, - 0xf1, 0xfd, 0x7a, 0xfb, 0xb0, 0x4b, 0xdc, 0xb9, 0xeb, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x68, - 0x15, 0x60, 0x5b, 
0x40, 0x02, 0x00, 0x00,
-}
diff --git a/vendor/modules.txt b/vendor/modules.txt
deleted file mode 100644
index 9f155cd..0000000
--- a/vendor/modules.txt
+++ /dev/null
@@ -1,34 +0,0 @@
-# github.com/Masterminds/semver v1.2.2
-github.com/Masterminds/semver
-# github.com/Masterminds/sprig v2.14.1+incompatible
-github.com/Masterminds/sprig
-# github.com/aokoli/goutils v0.0.0-20170502144750-e57d01ace047
-github.com/aokoli/goutils
-# github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b
-github.com/golang/glog
-# github.com/golang/protobuf v0.0.0-20161117033126-8ee79997227b
-github.com/golang/protobuf/proto
-github.com/golang/protobuf/protoc-gen-go/descriptor
-github.com/golang/protobuf/protoc-gen-go/generator
-github.com/golang/protobuf/protoc-gen-go/plugin
-# github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f
-github.com/gorilla/context
-# github.com/gorilla/handlers v0.0.0-20161028133215-e1b2144f2167
-github.com/gorilla/handlers
-# github.com/gorilla/mux v1.5.0
-github.com/gorilla/mux
-# github.com/grpc-ecosystem/grpc-gateway v1.2.2
-github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor
-github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/httprule
-github.com/grpc-ecosystem/grpc-gateway/utilities
-# github.com/huandu/xstrings v0.0.0-20151130125119-3959339b3335
-github.com/huandu/xstrings
-# github.com/imdario/mergo v0.0.0-20171009183408-7fe0c75c13ab
-github.com/imdario/mergo
-# github.com/satori/go.uuid v1.1.0
-github.com/satori/go.uuid
-# golang.org/x/crypto v0.0.0-20170516161655-0fe963104e9d
-golang.org/x/crypto/scrypt
-golang.org/x/crypto/pbkdf2
-# google.golang.org/genproto v0.0.0-20170517234824-bb3573be0c48
-google.golang.org/genproto/googleapis/api/annotations