util/router: drop google copy of pattern matcher in favour of util/http trie
Signed-off-by: Vasiliy Tolstov <v.tolstov@unistack.org>
parent ecb60e4dc5
commit 8237e6a08e
@@ -1,27 +0,0 @@
Copyright (c) 2015, Gengo, Inc.
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

    * Redistributions of source code must retain the above copyright notice,
    this list of conditions and the following disclaimer.

    * Redistributions in binary form must reproduce the above copyright notice,
    this list of conditions and the following disclaimer in the documentation
    and/or other materials provided with the distribution.

    * Neither the name of Gengo, Inc. nor the names of its
    contributors may be used to endorse or promote products derived from this
    software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -1,119 +0,0 @@
package router

// download from https://raw.githubusercontent.com/grpc-ecosystem/grpc-gateway/master/protoc-gen-grpc-gateway/httprule/compile.go

const (
	opcodeVersion = 1
)

// Template is a compiled representation of path templates.
type Template struct {
	// Version is the version number of the format.
	Version int
	// OpCodes is a sequence of operations.
	OpCodes []int
	// Pool is a constant pool
	Pool []string
	// Verb is a VERB part in the template.
	Verb string
	// Fields is a list of field paths bound in this template.
	Fields []string
	// Original template (example: /v1/a_bit_of_everything)
	Template string
}

// Compiler compiles utilities representation of path templates into marshallable operations.
// They can be unmarshalled by runtime.NewPattern.
type Compiler interface {
	Compile() Template
}

type op struct {
	// code is the opcode of the operation
	code OpCode

	// str is a string operand of the code.
	// num is ignored if str is not empty.
	str string

	// num is a numeric operand of the code.
	num int
}

func (w wildcard) compile() []op {
	return []op{
		{code: OpPush},
	}
}

func (w deepWildcard) compile() []op {
	return []op{
		{code: OpPushM},
	}
}

func (l literal) compile() []op {
	return []op{
		{
			code: OpLitPush,
			str:  string(l),
		},
	}
}

func (v variable) compile() []op {
	ops := make([]op, 0, len(v.segments))
	for _, s := range v.segments {
		ops = append(ops, s.compile()...)
	}
	ops = append(ops, op{
		code: OpConcatN,
		num:  len(v.segments),
	}, op{
		code: OpCapture,
		str:  v.path,
	})

	return ops
}

func (t template) Compile() Template {
	rawOps := make([]op, 0, len(t.segments))
	for _, s := range t.segments {
		rawOps = append(rawOps, s.compile()...)
	}

	var (
		ops    []int
		pool   []string
		fields []string
	)
	consts := make(map[string]int)
	for _, op := range rawOps {
		ops = append(ops, int(op.code))
		if op.str == "" {
			ops = append(ops, op.num)
		} else {
			// eof segment literal represents the "/" path pattern
			if op.str == eof {
				op.str = ""
			}
			if _, ok := consts[op.str]; !ok {
				consts[op.str] = len(pool)
				pool = append(pool, op.str)
			}
			ops = append(ops, consts[op.str])
		}
		if op.code == OpCapture {
			fields = append(fields, op.str)
		}
	}
	return Template{
		Version:  opcodeVersion,
		OpCodes:  ops,
		Pool:     pool,
		Verb:     t.verb,
		Fields:   fields,
		Template: t.template,
	}
}
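Note (not part of the diff): a minimal sketch, assuming it sits in the same router package as the removed files, of how the compiler above was driven together with Parse from parse.go below; the opcode stream it emits is what NewPattern consumed.

package router

import "fmt"

// exampleCompile is a hypothetical helper (not part of the deleted code)
// showing the Parse -> Compile flow and the resulting Template fields.
func exampleCompile() error {
	compiler, err := Parse("/v1/b/{bucket_name=buckets/*}:lock")
	if err != nil {
		return err // syntactically invalid template
	}
	tmpl := compiler.Compile()
	fmt.Println(tmpl.OpCodes) // interleaved opcode/operand pairs
	fmt.Println(tmpl.Pool)    // constant pool referenced by the operands
	fmt.Println(tmpl.Fields)  // field paths bound by the template: ["bucket_name"]
	fmt.Println(tmpl.Verb)    // "lock"
	return nil
}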
@@ -1,129 +0,0 @@
package router

// download from https://raw.githubusercontent.com/grpc-ecosystem/grpc-gateway/master/protoc-gen-grpc-gateway/httprule/compile_test.go

import (
	"reflect"
	"testing"
)

const (
	operandFiller = 0
)

func TestCompile(t *testing.T) {
	for _, spec := range []struct {
		segs []segment
		verb string

		ops    []int
		pool   []string
		fields []string
	}{
		{},
		{
			segs: []segment{
				literal(eof),
			},
			ops:  []int{int(OpLitPush), 0},
			pool: []string{""},
		},
		{
			segs: []segment{
				wildcard{},
			},
			ops: []int{int(OpPush), operandFiller},
		},
		{
			segs: []segment{
				deepWildcard{},
			},
			ops: []int{int(OpPushM), operandFiller},
		},
		{
			segs: []segment{
				literal("v1"),
			},
			ops:  []int{int(OpLitPush), 0},
			pool: []string{"v1"},
		},
		{
			segs: []segment{
				literal("v1"),
			},
			verb: "LOCK",
			ops:  []int{int(OpLitPush), 0},
			pool: []string{"v1"},
		},
		{
			segs: []segment{
				variable{
					path: "name.nested",
					segments: []segment{
						wildcard{},
					},
				},
			},
			ops: []int{
				int(OpPush), operandFiller,
				int(OpConcatN), 1,
				int(OpCapture), 0,
			},
			pool:   []string{"name.nested"},
			fields: []string{"name.nested"},
		},
		{
			segs: []segment{
				literal("obj"),
				variable{
					path: "name.nested",
					segments: []segment{
						literal("a"),
						wildcard{},
						literal("b"),
					},
				},
				variable{
					path: "obj",
					segments: []segment{
						deepWildcard{},
					},
				},
			},
			ops: []int{
				int(OpLitPush), 0,
				int(OpLitPush), 1,
				int(OpPush), operandFiller,
				int(OpLitPush), 2,
				int(OpConcatN), 3,
				int(OpCapture), 3,
				int(OpPushM), operandFiller,
				int(OpConcatN), 1,
				int(OpCapture), 0,
			},
			pool:   []string{"obj", "a", "b", "name.nested"},
			fields: []string{"name.nested", "obj"},
		},
	} {
		tmpl := template{
			segments: spec.segs,
			verb:     spec.verb,
		}
		compiled := tmpl.Compile()
		if got, want := compiled.Version, opcodeVersion; got != want {
			t.Errorf("tmpl.Compile().Version = %d; want %d; segs=%#v, verb=%q", got, want, spec.segs, spec.verb)
		}
		if got, want := compiled.OpCodes, spec.ops; !reflect.DeepEqual(got, want) {
			t.Errorf("tmpl.Compile().OpCodes = %v; want %v; segs=%#v, verb=%q", got, want, spec.segs, spec.verb)
		}
		if got, want := compiled.Pool, spec.pool; !reflect.DeepEqual(got, want) {
			t.Errorf("tmpl.Compile().Pool = %q; want %q; segs=%#v, verb=%q", got, want, spec.segs, spec.verb)
		}
		if got, want := compiled.Verb, spec.verb; got != want {
			t.Errorf("tmpl.Compile().Verb = %q; want %q; segs=%#v, verb=%q", got, want, spec.segs, spec.verb)
		}
		if got, want := compiled.Fields, spec.fields; !reflect.DeepEqual(got, want) {
			t.Errorf("tmpl.Compile().Fields = %q; want %q; segs=%#v, verb=%q", got, want, spec.segs, spec.verb)
		}
	}
}
@@ -1,371 +0,0 @@
package router

// download from https://raw.githubusercontent.com/grpc-ecosystem/grpc-gateway/master/protoc-gen-grpc-gateway/httprule/parse.go

import (
	"fmt"
	"strings"
)

// InvalidTemplateError indicates that the path template is not valid.
type InvalidTemplateError struct {
	tmpl string
	msg  string
}

func (e InvalidTemplateError) Error() string {
	return fmt.Sprintf("%s: %s", e.msg, e.tmpl)
}

// Parse parses the string representation of path template
func Parse(tmpl string) (Compiler, error) {
	if !strings.HasPrefix(tmpl, "/") {
		return template{}, InvalidTemplateError{tmpl: tmpl, msg: "no leading /"}
	}
	tokens, verb := tokenize(tmpl[1:])

	p := parser{tokens: tokens}
	segs, err := p.topLevelSegments()
	if err != nil {
		return template{}, InvalidTemplateError{tmpl: tmpl, msg: err.Error()}
	}

	return template{
		segments: segs,
		verb:     verb,
		template: tmpl,
	}, nil
}

func tokenize(path string) (tokens []string, verb string) {
	if path == "" {
		return []string{eof}, ""
	}

	const (
		init = iota
		field
		nested
	)
	st := init
	for path != "" {
		var idx int
		switch st {
		case init:
			idx = strings.IndexAny(path, "/{")
		case field:
			idx = strings.IndexAny(path, ".=}")
		case nested:
			idx = strings.IndexAny(path, "/}")
		}
		if idx < 0 {
			tokens = append(tokens, path)
			break
		}
		switch r := path[idx]; r {
		case '/', '.':
		case '{':
			st = field
		case '=':
			st = nested
		case '}':
			st = init
		}
		if idx == 0 {
			tokens = append(tokens, path[idx:idx+1])
		} else {
			tokens = append(tokens, path[:idx], path[idx:idx+1])
		}
		path = path[idx+1:]
	}

	l := len(tokens)
	// See
	// https://github.com/grpc-ecosystem/grpc-gateway/pull/1947#issuecomment-774523693 ;
	// although normal and backwards-compat logic here is to use the last index
	// of a colon, if the final segment is a variable followed by a colon, the
	// part following the colon must be a verb. Hence if the previous token is
	// an end var marker, we switch the index we're looking for to Index instead
	// of LastIndex, so that we correctly grab the remaining part of the path as
	// the verb.
	var penultimateTokenIsEndVar bool
	switch l {
	case 0, 1:
		// Not enough to be variable so skip this logic and don't result in an
		// invalid index
	default:
		penultimateTokenIsEndVar = tokens[l-2] == "}"
	}
	t := tokens[l-1]
	var idx int
	if penultimateTokenIsEndVar {
		idx = strings.Index(t, ":")
	} else {
		idx = strings.LastIndex(t, ":")
	}
	if idx == 0 {
		tokens, verb = tokens[:l-1], t[1:]
	} else if idx > 0 {
		tokens[l-1], verb = t[:idx], t[idx+1:]
	}
	tokens = append(tokens, eof)
	return tokens, verb
}

// parser is a parser of the template syntax defined in github.com/googleapis/googleapis/google/api/http.proto.
type parser struct {
	tokens   []string
	accepted []string
}

// topLevelSegments is the target of this parser.
func (p *parser) topLevelSegments() ([]segment, error) {
	if _, err := p.accept(typeEOF); err == nil {
		p.tokens = p.tokens[:0]
		return []segment{literal(eof)}, nil
	}
	segs, err := p.segments()
	if err != nil {
		return nil, err
	}
	if _, err := p.accept(typeEOF); err != nil {
		return nil, fmt.Errorf("unexpected token %q after segments %q", p.tokens[0], strings.Join(p.accepted, ""))
	}
	return segs, nil
}

func (p *parser) segments() ([]segment, error) {
	s, err := p.segment()
	if err != nil {
		return nil, err
	}

	segs := []segment{s}
	for {
		if _, err := p.accept("/"); err != nil {
			return segs, nil
		}
		s, err := p.segment()
		if err != nil {
			return segs, err
		}
		segs = append(segs, s)
	}
}

func (p *parser) segment() (segment, error) {
	if _, err := p.accept("*"); err == nil {
		return wildcard{}, nil
	}
	if _, err := p.accept("**"); err == nil {
		return deepWildcard{}, nil
	}
	if l, err := p.literal(); err == nil {
		return l, nil
	}

	v, err := p.variable()
	if err != nil {
		return nil, fmt.Errorf("segment neither wildcards, literal or variable: %v", err)
	}
	return v, err
}

func (p *parser) literal() (segment, error) {
	lit, err := p.accept(typeLiteral)
	if err != nil {
		return nil, err
	}
	return literal(lit), nil
}

func (p *parser) variable() (segment, error) {
	if _, err := p.accept("{"); err != nil {
		return nil, err
	}

	path, err := p.fieldPath()
	if err != nil {
		return nil, err
	}

	var segs []segment
	if _, err := p.accept("="); err == nil {
		segs, err = p.segments()
		if err != nil {
			return nil, fmt.Errorf("invalid segment in variable %q: %v", path, err)
		}
	} else {
		segs = []segment{wildcard{}}
	}

	if _, err := p.accept("}"); err != nil {
		return nil, fmt.Errorf("unterminated variable segment: %s", path)
	}
	return variable{
		path:     path,
		segments: segs,
	}, nil
}

func (p *parser) fieldPath() (string, error) {
	c, err := p.accept(typeIdent)
	if err != nil {
		return "", err
	}
	components := []string{c}
	for {
		if _, err = p.accept("."); err != nil {
			return strings.Join(components, "."), nil
		}
		c, err := p.accept(typeIdent)
		if err != nil {
			return "", fmt.Errorf("invalid field path component: %v", err)
		}
		components = append(components, c)
	}
}

// A termType is a type of terminal symbols.
type termType string

// These constants define some of valid values of termType.
// They improve readability of parse functions.
//
// You can also use "/", "*", "**", "." or "=" as valid values.
const (
	typeIdent   = termType("ident")
	typeLiteral = termType("literal")
	typeEOF     = termType("$")
)

const (
	// eof is the terminal symbol which always appears at the end of token sequence.
	eof = "\u0000"
)

// accept tries to accept a token in "p".
// This function consumes a token and returns it if it matches to the specified "term".
// If it doesn't match, the function does not consume any tokens and return an error.
func (p *parser) accept(term termType) (string, error) {
	t := p.tokens[0]
	switch term {
	case "/", "*", "**", ".", "=", "{", "}":
		if t != string(term) && t != "/" {
			return "", fmt.Errorf("expected %q but got %q", term, t)
		}
	case typeEOF:
		if t != eof {
			return "", fmt.Errorf("expected EOF but got %q", t)
		}
	case typeIdent:
		if err := expectIdent(t); err != nil {
			return "", err
		}
	case typeLiteral:
		if err := expectPChars(t); err != nil {
			return "", err
		}
	default:
		return "", fmt.Errorf("unknown termType %q", term)
	}
	p.tokens = p.tokens[1:]
	p.accepted = append(p.accepted, t)
	return t, nil
}

// expectPChars determines if "t" consists of only pchars defined in RFC3986.
//
// https://www.ietf.org/rfc/rfc3986.txt, P.49
//   pchar       = unreserved / pct-encoded / sub-delims / ":" / "@"
//   unreserved  = ALPHA / DIGIT / "-" / "." / "_" / "~"
//   sub-delims  = "!" / "$" / "&" / "'" / "(" / ")"
//               / "*" / "+" / "," / ";" / "="
//   pct-encoded = "%" HEXDIG HEXDIG
//nolint:gocyclo
func expectPChars(t string) error {
	const (
		init = iota
		pct1
		pct2
	)
	st := init
	for _, r := range t {
		if st != init {
			if !isHexDigit(r) {
				return fmt.Errorf("invalid hexdigit: %c(%U)", r, r)
			}
			switch st {
			case pct1:
				st = pct2
			case pct2:
				st = init
			}
			continue
		}

		// unreserved
		switch {
		case 'A' <= r && r <= 'Z':
			continue
		case 'a' <= r && r <= 'z':
			continue
		case '0' <= r && r <= '9':
			continue
		}
		switch r {
		case '-', '.', '_', '~':
			// unreserved
		case '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=':
			// sub-delims
		case ':', '@':
			// rest of pchar
		case '%':
			// pct-encoded
			st = pct1
		default:
			return fmt.Errorf("invalid character in path segment: %q(%U)", r, r)
		}
	}
	if st != init {
		return fmt.Errorf("invalid percent-encoding in %q", t)
	}
	return nil
}

// expectIdent determines if "ident" is a valid identifier in .proto schema ([[:alpha:]_][[:alphanum:]_]*).
func expectIdent(ident string) error {
	if ident == "" {
		return fmt.Errorf("empty identifier")
	}
	for pos, r := range ident {
		switch {
		case '0' <= r && r <= '9':
			if pos == 0 {
				return fmt.Errorf("identifier starting with digit: %s", ident)
			}
			continue
		case 'A' <= r && r <= 'Z':
			continue
		case 'a' <= r && r <= 'z':
			continue
		case r == '_':
			continue
		default:
			return fmt.Errorf("invalid character %q(%U) in identifier: %s", r, r, ident)
		}
	}
	return nil
}

func isHexDigit(r rune) bool {
	switch {
	case '0' <= r && r <= '9':
		return true
	case 'A' <= r && r <= 'F':
		return true
	case 'a' <= r && r <= 'f':
		return true
	}
	return false
}
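Note (not part of the diff): a hedged sketch, under the same-package assumption, of what the tokenizer and Parse above returned for a typical template and for an invalid one.

package router

import "fmt"

// exampleParse is a hypothetical snippet (not part of the deleted code)
// showing the token stream, the extracted verb, and the parse error path.
func exampleParse() {
	tokens, verb := tokenize("v1/b/{bucket_name=buckets/*}:lock")
	fmt.Println(tokens) // ["v1" "/" "b" "/" "{" "bucket_name" "=" "buckets" "/" "*" "}" "\u0000"]
	fmt.Println(verb)   // "lock"

	if _, err := Parse("no-leading-slash"); err != nil {
		fmt.Println(err) // InvalidTemplateError: "no leading /: no-leading-slash"
	}
}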
@@ -1,360 +0,0 @@
package router

// download from https://raw.githubusercontent.com/grpc-ecosystem/grpc-gateway/master/protoc-gen-grpc-gateway/httprule/parse_test.go

import (
	"context"
	"fmt"
	"reflect"
	"testing"

	"github.com/unistack-org/micro/v3/logger"
)

func TestTokenize(t *testing.T) {
	for _, spec := range []struct {
		src    string
		tokens []string
		verb   string
	}{
		{
			src:    "",
			tokens: []string{eof},
		},
		{
			src:    "v1",
			tokens: []string{"v1", eof},
		},
		{
			src:    "v1/b",
			tokens: []string{"v1", "/", "b", eof},
		},
		{
			src:    "v1/endpoint/*",
			tokens: []string{"v1", "/", "endpoint", "/", "*", eof},
		},
		{
			src:    "v1/endpoint/**",
			tokens: []string{"v1", "/", "endpoint", "/", "**", eof},
		},
		{
			src: "v1/b/{bucket_name=*}",
			tokens: []string{
				"v1", "/",
				"b", "/",
				"{", "bucket_name", "=", "*", "}",
				eof,
			},
		},
		{
			src: "v1/b/{bucket_name=buckets/*}",
			tokens: []string{
				"v1", "/",
				"b", "/",
				"{", "bucket_name", "=", "buckets", "/", "*", "}",
				eof,
			},
		},
		{
			src: "v1/b/{bucket_name=buckets/*}/o",
			tokens: []string{
				"v1", "/",
				"b", "/",
				"{", "bucket_name", "=", "buckets", "/", "*", "}", "/",
				"o",
				eof,
			},
		},
		{
			src: "v1/b/{bucket_name=buckets/*}/o/{name}",
			tokens: []string{
				"v1", "/",
				"b", "/",
				"{", "bucket_name", "=", "buckets", "/", "*", "}", "/",
				"o", "/", "{", "name", "}",
				eof,
			},
		},
		{
			src: "v1/a=b&c=d;e=f:g/endpoint.rdf",
			tokens: []string{
				"v1", "/",
				"a=b&c=d;e=f:g", "/",
				"endpoint.rdf",
				eof,
			},
		},
		{
			src: "v1/a/{endpoint}:a",
			tokens: []string{
				"v1", "/",
				"a", "/",
				"{", "endpoint", "}",
				eof,
			},
			verb: "a",
		},
		{
			src: "v1/a/{endpoint}:b:c",
			tokens: []string{
				"v1", "/",
				"a", "/",
				"{", "endpoint", "}",
				eof,
			},
			verb: "b:c",
		},
	} {
		tokens, verb := tokenize(spec.src)
		if got, want := tokens, spec.tokens; !reflect.DeepEqual(got, want) {
			t.Errorf("tokenize(%q) = %q, _; want %q, _", spec.src, got, want)
		}

		switch {
		case spec.verb != "":
			if got, want := verb, spec.verb; !reflect.DeepEqual(got, want) {
				t.Errorf("tokenize(%q) = %q, _; want %q, _", spec.src, got, want)
			}

		default:
			if got, want := verb, ""; got != want {
				t.Errorf("tokenize(%q) = _, %q; want _, %q", spec.src, got, want)
			}

			src := fmt.Sprintf("%s:%s", spec.src, "LOCK")
			tokens, verb = tokenize(src)
			if got, want := tokens, spec.tokens; !reflect.DeepEqual(got, want) {
				t.Errorf("tokenize(%q) = %q, _; want %q, _", src, got, want)
			}
			if got, want := verb, "LOCK"; got != want {
				t.Errorf("tokenize(%q) = _, %q; want _, %q", src, got, want)
			}
		}
	}
}

func TestParseSegments(t *testing.T) {
	for _, spec := range []struct {
		tokens []string
		want   []segment
	}{
		{
			tokens: []string{eof},
			want: []segment{
				literal(eof),
			},
		},
		{
			// Note: this case will never arise as tokenize() will never return such a sequence of tokens
			// and even if it does it will be treated as [eof]
			tokens: []string{eof, "v1", eof},
			want: []segment{
				literal(eof),
			},
		},
		{
			tokens: []string{"v1", eof},
			want: []segment{
				literal("v1"),
			},
		},
		{
			tokens: []string{"/", eof},
			want: []segment{
				wildcard{},
			},
		},
		{
			tokens: []string{"-._~!$&'()*+,;=:@", eof},
			want: []segment{
				literal("-._~!$&'()*+,;=:@"),
			},
		},
		{
			tokens: []string{"%e7%ac%ac%e4%b8%80%e7%89%88", eof},
			want: []segment{
				literal("%e7%ac%ac%e4%b8%80%e7%89%88"),
			},
		},
		{
			tokens: []string{"v1", "/", "*", eof},
			want: []segment{
				literal("v1"),
				wildcard{},
			},
		},
		{
			tokens: []string{"v1", "/", "**", eof},
			want: []segment{
				literal("v1"),
				deepWildcard{},
			},
		},
		{
			tokens: []string{"{", "name", "}", eof},
			want: []segment{
				variable{
					path: "name",
					segments: []segment{
						wildcard{},
					},
				},
			},
		},
		{
			tokens: []string{"{", "name", "=", "*", "}", eof},
			want: []segment{
				variable{
					path: "name",
					segments: []segment{
						wildcard{},
					},
				},
			},
		},
		{
			tokens: []string{"{", "field", ".", "nested", ".", "nested2", "=", "*", "}", eof},
			want: []segment{
				variable{
					path: "field.nested.nested2",
					segments: []segment{
						wildcard{},
					},
				},
			},
		},
		{
			tokens: []string{"{", "name", "=", "a", "/", "b", "/", "*", "}", eof},
			want: []segment{
				variable{
					path: "name",
					segments: []segment{
						literal("a"),
						literal("b"),
						wildcard{},
					},
				},
			},
		},
		{
			tokens: []string{
				"v1", "/",
				"{",
				"name", ".", "nested", ".", "nested2",
				"=",
				"a", "/", "b", "/", "*",
				"}", "/",
				"o", "/",
				"{",
				"another_name",
				"=",
				"a", "/", "b", "/", "*", "/", "c",
				"}", "/",
				"**",
				eof,
			},
			want: []segment{
				literal("v1"),
				variable{
					path: "name.nested.nested2",
					segments: []segment{
						literal("a"),
						literal("b"),
						wildcard{},
					},
				},
				literal("o"),
				variable{
					path: "another_name",
					segments: []segment{
						literal("a"),
						literal("b"),
						wildcard{},
						literal("c"),
					},
				},
				deepWildcard{},
			},
		},
	} {
		p := parser{tokens: spec.tokens}
		segs, err := p.topLevelSegments()
		if err != nil {
			t.Errorf("parser{%q}.segments() failed with %v; want success", spec.tokens, err)
			continue
		}
		if got, want := segs, spec.want; !reflect.DeepEqual(got, want) {
			t.Errorf("parser{%q}.segments() = %#v; want %#v", spec.tokens, got, want)
		}
		if got := p.tokens; len(got) > 0 {
			t.Errorf("p.tokens = %q; want []; spec.tokens=%q", got, spec.tokens)
		}
	}
}

func TestParseSegmentsWithErrors(t *testing.T) {
	for _, spec := range []struct {
		tokens []string
	}{
		{
			// double slash
			tokens: []string{"//", eof},
		},
		{
			// invalid literal
			tokens: []string{"a?b", eof},
		},
		{
			// invalid percent-encoding
			tokens: []string{"%", eof},
		},
		{
			// invalid percent-encoding
			tokens: []string{"%2", eof},
		},
		{
			// invalid percent-encoding
			tokens: []string{"a%2z", eof},
		},
		{
			// unterminated variable
			tokens: []string{"{", "name", eof},
		},
		{
			// unterminated variable
			tokens: []string{"{", "name", "=", eof},
		},
		{
			// unterminated variable
			tokens: []string{"{", "name", "=", "*", eof},
		},
		{
			// empty component in field path
			tokens: []string{"{", "name", ".", "}", eof},
		},
		{
			// empty component in field path
			tokens: []string{"{", "name", ".", ".", "nested", "}", eof},
		},
		{
			// invalid character in identifier
			tokens: []string{"{", "field-name", "}", eof},
		},
		{
			// no slash between segments
			tokens: []string{"v1", "endpoint", eof},
		},
		{
			// no slash between segments
			tokens: []string{"v1", "{", "name", "}", eof},
		},
	} {
		p := parser{tokens: spec.tokens}
		segs, err := p.topLevelSegments()
		if err == nil {
			t.Errorf("parser{%q}.segments() succeeded; want InvalidTemplateError; accepted %#v", spec.tokens, segs)
			continue
		}
		logger.Info(context.TODO(), err.Error())
	}
}
@@ -1,24 +0,0 @@
package router

// download from https://raw.githubusercontent.com/grpc-ecosystem/grpc-gateway/master/utilities/pattern.go

// An OpCode is a opcode of compiled path patterns.
type OpCode int

// These constants are the valid values of OpCode.
const (
	// OpNop does nothing
	OpNop = OpCode(iota)
	// OpPush pushes a component to stack
	OpPush
	// OpLitPush pushes a component to stack if it matches to the literal
	OpLitPush
	// OpPushM concatenates the remaining components and pushes it to stack
	OpPushM
	// OpConcatN pops N items from stack, concatenates them and pushes it back to stack
	OpConcatN
	// OpCapture pops an item and binds it to the variable
	OpCapture
	// OpEnd is the least positive invalid opcode.
	OpEnd
)
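Note (not part of the diff): a hypothetical, hand-assembled opcode stream illustrating the flat opcode/operand layout these constants describe; it matches what Compile emits for "/v1/{name=*}" and what NewPattern consumes.

package router

// exampleOps/examplePool are illustrative values only (not part of the
// deleted code): every opcode is followed by one integer operand, and
// OpLitPush/OpCapture operands index into the constant pool.
var (
	exampleOps = []int{
		int(OpLitPush), 0, // match the literal pool[0] == "v1"
		int(OpPush), 0, // match any single path component
		int(OpConcatN), 1, // join the top 1 stack entry
		int(OpCapture), 1, // bind the result to pool[1] == "name"
	}
	examplePool = []string{"v1", "name"}
)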
@@ -1,32 +0,0 @@
package router

import (
	"github.com/unistack-org/micro/v3/register"
	"github.com/unistack-org/micro/v3/router"
)

type apiRouter struct {
	router.Router
	routes []router.Route
}

func (r *apiRouter) Lookup(...router.QueryOption) ([]router.Route, error) {
	return r.routes, nil
}

func (r *apiRouter) String() string {
	return "api"
}

// New router is a hack for API routing
func New(srvs []*register.Service) router.Router {
	var routes []router.Route

	for _, srv := range srvs {
		for _, n := range srv.Nodes {
			routes = append(routes, router.Route{Address: n.Address, Metadata: n.Metadata})
		}
	}

	return &apiRouter{routes: routes}
}
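Note (not part of the diff): a sketch of how this static API router was built from registered services, assuming router.Router exposes Lookup with the signature overridden above; the service value here is illustrative only.

package router

import (
	"github.com/unistack-org/micro/v3/register"
)

// exampleAPIRouter is a hypothetical snippet (not part of the deleted code).
func exampleAPIRouter() error {
	srvs := []*register.Service{
		{
			Nodes: []*register.Node{
				{Address: "10.0.0.1:8080", Metadata: map[string]string{"protocol": "http"}},
			},
		},
	}
	r := New(srvs)
	routes, err := r.Lookup() // one router.Route per registered node
	if err != nil {
		return err
	}
	_ = routes // each route carries the node address and metadata
	return nil
}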
@@ -1,259 +0,0 @@
package router

// download from https://raw.githubusercontent.com/grpc-ecosystem/grpc-gateway/master/runtime/pattern.go

import (
	"context"
	"errors"
	"fmt"
	"strings"

	"github.com/unistack-org/micro/v3/logger"
)

var (
	// ErrNotMatch indicates that the given HTTP request path does not match to the pattern.
	ErrNotMatch = errors.New("not match to the path pattern")
	// ErrInvalidPattern indicates that the given definition of Pattern is not valid.
	ErrInvalidPattern = errors.New("invalid pattern")
)

type rop struct {
	code    OpCode
	operand int
}

// Pattern is a template pattern of http request paths defined in
// https://github.com/googleapis/googleapis/blob/master/google/api/http.proto
type Pattern struct {
	// ops is a list of operations
	ops []rop
	// pool is a constant pool indexed by the operands or vars
	pool []string
	// vars is a list of variables names to be bound by this pattern
	vars []string
	// stacksize is the max depth of the stack
	stacksize int
	// tailLen is the length of the fixed-size segments after a deep wildcard
	tailLen int
	// verb is the VERB part of the path pattern. It is empty if the pattern does not have VERB part.
	verb string
}

// NewPattern returns a new Pattern from the given definition values.
// "ops" is a sequence of op codes. "pool" is a constant pool.
// "verb" is the verb part of the pattern. It is empty if the pattern does not have the part.
// "version" must be 1 for now.
// It returns an error if the given definition is invalid.
func NewPattern(version int, ops []int, pool []string, verb string) (Pattern, error) {
	if version != 1 {
		if logger.V(logger.TraceLevel) {
			logger.Trace(context.TODO(), "unsupported version: %d", version)
		}
		return Pattern{}, ErrInvalidPattern
	}

	l := len(ops)
	if l%2 != 0 {
		if logger.V(logger.TraceLevel) {
			logger.Trace(context.TODO(), "odd number of ops codes: %d", l)
		}
		return Pattern{}, ErrInvalidPattern
	}

	var (
		typedOps        []rop
		stack, maxstack int
		tailLen         int
		pushMSeen       bool
		vars            []string
	)
	for i := 0; i < l; i += 2 {
		op := rop{code: OpCode(ops[i]), operand: ops[i+1]}
		switch op.code {
		case OpNop:
			continue
		case OpPush:
			if pushMSeen {
				tailLen++
			}
			stack++
		case OpPushM:
			if pushMSeen {
				if logger.V(logger.TraceLevel) {
					logger.Trace(context.TODO(), "pushM appears twice")
				}
				return Pattern{}, ErrInvalidPattern
			}
			pushMSeen = true
			stack++
		case OpLitPush:
			if op.operand < 0 || len(pool) <= op.operand {
				if logger.V(logger.TraceLevel) {
					logger.Trace(context.TODO(), "negative literal index: %d", op.operand)
				}
				return Pattern{}, ErrInvalidPattern
			}
			if pushMSeen {
				tailLen++
			}
			stack++
		case OpConcatN:
			if op.operand <= 0 {
				if logger.V(logger.TraceLevel) {
					logger.Trace(context.TODO(), "negative concat size: %d", op.operand)
				}
				return Pattern{}, ErrInvalidPattern
			}
			stack -= op.operand
			if stack < 0 {
				if logger.V(logger.TraceLevel) {
					logger.Trace(context.TODO(), "stack underflow")
				}
				return Pattern{}, ErrInvalidPattern
			}
			stack++
		case OpCapture:
			if op.operand < 0 || len(pool) <= op.operand {
				if logger.V(logger.TraceLevel) {
					logger.Trace(context.TODO(), "variable name index out of bound: %d", op.operand)
				}
				return Pattern{}, ErrInvalidPattern
			}
			v := pool[op.operand]
			op.operand = len(vars)
			vars = append(vars, v)
			stack--
			if stack < 0 {
				if logger.V(logger.TraceLevel) {
					logger.Trace(context.TODO(), "stack underflow")
				}
				return Pattern{}, ErrInvalidPattern
			}
		default:
			if logger.V(logger.TraceLevel) {
				logger.Trace(context.TODO(), "invalid opcode: %d", op.code)
			}
			return Pattern{}, ErrInvalidPattern
		}

		if maxstack < stack {
			maxstack = stack
		}
		typedOps = append(typedOps, op)
	}
	return Pattern{
		ops:       typedOps,
		pool:      pool,
		vars:      vars,
		stacksize: maxstack,
		tailLen:   tailLen,
		verb:      verb,
	}, nil
}

// MustPattern is a helper function which makes it easier to call NewPattern in variable initialization.
func MustPattern(p Pattern, err error) Pattern {
	if err != nil {
		if logger.V(logger.FatalLevel) {
			logger.Fatal(context.TODO(), "Pattern initialization failed: %v", err)
		}
	}
	return p
}

// Match examines components if it matches to the Pattern.
// If it matches, the function returns a mapping from field paths to their captured values.
// If otherwise, the function returns an error.
//nolint:gocyclo
func (p Pattern) Match(components []string, verb string) (map[string]string, error) {
	if p.verb != verb {
		if p.verb != "" {
			return nil, ErrNotMatch
		}
		if len(components) == 0 {
			components = []string{":" + verb}
		} else {
			components = append([]string{}, components...)
			components[len(components)-1] += ":" + verb
		}
	}

	var pos int
	stack := make([]string, 0, p.stacksize)
	captured := make([]string, len(p.vars))
	l := len(components)
	for _, op := range p.ops {
		switch op.code {
		case OpNop:
			continue
		case OpPush, OpLitPush:
			if pos >= l {
				return nil, ErrNotMatch
			}
			c := components[pos]
			if op.code == OpLitPush {
				if lit := p.pool[op.operand]; c != lit {
					return nil, ErrNotMatch
				}
			}
			stack = append(stack, c)
			pos++
		case OpPushM:
			end := len(components)
			if end < pos+p.tailLen {
				return nil, ErrNotMatch
			}
			end -= p.tailLen
			stack = append(stack, strings.Join(components[pos:end], "/"))
			pos = end
		case OpConcatN:
			n := op.operand
			l := len(stack) - n
			stack = append(stack[:l], strings.Join(stack[l:], "/"))
		case OpCapture:
			n := len(stack) - 1
			captured[op.operand] = stack[n]
			stack = stack[:n]
		}
	}
	if pos < l {
		return nil, ErrNotMatch
	}
	bindings := make(map[string]string, len(captured))
	for i, val := range captured {
		bindings[p.vars[i]] = val
	}
	return bindings, nil
}

// Verb returns the verb part of the Pattern.
func (p Pattern) Verb() string { return p.verb }

func (p Pattern) String() string {
	var stack []string
	for _, op := range p.ops {
		switch op.code {
		case OpNop:
			continue
		case OpPush:
			stack = append(stack, "*")
		case OpLitPush:
			stack = append(stack, p.pool[op.operand])
		case OpPushM:
			stack = append(stack, "**")
		case OpConcatN:
			n := op.operand
			l := len(stack) - n
			stack = append(stack[:l], strings.Join(stack[l:], "/"))
		case OpCapture:
			n := len(stack) - 1
			stack[n] = fmt.Sprintf("{%s=%s}", p.vars[op.operand], stack[n])
		}
	}
	segs := strings.Join(stack, "/")
	if p.verb != "" {
		return fmt.Sprintf("/%s:%s", segs, p.verb)
	}
	return "/" + segs
}
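Note (not part of the diff): a minimal sketch, under the same-package assumption, tying the removed pieces together: parse and compile a template, build a Pattern from the opcode stream, then match path components against it.

package router

import "fmt"

// examplePatternMatch is a hypothetical snippet (not part of the deleted code).
func examplePatternMatch() error {
	compiler, err := Parse("/v1/b/{bucket_name=buckets/*}")
	if err != nil {
		return err
	}
	tmpl := compiler.Compile()

	pat, err := NewPattern(tmpl.Version, tmpl.OpCodes, tmpl.Pool, tmpl.Verb)
	if err != nil {
		return err
	}

	// Components are the slash-separated parts of the request path.
	vars, err := pat.Match([]string{"v1", "b", "buckets", "mybucket"}, "")
	if err != nil {
		return err // ErrNotMatch when the path does not fit the pattern
	}
	fmt.Println(vars["bucket_name"]) // "buckets/mybucket"
	return nil
}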
@@ -1,62 +0,0 @@
package router

// download from https://raw.githubusercontent.com/grpc-ecosystem/grpc-gateway/master/protoc-gen-grpc-gateway/httprule/types.go

import (
	"fmt"
	"strings"
)

type template struct {
	segments []segment
	verb     string
	template string
}

type segment interface {
	fmt.Stringer
	compile() (ops []op)
}

type wildcard struct{}

type deepWildcard struct{}

type literal string

type variable struct {
	path     string
	segments []segment
}

func (wildcard) String() string {
	return "*"
}

func (deepWildcard) String() string {
	return "**"
}

func (l literal) String() string {
	return string(l)
}

func (v variable) String() string {
	segs := make([]string, 0, len(v.segments))
	for _, s := range v.segments {
		segs = append(segs, s.String())
	}
	return fmt.Sprintf("{%s=%s}", v.path, strings.Join(segs, "/"))
}

func (t template) String() string {
	segs := make([]string, 0, len(t.segments))
	for _, s := range t.segments {
		segs = append(segs, s.String())
	}
	str := strings.Join(segs, "/")
	if t.verb != "" {
		str = fmt.Sprintf("%s:%s", str, t.verb)
	}
	return "/" + str
}
@@ -1,93 +0,0 @@
package router

// download from https://raw.githubusercontent.com/grpc-ecosystem/grpc-gateway/master/protoc-gen-grpc-gateway/httprule/types_test.go

import (
	"fmt"
	"testing"
)

func TestTemplateStringer(t *testing.T) {
	for _, spec := range []struct {
		segs []segment
		want string
	}{
		{
			segs: []segment{
				literal("v1"),
			},
			want: "/v1",
		},
		{
			segs: []segment{
				wildcard{},
			},
			want: "/*",
		},
		{
			segs: []segment{
				deepWildcard{},
			},
			want: "/**",
		},
		{
			segs: []segment{
				variable{
					path: "name",
					segments: []segment{
						literal("a"),
					},
				},
			},
			want: "/{name=a}",
		},
		{
			segs: []segment{
				variable{
					path: "name",
					segments: []segment{
						literal("a"),
						wildcard{},
						literal("b"),
					},
				},
			},
			want: "/{name=a/*/b}",
		},
		{
			segs: []segment{
				literal("v1"),
				variable{
					path: "name",
					segments: []segment{
						literal("a"),
						wildcard{},
						literal("b"),
					},
				},
				literal("c"),
				variable{
					path: "field.nested",
					segments: []segment{
						wildcard{},
						literal("d"),
					},
				},
				wildcard{},
				literal("e"),
				deepWildcard{},
			},
			want: "/v1/{name=a/*/b}/c/{field.nested=*/d}/*/e/**",
		},
	} {
		tmpl := template{segments: spec.segs}
		if got, want := tmpl.String(), spec.want; got != want {
			t.Errorf("%#v.String() = %q; want %q", tmpl, got, want)
		}

		tmpl.verb = "LOCK"
		if got, want := tmpl.String(), fmt.Sprintf("%s:LOCK", spec.want); got != want {
			t.Errorf("%#v.String() = %q; want %q", tmpl, got, want)
		}
	}
}