fixes and improvements #55
@@ -8,18 +8,18 @@ const (
 // Template is a compiled representation of path templates.
 type Template struct {
-	// Verb is a VERB part in the template
-	Verb string
-	// Original template (example: /v1/a_bit_of_everything)
-	Template string
-	// OpCodes is a sequence of operations
+	// Version is the version number of the format.
+	Version int
+	// OpCodes is a sequence of operations.
 	OpCodes []int
 	// Pool is a constant pool
 	Pool []string
-	// Fields is a list of field paths bound in this template
+	// Verb is a VERB part in the template.
+	Verb string
+	// Fields is a list of field paths bound in this template.
 	Fields []string
-	// Version is the version number of the format
-	Version int
+	// Original template (example: /v1/a_bit_of_everything)
+	Template string
 }
 
 // Compiler compiles utilities representation of path templates into marshallable operations.
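As a reading aid for the reordered struct: a compiled template is the flat opcode stream plus its constant pool, with the descriptive fields (Verb, Fields, Template) trailing. Below is a hedged, in-package sketch of what a compiled value for a purely literal path might look like; the exact opcode sequence is an assumption for illustration (OpLitPush is the only opcode referenced elsewhere in this diff), not verified output of Compile().

```go
// Illustrative only; assumes the package's own Template type and the
// utilities opcodes. Field order follows the new declaration above.
t := Template{
	Version: 1,
	// Flat (opcode, operand) pairs; for OpLitPush the operand indexes Pool.
	OpCodes:  []int{int(utilities.OpLitPush), 0, int(utilities.OpLitPush), 1},
	Pool:     []string{"v1", "example"},
	Verb:     "",  // set only for patterns such as "/v1/example:watch"
	Fields:   nil, // no variables are bound in a literal-only template
	Template: "/v1/example",
}
_ = t
```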
@@ -29,9 +29,15 @@ type Compiler interface {
 }
 
 type op struct {
+	// code is the opcode of the operation
+	code utilities.OpCode
+
+	// str is a string operand of the code.
+	// num is ignored if str is not empty.
 	str string
-	code OpCode
-	operand int
+
+	// num is a numeric operand of the code.
+	num int
 }
 
 func (w wildcard) compile() []op {
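For contrast, the two operand forms the reworked struct supports, as a small in-package sketch; OpLitPush is the only opcode named elsewhere in this diff, and the zero-valued OpCode on the second line is just a placeholder, not a real opcode choice.

```go
lit := op{code: utilities.OpLitPush, str: "v1"} // str set: num is ignored, the operand becomes a pool index
cnt := op{code: utilities.OpCode(0), num: 2}    // str empty: num is emitted directly as the operand
_, _ = lit, cnt
```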
@@ -77,7 +83,6 @@ func (t template) Compile() Template {
 		rawOps = append(rawOps, s.compile()...)
 	}
 
-	// ops := make([]int, 0, len(rawOps))
 	var (
 		ops  []int
 		pool []string
@@ -87,8 +92,12 @@ func (t template) Compile() Template {
 	for _, op := range rawOps {
 		ops = append(ops, int(op.code))
 		if op.str == "" {
-			ops = append(ops, op.operand)
+			ops = append(ops, op.num)
 		} else {
+			// eof segment literal represents the "/" path pattern
+			if op.str == eof {
+				op.str = ""
+			}
 			if _, ok := consts[op.str]; !ok {
 				consts[op.str] = len(pool)
 				pool = append(pool, op.str)
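To make the flattening and the new eof handling concrete: every op contributes an (opcode, operand) pair, string operands are interned into the pool, and the eof sentinel is stored as the empty string, so the root "/" template ends up as ops = [OpLitPush, 0] with pool = [""], which is exactly what the new TestCompile case in the next hunk asserts. A hedged in-package helper, not part of this change, for walking that flattened form:

```go
// walkOps pairs each opcode with its operand, mirroring the layout produced
// by the loop above. For string-operand opcodes such as OpLitPush, the
// operand is an index into the template's Pool.
func walkOps(ops []int, visit func(code utilities.OpCode, operand int)) {
	for i := 0; i+1 < len(ops); i += 2 {
		visit(utilities.OpCode(ops[i]), ops[i+1])
	}
}
```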
@@ -21,6 +21,13 @@ func TestCompile(t *testing.T) {
 		fields []string
 	}{
 		{},
+		{
+			segs: []segment{
+				literal(eof),
+			},
+			ops:  []int{int(utilities.OpLitPush), 0},
+			pool: []string{""},
+		},
 		{
 			segs: []segment{
 				wildcard{},
@@ -83,8 +83,30 @@ func tokenize(path string) (tokens []string, verb string) {
 	}
 
 	l := len(tokens)
+	// See
+	// https://github.com/grpc-ecosystem/grpc-gateway/pull/1947#issuecomment-774523693;
+	// although normal and backwards-compat logic here is to use the last index
+	// of a colon, if the final segment is a variable followed by a colon, the
+	// part following the colon must be a verb. Hence if the previous token is
+	// an end var marker, we switch the index we're looking for to Index instead
+	// of LastIndex, so that we correctly grab the remaining part of the path as
+	// the verb.
+	var penultimateTokenIsEndVar bool
+	switch l {
+	case 0, 1:
+		// Not enough to be variable so skip this logic and don't result in an
+		// invalid index
+	default:
+		penultimateTokenIsEndVar = tokens[l-2] == "}"
+	}
 	t := tokens[l-1]
-	if idx := strings.LastIndex(t, ":"); idx == 0 {
+	var idx int
+	if penultimateTokenIsEndVar {
+		idx = strings.Index(t, ":")
+	} else {
+		idx = strings.LastIndex(t, ":")
+	}
+	if idx == 0 {
 		tokens, verb = tokens[:l-1], t[1:]
 	} else if idx > 0 {
 		tokens[l-1], verb = t[:idx], t[idx+1:]
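A self-contained sketch of the rule the comment above describes, runnable on its own with just the standard library. The token slices are hand-written stand-ins for what tokenize() is assumed to hold just before the verb is split off; splitVerb mirrors the new Index/LastIndex choice rather than calling into this package.

```go
package main

import (
	"fmt"
	"strings"
)

// splitVerb mirrors the new logic: when the token before the last one is the
// end-of-variable marker "}", the first colon (not the last) starts the verb.
func splitVerb(tokens []string) (kept []string, verb string) {
	l := len(tokens)
	t := tokens[l-1]
	penultimateIsEndVar := l > 1 && tokens[l-2] == "}"
	var idx int
	if penultimateIsEndVar {
		idx = strings.Index(t, ":")
	} else {
		idx = strings.LastIndex(t, ":")
	}
	switch {
	case idx == 0:
		return tokens[:l-1], t[1:]
	case idx > 0:
		tokens[l-1] = t[:idx]
		return tokens, t[idx+1:]
	}
	return tokens, ""
}

func main() {
	// "/v1/a/{endpoint}:b:c": everything after the variable's colon is the verb.
	_, verb := splitVerb([]string{"v1", "/", "a", "/", "{", "endpoint", "}", ":b:c"})
	fmt.Println(verb) // b:c  (LastIndex would have produced just "c")

	// "/v1/e:book": no trailing variable, so the last colon still wins.
	_, verb = splitVerb([]string{"v1", "/", "e:book"})
	fmt.Println(verb) // book
}
```

The two new TestTokenize cases further down exercise exactly these shapes.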
@@ -101,22 +123,17 @@ type parser struct {
 
 // topLevelSegments is the target of this parser.
 func (p *parser) topLevelSegments() ([]segment, error) {
-	if logger.V(logger.TraceLevel) {
-		logger.Trace(context.TODO(), "Parsing %q", p.tokens)
+	if _, err := p.accept(typeEOF); err == nil {
+		p.tokens = p.tokens[:0]
+		return []segment{literal(eof)}, nil
 	}
 	segs, err := p.segments()
 	if err != nil {
 		return nil, err
 	}
-	if logger.V(logger.TraceLevel) {
-		logger.Trace(context.TODO(), "accept segments: %q; %q", p.accepted, p.tokens)
-	}
 	if _, err := p.accept(typeEOF); err != nil {
 		return nil, fmt.Errorf("unexpected token %q after segments %q", p.tokens[0], strings.Join(p.accepted, ""))
 	}
-	if logger.V(logger.TraceLevel) {
-		logger.Trace(context.TODO(), "accept eof: %q; %q", p.accepted, p.tokens)
-	}
 	return segs, nil
 }
 
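Read together with the new TestParseSegments case further down (a token stream consisting of just eof), the early accept above means such a stream now parses to a single literal(eof) segment instead of erroring out. A hedged in-package sketch; constructing the parser by struct literal mirrors how the tests do it and is an assumption, not a documented API:

```go
p := parser{tokens: []string{eof}}
segs, err := p.topLevelSegments()
// err  == nil
// segs == []segment{literal(eof)}
_, _ = segs, err
```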
@@ -126,9 +143,6 @@ func (p *parser) segments() ([]segment, error) {
 		return nil, err
 	}
 
-	if logger.V(logger.TraceLevel) {
-		logger.Trace(context.TODO(), "accept segment: %q; %q", p.accepted, p.tokens)
-	}
 	segs := []segment{s}
 	for {
 		if _, err := p.accept("/"); err != nil {
@@ -139,9 +153,6 @@ func (p *parser) segments() ([]segment, error) {
 			return segs, err
 		}
 		segs = append(segs, s)
-		if logger.V(logger.TraceLevel) {
-			logger.Trace(context.TODO(), "accept segment: %q; %q", p.accepted, p.tokens)
-		}
 	}
 }
 
@@ -16,6 +16,7 @@ func TestTokenize(t *testing.T) {
 	for _, spec := range []struct {
 		src    string
 		tokens []string
+		verb   string
 	}{
 		{
 			src: "",
@@ -84,6 +85,26 @@ func TestTokenize(t *testing.T) {
 				eof,
 			},
 		},
+		{
+			src: "v1/a/{endpoint}:a",
+			tokens: []string{
+				"v1", "/",
+				"a", "/",
+				"{", "endpoint", "}",
+				eof,
+			},
+			verb: "a",
+		},
+		{
+			src: "v1/a/{endpoint}:b:c",
+			tokens: []string{
+				"v1", "/",
+				"a", "/",
+				"{", "endpoint", "}",
+				eof,
+			},
+			verb: "b:c",
+		},
 	} {
 		tokens, verb := tokenize(spec.src)
 		if got, want := tokens, spec.tokens; !reflect.DeepEqual(got, want) {
@@ -93,6 +114,17 @@ func TestTokenize(t *testing.T) {
 			t.Errorf("tokenize(%q) = _, %q; want _, %q", spec.src, got, want)
 		}
 
+		switch {
+		case spec.verb != "":
+			if got, want := verb, spec.verb; !reflect.DeepEqual(got, want) {
+				t.Errorf("tokenize(%q) = %q, _; want %q, _", spec.src, got, want)
+			}
+
+		default:
+			if got, want := verb, ""; got != want {
+				t.Errorf("tokenize(%q) = _, %q; want _, %q", spec.src, got, want)
+			}
+
 			src := fmt.Sprintf("%s:%s", spec.src, "LOCK")
 			tokens, verb = tokenize(src)
 			if got, want := tokens, spec.tokens; !reflect.DeepEqual(got, want) {
@@ -102,14 +134,32 @@ func TestTokenize(t *testing.T) {
 				t.Errorf("tokenize(%q) = _, %q; want _, %q", src, got, want)
 			}
 		}
+	}
 }
 
 func TestParseSegments(t *testing.T) {
-	flag.Set("v", "3")
+	err := flag.Set("v", "3")
+	if err != nil {
+		t.Fatalf("failed to set flag: %v", err)
+	}
 	for _, spec := range []struct {
 		tokens []string
 		want   []segment
 	}{
+		{
+			tokens: []string{eof},
+			want: []segment{
+				literal(eof),
+			},
+		},
+		{
+			// Note: this case will never arise as tokenize() will never return such a sequence of tokens
+			// and even if it does it will be treated as [eof]
+			tokens: []string{eof, "v1", eof},
+			want: []segment{
+				literal(eof),
+			},
+		},
 		{
 			tokens: []string{"v1", eof},
 			want: []segment{
@@ -251,7 +301,10 @@ func TestParseSegments(t *testing.T) {
 }
 
 func TestParseSegmentsWithErrors(t *testing.T) {
-	flag.Set("v", "3")
+	err := flag.Set("v", "3")
+	if err != nil {
+		t.Fatalf("failed to set flag: %v", err)
+	}
 	for _, spec := range []struct {
 		tokens []string
 	}{
@@ -275,10 +328,6 @@ func TestParseSegmentsWithErrors(t *testing.T) {
 			// invalid percent-encoding
 			tokens: []string{"a%2z", eof},
 		},
-		{
-			// empty segments
-			tokens: []string{eof},
-		},
 		{
 			// unterminated variable
 			tokens: []string{"{", "name", eof},
@@ -23,9 +23,9 @@ type rop struct {
 	operand int
 }
 
-// Pattern is a template pattern of http request paths defined in github.com/googleapis/googleapis/google/api/http.proto.
+// Pattern is a template pattern of http request paths defined in
+// https://github.com/googleapis/googleapis/blob/master/google/api/http.proto
 type Pattern struct {
-	verb string
 	// ops is a list of operations
 	ops []rop
 	// pool is a constant pool indexed by the operands or vars
@@ -36,32 +36,16 @@ type Pattern struct {
 	stacksize int
 	// tailLen is the length of the fixed-size segments after a deep wildcard
 	tailLen int
-	// assumeColonVerb indicates whether a path suffix after a final
-	// colon may only be interpreted as a verb.
-	assumeColonVerb bool
+	// verb is the VERB part of the path pattern. It is empty if the pattern does not have VERB part.
+	verb string
 }
 
-type patternOptions struct {
-	assumeColonVerb bool
-}
-
-// PatternOpt is an option for creating Patterns.
-type PatternOpt func(*patternOptions)
-
 // NewPattern returns a new Pattern from the given definition values.
 // "ops" is a sequence of op codes. "pool" is a constant pool.
 // "verb" is the verb part of the pattern. It is empty if the pattern does not have the part.
 // "version" must be 1 for now.
 // It returns an error if the given definition is invalid.
-//nolint:gocyclo
-func NewPattern(version int, ops []int, pool []string, verb string, opts ...PatternOpt) (Pattern, error) {
-	options := patternOptions{
-		assumeColonVerb: true,
-	}
-	for _, o := range opts {
-		o(&options)
-	}
-
+func NewPattern(version int, ops []int, pool []string, verb string) (Pattern, error) {
 	if version != 1 {
 		if logger.V(logger.TraceLevel) {
 			logger.Trace(context.TODO(), "unsupported version: %d", version)
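With patternOptions, PatternOpt and (further down) AssumeColonVerbOpt removed, call sites simply drop the trailing option argument. A hedged sketch of the migration; ops, pool and verb are placeholders for values coming out of a compiled Template:

```go
// before: p, err := NewPattern(1, ops, pool, verb, AssumeColonVerbOpt(true))
p, err := NewPattern(1, ops, pool, verb)
if err != nil {
	// invalid definition, e.g. a version other than 1
}
_ = p
```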
@@ -185,7 +169,7 @@ func MustPattern(p Pattern, err error) Pattern {
 //nolint:gocyclo
 func (p Pattern) Match(components []string, verb string) (map[string]string, error) {
 	if p.verb != verb {
-		if p.assumeColonVerb || p.verb != "" {
+		if p.verb != "" {
 			return nil, ErrNotMatch
 		}
 		if len(components) == 0 {
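A hedged illustration of the simplified check above, assuming p was compiled from a pattern carrying the verb "watch"; the component slice is a placeholder for an already-split request path:

```go
if _, err := p.Match([]string{"v1", "foo"}, ""); err != nil {
	// ErrNotMatch: the pattern demands the ":watch" suffix and the request lacks it
}
```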
@@ -274,11 +258,3 @@ func (p Pattern) String() string {
 	}
 	return "/" + segs
 }
-
-// AssumeColonVerbOpt indicates whether a path suffix after a final
-// colon may only be interpreted as a verb.
-func AssumeColonVerbOpt(val bool) PatternOpt {
-	return PatternOpt(func(o *patternOptions) {
-		o.assumeColonVerb = val
-	})
-}
@@ -8,9 +8,9 @@ import (
 )
 
 type template struct {
+	segments []segment
 	verb     string
 	template string
-	segments []segment
 }
 
 type segment interface {
Loading…
Reference in New Issue
Block a user