fixes and improvements (#55)
* util/router: sync from github
* config: add watcher interface

Signed-off-by: Vasiliy Tolstov <v.tolstov@unistack.org>
This commit was merged in pull request #55.
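The synced util/router tests in the diff below exercise custom-verb handling in route patterns: a template such as "v1/a/{endpoint}:a" tokenizes into the path tokens plus a separate verb ("a"), and "v1/a/{endpoint}:b:c" keeps everything after the first colon of the final component as the verb ("b:c"). As a rough, hypothetical illustration of the splitting rule these test cases appear to encode (splitVerb is not the package's tokenize, which also returns the full token slice), a minimal standalone sketch:

// Hypothetical sketch only: mirrors the path/verb split that the updated
// TestTokenize cases appear to expect; the real tokenize() in util/router
// returns the token stream ("v1", "/", "{", "endpoint", "}", ..., eof)
// alongside the verb.
package main

import (
	"fmt"
	"strings"
)

// splitVerb separates a route pattern into its path part and an optional
// custom verb by splitting on the first ':' in the final path component.
func splitVerb(pattern string) (path, verb string) {
	last := strings.LastIndex(pattern, "/") + 1 // start of the final component
	if c := strings.Index(pattern[last:], ":"); c >= 0 {
		return pattern[:last+c], pattern[last+c+1:]
	}
	return pattern, ""
}

func main() {
	cases := []string{
		"v1/a/{endpoint}:a",   // verb "a"
		"v1/a/{endpoint}:b:c", // verb "b:c"
		"v1/a",                // no verb
	}
	for _, src := range cases {
		path, verb := splitVerb(src)
		fmt.Printf("%q -> path=%q verb=%q\n", src, path, verb)
	}
}

The table-driven tests in the diff check both pieces: the token slice and the verb.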
@@ -4,7 +4,6 @@ package router

import (
	"context"
	"flag"
	"fmt"
	"reflect"
	"testing"
@@ -16,6 +15,7 @@ func TestTokenize(t *testing.T) {
	for _, spec := range []struct {
		src    string
		tokens []string
		verb   string
	}{
		{
			src: "",
@@ -84,32 +84,74 @@ func TestTokenize(t *testing.T) {
				eof,
			},
		},
		{
			src: "v1/a/{endpoint}:a",
			tokens: []string{
				"v1", "/",
				"a", "/",
				"{", "endpoint", "}",
				eof,
			},
			verb: "a",
		},
		{
			src: "v1/a/{endpoint}:b:c",
			tokens: []string{
				"v1", "/",
				"a", "/",
				"{", "endpoint", "}",
				eof,
			},
			verb: "b:c",
		},
	} {
		tokens, verb := tokenize(spec.src)
		if got, want := tokens, spec.tokens; !reflect.DeepEqual(got, want) {
			t.Errorf("tokenize(%q) = %q, _; want %q, _", spec.src, got, want)
		}
		if got, want := verb, ""; got != want {
			t.Errorf("tokenize(%q) = _, %q; want _, %q", spec.src, got, want)
		}

		src := fmt.Sprintf("%s:%s", spec.src, "LOCK")
		tokens, verb = tokenize(src)
		if got, want := tokens, spec.tokens; !reflect.DeepEqual(got, want) {
			t.Errorf("tokenize(%q) = %q, _; want %q, _", src, got, want)
		}
		if got, want := verb, "LOCK"; got != want {
			t.Errorf("tokenize(%q) = _, %q; want _, %q", src, got, want)
		switch {
		case spec.verb != "":
			if got, want := verb, spec.verb; !reflect.DeepEqual(got, want) {
				t.Errorf("tokenize(%q) = %q, _; want %q, _", spec.src, got, want)
			}

		default:
			if got, want := verb, ""; got != want {
				t.Errorf("tokenize(%q) = _, %q; want _, %q", spec.src, got, want)
			}

			src := fmt.Sprintf("%s:%s", spec.src, "LOCK")
			tokens, verb = tokenize(src)
			if got, want := tokens, spec.tokens; !reflect.DeepEqual(got, want) {
				t.Errorf("tokenize(%q) = %q, _; want %q, _", src, got, want)
			}
			if got, want := verb, "LOCK"; got != want {
				t.Errorf("tokenize(%q) = _, %q; want _, %q", src, got, want)
			}
		}
	}
}

func TestParseSegments(t *testing.T) {
	flag.Set("v", "3")
	for _, spec := range []struct {
		tokens []string
		want   []segment
	}{
		{
			tokens: []string{eof},
			want: []segment{
				literal(eof),
			},
		},
		{
			// Note: this case will never arise as tokenize() will never return such a sequence of tokens
			// and even if it does it will be treated as [eof]
			tokens: []string{eof, "v1", eof},
			want: []segment{
				literal(eof),
			},
		},
		{
			tokens: []string{"v1", eof},
			want: []segment{
@@ -251,7 +293,6 @@ func TestParseSegments(t *testing.T) {
}

func TestParseSegmentsWithErrors(t *testing.T) {
	flag.Set("v", "3")
	for _, spec := range []struct {
		tokens []string
	}{
@@ -275,10 +316,6 @@ func TestParseSegmentsWithErrors(t *testing.T) {
			// invalid percent-encoding
			tokens: []string{"a%2z", eof},
		},
		{
			// empty segments
			tokens: []string{eof},
		},
		{
			// unterminated variable
			tokens: []string{"{", "name", eof},