Compare commits
13 Commits
| SHA1 |
|---|
| 8e72fb1c35 |
| 17f21a03f4 |
| a076d43a26 |
| de6efaee0b |
| 9e0e657003 |
| be5f9ab77f |
| 144dca0cae |
| 75173560e3 |
| 9b3bccd1f1 |
| ce125b77c1 |
| 2ee8d4ed46 |
| f58781d076 |
| e1af4aa3a4 |
@@ -7,7 +7,9 @@ import (
	"strings"
	"time"

	"github.com/google/uuid"
	"github.com/imdario/mergo"
	mid "go.unistack.org/micro/v3/util/id"
	rutil "go.unistack.org/micro/v3/util/reflect"
	mtime "go.unistack.org/micro/v3/util/time"
)

@@ -124,6 +126,20 @@ func fillValue(value reflect.Value, val string) error {
		}
		value.Set(reflect.ValueOf(v))
	case reflect.String:
		switch val {
		case "micro:generate uuid":
			uid, err := uuid.NewRandom()
			if err != nil {
				return err
			}
			val = uid.String()
		case "micro:generate id":
			uid, err := mid.New()
			if err != nil {
				return err
			}
			val = uid
		}
		value.Set(reflect.ValueOf(val))
	case reflect.Float32:
		v, err := strconv.ParseFloat(val, 32)
@@ -7,6 +7,7 @@ import (
	"time"

	"go.unistack.org/micro/v3/config"
	mid "go.unistack.org/micro/v3/util/id"
	mtime "go.unistack.org/micro/v3/util/time"
)

@@ -14,9 +15,12 @@ type cfg struct {
	StringValue string `default:"string_value"`
	IgnoreValue string `json:"-"`
	StructValue *cfgStructValue
	IntValue int `default:"99"`
	DurationValue time.Duration `default:"10s"`
	MDurationValue mtime.Duration `default:"10s"`
	IntValue int `default:"99"`
	DurationValue time.Duration `default:"10s"`
	MDurationValue mtime.Duration `default:"10s"`
	MapValue map[string]bool `default:"key1=true,key2=false"`
	UUIDValue string `default:"micro:generate uuid"`
	IDValue string `default:"micro:generate id"`
}

type cfgStructValue struct {

@@ -67,6 +71,21 @@ func TestDefault(t *testing.T) {
	if conf.StringValue != "after_load" {
		t.Fatal("AfterLoad option not working")
	}
	if len(conf.MapValue) != 2 {
		t.Fatalf("map value invalid: %#+v\n", conf.MapValue)
	}

	if conf.UUIDValue == "" {
		t.Fatalf("uuid value empty")
	} else if len(conf.UUIDValue) != 36 {
		t.Fatalf("uuid value invalid: %s", conf.UUIDValue)
	}

	if conf.IDValue == "" {
		t.Fatalf("id value empty")
	} else if len(conf.IDValue) != mid.DefaultSize {
		t.Fatalf("id value invalid: %s", conf.IDValue)
	}
	_ = conf
	// t.Logf("%#+v\n", conf)
}
go.mod (12 changed lines)
@@ -3,7 +3,17 @@ module go.unistack.org/micro/v3

go 1.19

require (
	github.com/imdario/mergo v0.3.14
	github.com/DATA-DOG/go-sqlmock v1.5.0
	github.com/google/uuid v1.3.0
	github.com/imdario/mergo v0.3.15
	github.com/patrickmn/go-cache v2.1.0+incompatible
	github.com/silas/dag v0.0.0-20211117232152-9d50aa809f35
	golang.org/x/sync v0.3.0
	google.golang.org/grpc v1.57.0
	google.golang.org/protobuf v1.31.0
)

require (
	github.com/golang/protobuf v1.5.3 // indirect
	google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19 // indirect
)
go.sum (27 changed lines)
@@ -1,9 +1,32 @@
github.com/imdario/mergo v0.3.14 h1:fOqeC1+nCuuk6PKQdg9YmosXX7Y7mHX6R/0ZldI9iHo=
github.com/imdario/mergo v0.3.14/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
github.com/silas/dag v0.0.0-20211117232152-9d50aa809f35 h1:4mohWoM/UGg1BvFFiqSPRl5uwJY3rVV0HQX0ETqauqQ=
github.com/silas/dag v0.0.0-20211117232152-9d50aa809f35/go.mod h1:7RTUFBdIRC9nZ7/3RyRNH1bdqIShrDejd1YbLwgPS+I=
golang.org/x/net v0.9.0 h1:aWJ/m6xSmxWBx+V0XRHTlrYrPG56jKsLdTFmsSsCzOM=
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU=
golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19 h1:0nDDozoAU19Qb2HwhXadU8OcsiO/09cnTqhUtq2MEOM=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA=
google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw=
google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -39,8 +39,6 @@ func FromOutgoingContext(ctx context.Context) (Metadata, bool) {

// FromContext returns metadata from the given context
// returned metadata shoud not be modified or race condition happens
//
// Deprecated: use FromIncomingContext or FromOutgoingContext
func FromContext(ctx context.Context) (Metadata, bool) {
	if ctx == nil {
		return nil, false

@@ -53,8 +51,6 @@ func FromContext(ctx context.Context) (Metadata, bool) {
}

// NewContext creates a new context with the given metadata
//
// Deprecated: use NewIncomingContext or NewOutgoingContext
func NewContext(ctx context.Context, md Metadata) context.Context {
	if ctx == nil {
		ctx = context.Background()
@@ -202,39 +202,6 @@ func (n *noopServer) Register() error {
	n.Lock()
	defer n.Unlock()

	cx := config.Context

	var sub broker.Subscriber

	for sb := range n.subscribers {
		if sb.Options().Context != nil {
			cx = sb.Options().Context
		}

		opts := []broker.SubscribeOption{broker.SubscribeContext(cx), broker.SubscribeAutoAck(sb.Options().AutoAck)}
		if queue := sb.Options().Queue; len(queue) > 0 {
			opts = append(opts, broker.SubscribeGroup(queue))
		}

		if sb.Options().Batch {
			// batch processing handler
			sub, err = config.Broker.BatchSubscribe(cx, sb.Topic(), n.newBatchSubHandler(sb, config), opts...)
		} else {
			// single processing handler
			sub, err = config.Broker.Subscribe(cx, sb.Topic(), n.newSubHandler(sb, config), opts...)
		}

		if err != nil {
			return err
		}

		if config.Logger.V(logger.InfoLevel) {
			config.Logger.Infof(n.opts.Context, "subscribing to topic: %s", sb.Topic())
		}

		n.subscribers[sb] = []broker.Subscriber{sub}
	}

	n.registered = true
	if cacheService {
		n.rsvc = service

@@ -366,6 +333,10 @@ func (n *noopServer) Start() error {
		}
	}

	if err := n.subscribe(); err != nil {
		return err
	}

	go func() {
		t := new(time.Ticker)

@@ -449,6 +420,45 @@ func (n *noopServer) Start() error {
	return nil
}

func (n *noopServer) subscribe() error {
	config := n.Options()

	cx := config.Context
	var err error
	var sub broker.Subscriber

	for sb := range n.subscribers {
		if sb.Options().Context != nil {
			cx = sb.Options().Context
		}

		opts := []broker.SubscribeOption{broker.SubscribeContext(cx), broker.SubscribeAutoAck(sb.Options().AutoAck)}
		if queue := sb.Options().Queue; len(queue) > 0 {
			opts = append(opts, broker.SubscribeGroup(queue))
		}

		if sb.Options().Batch {
			// batch processing handler
			sub, err = config.Broker.BatchSubscribe(cx, sb.Topic(), n.createBatchSubHandler(sb, config), opts...)
		} else {
			// single processing handler
			sub, err = config.Broker.Subscribe(cx, sb.Topic(), n.createSubHandler(sb, config), opts...)
		}

		if err != nil {
			return err
		}

		if config.Logger.V(logger.InfoLevel) {
			config.Logger.Infof(n.opts.Context, "subscribing to topic: %s", sb.Topic())
		}

		n.subscribers[sb] = []broker.Subscriber{sub}
	}

	return nil
}

func (n *noopServer) Stop() error {
	n.RLock()
	if !n.started {
@@ -191,7 +191,7 @@ func newSubscriber(topic string, sub interface{}, opts ...SubscriberOption) Subs
}

//nolint:gocyclo
func (n *noopServer) newBatchSubHandler(sb *subscriber, opts Options) broker.BatchHandler {
func (n *noopServer) createBatchSubHandler(sb *subscriber, opts Options) broker.BatchHandler {
	return func(ps broker.Events) (err error) {
		defer func() {
			if r := recover(); r != nil {

@@ -309,7 +309,7 @@ func (n *noopServer) newBatchSubHandler(sb *subscriber, opts Options) broker.Bat
}

//nolint:gocyclo
func (n *noopServer) newSubHandler(sb *subscriber, opts Options) broker.Handler {
func (n *noopServer) createSubHandler(sb *subscriber, opts Options) broker.Handler {
	return func(p broker.Event) (err error) {
		defer func() {
			if r := recover(); r != nil {
@@ -4,6 +4,8 @@ import (
	"context"
)

var _ Tracer = (*noopTracer)(nil)

type noopTracer struct {
	opts Options
}

@@ -21,6 +23,10 @@ func (t *noopTracer) Start(ctx context.Context, name string, opts ...SpanOption)
	return NewSpanContext(ctx, span), span
}

func (t *noopTracer) Flush(ctx context.Context) error {
	return nil
}

func (t *noopTracer) Init(opts ...Option) error {
	for _, o := range opts {
		o(&t.opts)
@@ -16,6 +16,8 @@ type Tracer interface {
	Init(...Option) error
	// Start a trace
	Start(ctx context.Context, name string, opts ...SpanOption) (context.Context, Span)
	// Flush flushes spans
	Flush(ctx context.Context) error
}

type Span interface {
@@ -5,8 +5,32 @@ import (
	"testing"
)

func TestTrieBackwards(t *testing.T) {
	_ = &Trie{}
func TestTrieRPC(t *testing.T) {
	var err error
	type handler struct {
		name string
	}
	tr := NewTrie()
	if err = tr.Insert([]string{"helloworld"}, "Call", &handler{name: "helloworld.Call"}); err != nil {
		t.Fatal(err)
	}
	if err = tr.Insert([]string{"helloworld"}, "Stream", &handler{name: "helloworld.Stream"}); err != nil {
		t.Fatal(err)
	}
	h, _, err := tr.Search("helloworld", "Call")
	if err != nil {
		t.Fatalf("unexpected error %v", err)
	}
	if h.(*handler).name != "helloworld.Call" {
		t.Fatalf("invalid handler %v", h)
	}
	h, _, err = tr.Search("helloworld", "Stream")
	if err != nil {
		t.Fatalf("unexpected error %v", err)
	}
	if h.(*handler).name != "helloworld.Stream" {
		t.Fatalf("invalid handler %v", h)
	}
}

func TestTrieWildcardPathPrefix(t *testing.T) {
util/test/test.go (new file, 505 lines)
@@ -0,0 +1,505 @@
package test

import (
	"bufio"
	"bytes"
	"context"
	"database/sql/driver"
	"encoding/csv"
	"fmt"
	"io"
	"os"
	"path"
	"path/filepath"
	"reflect"
	"strings"
	"time"

	sqlmock "github.com/DATA-DOG/go-sqlmock"
	"go.unistack.org/micro/v3/client"
	"go.unistack.org/micro/v3/codec"
	"go.unistack.org/micro/v3/errors"
	"go.unistack.org/micro/v3/metadata"
	"golang.org/x/sync/errgroup"
	"google.golang.org/grpc/status"
	"google.golang.org/protobuf/proto"
)

var ErrUnknownContentType = fmt.Errorf("unknown content type")

type Extension struct {
	Ext []string
}

var (
	// ExtToTypes map file extension to content type
	ExtToTypes = map[string][]string{
		"json":  {"application/json", "application/grpc+json"},
		"yaml":  {"application/yaml", "application/yml", "text/yaml", "text/yml"},
		"yml":   {"application/yaml", "application/yml", "text/yaml", "text/yml"},
		"proto": {"application/grpc", "application/grpc+proto", "application/proto"},
	}
	// DefaultExts specifies default file extensions to load data
	DefaultExts = []string{"csv", "json", "yaml", "yml", "proto"}
	// Codecs map to detect codec for test file or request content type
	Codecs map[string]codec.Codec

	// ResponseCompareFunc used to compare actual response with test case data
	ResponseCompareFunc = func(expectRsp []byte, testRsp interface{}, expectCodec codec.Codec, testCodec codec.Codec) error {
		var err error

		expectMap := make(map[string]interface{})
		if err = expectCodec.Unmarshal(expectRsp, &expectMap); err != nil {
			return fmt.Errorf("failed to unmarshal err: %w", err)
		}

		testMap := make(map[string]interface{})
		switch v := testRsp.(type) {
		case *codec.Frame:
			if err = testCodec.Unmarshal(v.Data, &testMap); err != nil {
				return fmt.Errorf("failed to unmarshal err: %w", err)
			}
		case *errors.Error:
			if err = expectCodec.Unmarshal([]byte(v.Error()), &testMap); err != nil {
				return fmt.Errorf("failed to unmarshal err: %w", err)
			}
		case error:
			st, ok := status.FromError(v)
			if !ok {
				return v
			}
			me := errors.Parse(st.Message())
			if me.Code != 0 {
				if err = expectCodec.Unmarshal([]byte(me.Error()), &testMap); err != nil {
					return fmt.Errorf("failed to unmarshal err: %w", err)
				}
				break
			}
			for _, se := range st.Details() {
				switch ne := se.(type) {
				case proto.Message:
					buf, err := testCodec.Marshal(ne)
					if err != nil {
						return fmt.Errorf("failed to marshal err: %w", err)
					}
					if err = testCodec.Unmarshal(buf, &testMap); err != nil {
						return fmt.Errorf("failed to unmarshal err: %w", err)
					}
				default:
					return st.Err()
				}
			}
		case interface{ GRPCStatus() *status.Status }:
			st := v.GRPCStatus()
			me := errors.Parse(st.Message())
			if me.Code != 0 {
				if err = expectCodec.Unmarshal([]byte(me.Error()), &testMap); err != nil {
					return fmt.Errorf("failed to unmarshal err: %w", err)
				}
				break
			}
		case *status.Status:
			me := errors.Parse(v.Message())
			if me.Code != 0 {
				if err = expectCodec.Unmarshal([]byte(me.Error()), &testMap); err != nil {
					return fmt.Errorf("failed to unmarshal err: %w", err)
				}
				break
			}
			for _, se := range v.Details() {
				switch ne := se.(type) {
				case proto.Message:
					buf, err := testCodec.Marshal(ne)
					if err != nil {
						return fmt.Errorf("failed to marshal err: %w", err)
					}
					if err = testCodec.Unmarshal(buf, &testMap); err != nil {
						return fmt.Errorf("failed to unmarshal err: %w", err)
					}
				default:
					return v.Err()
				}
			}
		}

		if !reflect.DeepEqual(expectMap, testMap) {
			return fmt.Errorf("test: %s != rsp: %s", expectMap, testMap)
		}

		return nil
	}
)

func FromCSVString(columns []*sqlmock.Column, rows *sqlmock.Rows, s string) *sqlmock.Rows {
	res := strings.NewReader(strings.TrimSpace(s))
	csvReader := csv.NewReader(res)

	for {
		res, err := csvReader.Read()
		if err != nil || res == nil {
			break
		}

		var row []driver.Value
		for i, v := range res {
			item := CSVColumnParser(strings.TrimSpace(v))
			if null, nullOk := columns[i].IsNullable(); null && nullOk && item == nil {
				row = append(row, nil)
			} else {
				row = append(row, item)
			}
		}
		rows = rows.AddRow(row...)
	}

	return rows
}

func CSVColumnParser(s string) []byte {
	switch {
	case strings.ToLower(s) == "null":
		return nil
	case s == "":
		return nil
	}
	return []byte(s)
}

func NewResponseFromFile(rspfile string) (*codec.Frame, error) {
	rspbuf, err := os.ReadFile(rspfile)
	if err != nil {
		return nil, err
	}
	return &codec.Frame{Data: rspbuf}, nil
}

func getCodec(codecs map[string]codec.Codec, ext string) (codec.Codec, error) {
	var c codec.Codec
	if cts, ok := ExtToTypes[ext]; ok {
		for _, t := range cts {
			if c, ok = codecs[t]; ok {
				return c, nil
			}
		}
	}
	return nil, ErrUnknownContentType
}

func getContentType(codecs map[string]codec.Codec, ext string) (string, error) {
	if cts, ok := ExtToTypes[ext]; ok {
		for _, t := range cts {
			if _, ok = codecs[t]; ok {
				return t, nil
			}
		}
	}
	return "", ErrUnknownContentType
}

func getExt(name string) string {
	ext := filepath.Ext(name)
	if len(ext) > 0 && ext[0] == '.' {
		ext = ext[1:]
	}
	return ext
}

func getNameWithoutExt(name string) string {
	return strings.TrimSuffix(name, filepath.Ext(name))
}

func NewRequestFromFile(c client.Client, reqfile string) (client.Request, error) {
	reqbuf, err := os.ReadFile(reqfile)
	if err != nil {
		return nil, err
	}

	endpoint := path.Base(path.Dir(reqfile))
	if idx := strings.Index(endpoint, "_"); idx > 0 {
		endpoint = endpoint[idx+1:]
	}
	ext := getExt(reqfile)

	ct, err := getContentType(c.Options().Codecs, ext)
	if err != nil {
		return nil, err
	}

	req := c.NewRequest("test", endpoint, &codec.Frame{Data: reqbuf}, client.RequestContentType(ct))

	return req, nil
}

func SQLFromFile(m sqlmock.Sqlmock, name string) error {
	fp, err := os.Open(name)
	if err != nil {
		return err
	}
	defer fp.Close()
	return SQLFromReader(m, fp)
}

func SQLFromBytes(m sqlmock.Sqlmock, buf []byte) error {
	return SQLFromReader(m, bytes.NewReader(buf))
}

func SQLFromString(m sqlmock.Sqlmock, buf string) error {
	return SQLFromReader(m, strings.NewReader(buf))
}

func SQLFromReader(m sqlmock.Sqlmock, r io.Reader) error {
	var rows *sqlmock.Rows
	var exp *sqlmock.ExpectedQuery
	var columns []*sqlmock.Column

	br := bufio.NewReader(r)

	for {
		s, err := br.ReadString('\n')
		if err != nil && err != io.EOF {
			return err
		} else if err == io.EOF && len(s) == 0 {
			if rows != nil && exp != nil {
				exp.WillReturnRows(rows)
			}
			return nil
		}

		if s[0] != '#' {
			r := csv.NewReader(strings.NewReader(s))
			r.Comma = ','
			var records [][]string
			records, err = r.ReadAll()
			if err != nil {
				return err
			}
			if rows == nil && len(columns) > 0 {
				rows = m.NewRowsWithColumnDefinition(columns...)
			} else {
				for idx := 0; idx < len(records); idx++ {
					if len(columns) == 0 {
						return fmt.Errorf("csv file not valid, does not have %q line", "# columns ")
					}
					rows = FromCSVString(columns, rows, strings.Join(records[idx], ","))
				}
			}
			continue
		}

		if rows != nil {
			exp.WillReturnRows(rows)
			rows = nil
		}

		switch {
		case strings.HasPrefix(strings.ToLower(s[2:]), "columns"):
			for _, field := range strings.Split(s[2+len("columns")+1:], ",") {
				args := strings.Split(field, "|")

				column := sqlmock.NewColumn(args[0]).Nullable(false)

				if len(args) > 1 {
					for _, arg := range args {
						switch arg {
						case "BOOLEAN", "BOOL":
							column = column.OfType("BOOL", false)
						case "NUMBER", "DECIMAL":
							column = column.OfType("DECIMAL", float64(0.0)).WithPrecisionAndScale(10, 4)
						case "VARCHAR":
							column = column.OfType("VARCHAR", nil)
						case "NULL":
							column = column.Nullable(true)
						}
					}
				}

				columns = append(columns, column)
			}
		case strings.HasPrefix(strings.ToLower(s[2:]), "begin"):
			m.ExpectBegin()
		case strings.HasPrefix(strings.ToLower(s[2:]), "commit"):
			m.ExpectCommit()
		case strings.HasPrefix(strings.ToLower(s[2:]), "rollback"):
			m.ExpectRollback()
		case strings.HasPrefix(strings.ToLower(s[2:]), "exec "):
			m.ExpectExec(s[2+len("exec "):])
		case strings.HasPrefix(strings.ToLower(s[2:]), "query "):
			exp = m.ExpectQuery(s[2+len("query "):])
		}
	}
}

func Run(ctx context.Context, c client.Client, m sqlmock.Sqlmock, dir string, exts []string) error {
	tcases, err := GetCases(dir, exts)
	if err != nil {
		return err
	}

	g, gctx := errgroup.WithContext(ctx)
	if !strings.Contains(dir, "parallel") {
		g.SetLimit(1)
	}

	for _, tcase := range tcases {

		for _, dbfile := range tcase.dbfiles {
			if err = SQLFromFile(m, dbfile); err != nil {
				return err
			}
		}

		tc := tcase
		g.Go(func() error {
			var xrid string
			var gerr error

			treq, err := NewRequestFromFile(c, tc.reqfile)
			if err != nil {
				gerr = fmt.Errorf("failed to read request from file %s err: %w", tc.reqfile, err)
				return gerr
			}

			xrid = fmt.Sprintf("%s-%d", treq.Endpoint(), time.Now().Unix())

			defer func() {
				if gerr == nil {
					fmt.Printf("test %s xrid: %s status: success\n", filepath.Dir(tc.reqfile), xrid)
				} else {
					fmt.Printf("test %s xrid: %s status: failure error: %v\n", filepath.Dir(tc.reqfile), xrid, err)
				}
			}()

			data := &codec.Frame{}
			md := metadata.New(1)
			md.Set("X-Request-Id", xrid)
			cerr := c.Call(metadata.NewOutgoingContext(gctx, md), treq, data, client.WithContentType(treq.ContentType()))

			var rspfile string

			if tc.errfile != "" {
				rspfile = tc.errfile
			} else if tc.rspfile != "" {
				rspfile = tc.rspfile
			} else {
				gerr = fmt.Errorf("errfile and rspfile is empty")
				return gerr
			}

			expectRsp, err := NewResponseFromFile(rspfile)
			if err != nil {
				gerr = fmt.Errorf("failed to read response from file %s err: %w", rspfile, err)
				return gerr
			}

			testCodec, err := getCodec(Codecs, getExt(tc.reqfile))
			if err != nil {
				gerr = fmt.Errorf("failed to get response file codec err: %w", err)
				return gerr
			}

			expectCodec, err := getCodec(Codecs, getExt(rspfile))
			if err != nil {
				gerr = fmt.Errorf("failed to get response file codec err: %w", err)
				return gerr
			}

			if cerr == nil && tc.errfile != "" {
				gerr = fmt.Errorf("expected err %s not happened", expectRsp.Data)
				return gerr
			} else if cerr != nil && tc.errfile != "" {
				if err = ResponseCompareFunc(expectRsp.Data, cerr, expectCodec, testCodec); err != nil {
					gerr = err
					return gerr
				}
			} else if cerr != nil && tc.errfile == "" {
				gerr = cerr
				return gerr
			} else if cerr == nil && tc.errfile == "" {
				if err = ResponseCompareFunc(expectRsp.Data, data, expectCodec, testCodec); err != nil {
					gerr = err
					return gerr
				}
			}

			/*
				cf, err := getCodec(c.Options().Codecs, getExt(tc.rspfile))
				if err != nil {
					return err
				}
			*/

			return nil
		})

	}

	return g.Wait()
}

type Case struct {
	dbfiles []string
	reqfile string
	rspfile string
	errfile string
}

func GetCases(dir string, exts []string) ([]Case, error) {
	var tcases []Case
	entries, err := os.ReadDir(dir)
	if len(entries) == 0 && err != nil {
		return tcases, err
	}

	if exts == nil {
		exts = DefaultExts
	}

	var dirs []string
	var dbfiles []string
	var reqfile, rspfile, errfile string

	for _, entry := range entries {
		if entry.IsDir() {
			dirs = append(dirs, filepath.Join(dir, entry.Name()))
			continue
		}
		if info, err := entry.Info(); err != nil {
			return tcases, err
		} else if !info.Mode().IsRegular() {
			continue
		}

		for _, ext := range exts {
			if getExt(entry.Name()) == ext {
				name := getNameWithoutExt(entry.Name())
				switch {
				case strings.HasSuffix(name, "_db"):
					dbfiles = append(dbfiles, filepath.Join(dir, entry.Name()))
				case strings.HasSuffix(name, "_req"):
					reqfile = filepath.Join(dir, entry.Name())
				case strings.HasSuffix(name, "_rsp"):
					rspfile = filepath.Join(dir, entry.Name())
				case strings.HasSuffix(name, "_err"):
					errfile = filepath.Join(dir, entry.Name())
				}
			}
		}
	}

	if reqfile != "" && (rspfile != "" || errfile != "") {
		tcases = append(tcases, Case{dbfiles: dbfiles, reqfile: reqfile, rspfile: rspfile, errfile: errfile})
	}

	for _, dir = range dirs {
		ntcases, err := GetCases(dir, exts)
		if len(ntcases) == 0 && err != nil {
			return tcases, err
		} else if len(ntcases) == 0 {
			continue
		}
		tcases = append(tcases, ntcases...)
	}

	return tcases, nil
}
util/test/test_test.go (new file, 72 lines)
@@ -0,0 +1,72 @@
package test

import (
	"context"
	"testing"

	"github.com/DATA-DOG/go-sqlmock"
)

func Test_SQLFromFile(t *testing.T) {
	ctx := context.TODO()
	db, c, err := sqlmock.New()
	if err != nil {
		t.Fatal(err)
	}
	defer db.Close()

	if err = SQLFromFile(c, "testdata/result/01_firstcase/Call_db.csv"); err != nil {
		t.Fatal(err)
	}

	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		t.Fatal(err)
	}

	rows, err := tx.QueryContext(ctx, "select * from test;")
	if err != nil {
		t.Fatal(err)
	}
	for rows.Next() {
		var id int64
		var name string
		err = rows.Scan(&id, &name)
		if err != nil {
			t.Fatal(err)
		}
		if id != 1 || name != "test" {
			t.Fatalf("invalid rows %v %v", id, name)
		}
	}

	if err = rows.Close(); err != nil {
		t.Fatal(err)
	}

	if err = rows.Err(); err != nil {
		t.Fatal(err)
	}

	if err = tx.Commit(); err != nil {
		t.Fatal(err)
	}
	if err = c.ExpectationsWereMet(); err != nil {
		t.Fatal(err)
	}
}

func Test_GetCases(t *testing.T) {
	files, err := GetCases("testdata/", nil)
	if err != nil {
		t.Fatal(err)
	}

	if len(files) == 0 {
		t.Fatalf("no files matching")
	}

	if n := len(files); n != 1 {
		t.Fatalf("invalid number of test cases %d", n)
	}
}
util/test/testdata/result/01_firstcase/Call_db.csv (new file, vendored, 6 lines)
@@ -0,0 +1,6 @@
# begin
# query select \* from test;
# columns id|VARCHAR,name|VARCHAR
id,name
1,test
# commit
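For orientation (not part of the diff): a minimal sketch, assuming go-sqlmock v1.5.0 as pinned in go.mod, of the expectations the CSV fixture above is intended to produce once parsed by SQLFromFile. It only uses calls that already appear in util/test/test.go; the column definitions and query regex are taken from the fixture itself.

	package main

	import (
		sqlmock "github.com/DATA-DOG/go-sqlmock"
	)

	func main() {
		db, mock, err := sqlmock.New()
		if err != nil {
			panic(err)
		}
		defer db.Close()

		// "# begin"
		mock.ExpectBegin()

		// "# columns id|VARCHAR,name|VARCHAR" plus the data row "1,test"
		rows := mock.NewRowsWithColumnDefinition(
			sqlmock.NewColumn("id").Nullable(false).OfType("VARCHAR", nil),
			sqlmock.NewColumn("name").Nullable(false).OfType("VARCHAR", nil),
		).AddRow([]byte("1"), []byte("test"))

		// "# query select \* from test;"
		mock.ExpectQuery(`select \* from test;`).WillReturnRows(rows)

		// "# commit"
		mock.ExpectCommit()
	}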
util/test/testdata/result/01_firstcase/Call_req.json (new file, vendored, 1 line)
@@ -0,0 +1 @@
{}
util/test/testdata/result/01_firstcase/Call_rsp.json (new file, vendored, 1 line)
@@ -0,0 +1 @@
{}
@@ -2,7 +2,9 @@ package time

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"
	"time"
)

@@ -13,39 +15,42 @@ func ParseDuration(s string) (time.Duration, error) {
		return 0, fmt.Errorf(`time: invalid duration "` + s + `"`)
	}

	//var sb strings.Builder
	/*
		for i, r := range s {
			switch r {
			case 'd':
				n, err := strconv.Atoi(s[idx:i])
				if err != nil {
					return 0, errors.New("time: invalid duration " + s)
				}
				s[idx:i] = fmt.Sprintf("%d", n*24)
			default:
				sb.WriteRune(r)
	var p int
	var hours int
loop:
	for i, r := range s {
		switch r {
		case 's', 'm':
			break loop
		case 'h':
			d, err := strconv.Atoi(s[p:i])
			if err != nil {
				return 0, errors.New("time: invalid duration " + s)
			}
		}
	*/
	var td time.Duration
	var err error
	switch s[len(s)-1] {
	case 's', 'm', 'h':
		td, err = time.ParseDuration(s)
	case 'd':
		if td, err = time.ParseDuration(s[:len(s)-1] + "h"); err == nil {
			td *= 24
		}
	case 'y':
		if td, err = time.ParseDuration(s[:len(s)-1] + "h"); err == nil {
			year := time.Date(time.Now().Year(), time.December, 31, 0, 0, 0, 0, time.Local)
			days := year.YearDay()
			td *= 24 * time.Duration(days)
			hours += d
			p = i + 1
		case 'd':
			d, err := strconv.Atoi(s[p:i])
			if err != nil {
				return 0, errors.New("time: invalid duration " + s)
			}
			hours += d * 24
			p = i + 1
		case 'y':
			n, err := strconv.Atoi(s[p:i])
			if err != nil {
				return 0, errors.New("time: invalid duration " + s)
			}
			var d int
			for j := n - 1; j >= 0; j-- {
				d += time.Date(time.Now().Year()+j, time.December, 31, 0, 0, 0, 0, time.Local).YearDay()
			}
			hours += d * 24
			p = i + 1
		}
	}

	return td, err
	return time.ParseDuration(fmt.Sprintf("%dh%s", hours, s[p:]))
}

func (d Duration) MarshalJSON() ([]byte, error) {

@@ -62,7 +67,7 @@ func (d *Duration) UnmarshalJSON(b []byte) error {
		*d = Duration(time.Duration(value))
		return nil
	case string:
		dv, err := time.ParseDuration(value)
		dv, err := ParseDuration(value)
		if err != nil {
			return err
		}
@@ -23,27 +23,34 @@ func TestUnmarshalJSON(t *testing.T) {
		TTL Duration `json:"ttl"`
	}
	v := &str{}
	var err error

	err := json.Unmarshal([]byte(`{"ttl":"10ms"}`), v)
	err = json.Unmarshal([]byte(`{"ttl":"10ms"}`), v)
	if err != nil {
		t.Fatal(err)
	} else if v.TTL != 10000000 {
		t.Fatalf("invalid duration %v != 10000000", v.TTL)
	}

	err = json.Unmarshal([]byte(`{"ttl":"1y"}`), v)
	if err != nil {
		t.Fatal(err)
	} else if v.TTL != 31536000000000000 {
		t.Fatalf("invalid duration %v != 31536000000000000", v.TTL)
	}
}

func TestParseDuration(t *testing.T) {
	var td time.Duration
	var err error
	t.Skip()

	td, err = ParseDuration("14d4h")
	if err != nil {
		t.Fatalf("ParseDuration error: %v", err)
	}
	if td.String() != "336h0m0s" {
		t.Fatalf("ParseDuration 14d != 336h0m0s : %s", td.String())
	if td.String() != "340h0m0s" {
		t.Fatalf("ParseDuration 14d != 340h0m0s : %s", td.String())
	}

	td, err = ParseDuration("1y")
	if err != nil {
		t.Fatalf("ParseDuration error: %v", err)
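As a quick sanity check (not part of the diff), the constants expected by the updated tests follow from plain hour arithmetic, assuming the non-leap 365-day year that the new expectations imply:

	package main

	import (
		"fmt"
		"time"
	)

	func main() {
		// "14d4h": 14 days expressed as hours plus 4 hours = 340h, matching the updated test.
		d := 14*24*time.Hour + 4*time.Hour
		fmt.Println(d) // 340h0m0s

		// "1y": 365 days expressed as hours = 8760h = 31536000000000000ns, matching the TTL test.
		y := 365 * 24 * time.Hour
		fmt.Println(int64(y)) // 31536000000000000
	}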