util/test: update test cases code #216

Merged
vtolstov merged 1 commit from testcase into master on 2023-04-28 07:10:42 +03:00
4 changed files with 354 additions and 95 deletions

go.mod

@@ -4,7 +4,16 @@ go 1.20
 require (
 	github.com/DATA-DOG/go-sqlmock v1.5.0
-	github.com/imdario/mergo v0.3.14
+	github.com/imdario/mergo v0.3.15
 	github.com/patrickmn/go-cache v2.1.0+incompatible
-	github.com/silas/dag v0.0.0-20211117232152-9d50aa809f35
+	github.com/silas/dag v0.0.0-20220518035006-a7e85ada93c5
+	golang.org/x/sync v0.1.0
+	golang.org/x/sys v0.7.0
+	google.golang.org/grpc v1.54.0
+	google.golang.org/protobuf v1.30.0
+)
+
+require (
+	github.com/golang/protobuf v1.5.3 // indirect
+	google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect
 )

go.sum

@@ -1,11 +1,31 @@
 github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
 github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
-github.com/imdario/mergo v0.3.14 h1:fOqeC1+nCuuk6PKQdg9YmosXX7Y7mHX6R/0ZldI9iHo=
-github.com/imdario/mergo v0.3.14/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
+github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
+github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
 github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
 github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
-github.com/silas/dag v0.0.0-20211117232152-9d50aa809f35 h1:4mohWoM/UGg1BvFFiqSPRl5uwJY3rVV0HQX0ETqauqQ=
-github.com/silas/dag v0.0.0-20211117232152-9d50aa809f35/go.mod h1:7RTUFBdIRC9nZ7/3RyRNH1bdqIShrDejd1YbLwgPS+I=
+github.com/silas/dag v0.0.0-20220518035006-a7e85ada93c5 h1:G/FZtUu7a6NTWl3KUHMV9jkLAh/Rvtf03NWMHaEDl+E=
+github.com/silas/dag v0.0.0-20220518035006-a7e85ada93c5/go.mod h1:7RTUFBdIRC9nZ7/3RyRNH1bdqIShrDejd1YbLwgPS+I=
+golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ=
+golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
+golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU=
+golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A=
+google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU=
+google.golang.org/grpc v1.54.0 h1:EhTqbhiYeixwWQtAEZAxmV9MGqcjEU2mFx52xCzNyag=
+google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng=
+google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=


@@ -4,21 +4,199 @@ import (
 	"bufio"
 	"bytes"
 	"context"
+	"database/sql/driver"
 	"encoding/csv"
-	"errors"
 	"fmt"
 	"io"
 	"os"
 	"path"
 	"path/filepath"
+	"reflect"
 	"strings"
+	"time"

-	"github.com/DATA-DOG/go-sqlmock"
+	sqlmock "github.com/DATA-DOG/go-sqlmock"
 	"go.unistack.org/micro/v4/client"
 	"go.unistack.org/micro/v4/codec"
+	"go.unistack.org/micro/v4/errors"
+	"go.unistack.org/micro/v4/metadata"
 	"golang.org/x/sync/errgroup"
+	"google.golang.org/grpc/status"
+	"google.golang.org/protobuf/proto"
 )
+
+var ErrUnknownContentType = fmt.Errorf("unknown content type")
+
+type Extension struct {
+	Ext []string
+}
+
+var (
+	// ExtToTypes map file extension to content type
+	ExtToTypes = map[string][]string{
+		"json":  {"application/json", "application/grpc+json"},
+		"yaml":  {"application/yaml", "application/yml", "text/yaml", "text/yml"},
+		"yml":   {"application/yaml", "application/yml", "text/yaml", "text/yml"},
+		"proto": {"application/grpc", "application/grpc+proto", "application/proto"},
+	}
+	// DefaultExts specifies default file extensions to load data
+	DefaultExts = []string{"csv", "json", "yaml", "yml", "proto"}
+	// Codecs map to detect codec for test file or request content type
+	Codecs map[string]codec.Codec
+	// ResponseCompareFunc used to compare actual response with test case data
+	ResponseCompareFunc = func(expectRsp []byte, testRsp interface{}, expectCodec codec.Codec, testCodec codec.Codec) error {
+		var err error
+		expectMap := make(map[string]interface{})
+		if err = expectCodec.Unmarshal(expectRsp, &expectMap); err != nil {
+			return fmt.Errorf("failed to unmarshal err: %w", err)
+		}
+
+		testMap := make(map[string]interface{})
+		switch v := testRsp.(type) {
+		case *codec.Frame:
+			if err = testCodec.Unmarshal(v.Data, &testMap); err != nil {
+				return fmt.Errorf("failed to unmarshal err: %w", err)
+			}
+		case *errors.Error:
+			if err = expectCodec.Unmarshal([]byte(v.Error()), &testMap); err != nil {
+				return fmt.Errorf("failed to unmarshal err: %w", err)
+			}
+		case error:
+			st, ok := status.FromError(v)
+			if !ok {
+				return v
+			}
+			me := errors.Parse(st.Message())
+			if me.Code != 0 {
+				if err = expectCodec.Unmarshal([]byte(me.Error()), &testMap); err != nil {
+					return fmt.Errorf("failed to unmarshal err: %w", err)
+				}
+				break
+			}
+			for _, se := range st.Details() {
+				switch ne := se.(type) {
+				case proto.Message:
+					buf, err := testCodec.Marshal(ne)
+					if err != nil {
+						return fmt.Errorf("failed to marshal err: %w", err)
+					}
+					if err = testCodec.Unmarshal(buf, &testMap); err != nil {
+						return fmt.Errorf("failed to unmarshal err: %w", err)
+					}
+				default:
+					return st.Err()
+				}
+			}
+		case interface{ GRPCStatus() *status.Status }:
+			st := v.GRPCStatus()
+			me := errors.Parse(st.Message())
+			if me.Code != 0 {
+				if err = expectCodec.Unmarshal([]byte(me.Error()), &testMap); err != nil {
+					return fmt.Errorf("failed to unmarshal err: %w", err)
+				}
+				break
+			}
+		case *status.Status:
+			me := errors.Parse(v.Message())
+			if me.Code != 0 {
+				if err = expectCodec.Unmarshal([]byte(me.Error()), &testMap); err != nil {
+					return fmt.Errorf("failed to unmarshal err: %w", err)
+				}
+				break
+			}
+			for _, se := range v.Details() {
+				switch ne := se.(type) {
+				case proto.Message:
+					buf, err := testCodec.Marshal(ne)
+					if err != nil {
+						return fmt.Errorf("failed to marshal err: %w", err)
+					}
+					if err = testCodec.Unmarshal(buf, &testMap); err != nil {
+						return fmt.Errorf("failed to unmarshal err: %w", err)
+					}
+				default:
+					return v.Err()
+				}
+			}
+		}
+
+		if !reflect.DeepEqual(expectMap, testMap) {
+			return fmt.Errorf("test: %s != rsp: %s", expectMap, testMap)
+		}
+
+		return nil
+	}
+)
+
+func FromCSVString(columns []*sqlmock.Column, rows *sqlmock.Rows, s string) *sqlmock.Rows {
+	res := strings.NewReader(strings.TrimSpace(s))
+	csvReader := csv.NewReader(res)
+
+	for {
+		res, err := csvReader.Read()
+		if err != nil || res == nil {
+			break
+		}
+
+		var row []driver.Value
+		for i, v := range res {
+			item := CSVColumnParser(strings.TrimSpace(v))
+			if null, nullOk := columns[i].IsNullable(); null && nullOk && item == nil {
+				row = append(row, nil)
+			} else {
+				row = append(row, item)
+			}
+		}
+		rows = rows.AddRow(row...)
+	}
+
+	return rows
+}
+
+func CSVColumnParser(s string) []byte {
+	switch {
+	case strings.ToLower(s) == "null":
+		return nil
+	case s == "":
+		return nil
+	}
+	return []byte(s)
+}
+
+func NewResponseFromFile(rspfile string) (*codec.Frame, error) {
+	rspbuf, err := os.ReadFile(rspfile)
+	if err != nil {
+		return nil, err
+	}
+	return &codec.Frame{Data: rspbuf}, nil
+}
+
+func getCodec(codecs map[string]codec.Codec, ext string) (codec.Codec, error) {
+	var c codec.Codec
+	if cts, ok := ExtToTypes[ext]; ok {
+		for _, t := range cts {
+			if c, ok = codecs[t]; ok {
+				return c, nil
+			}
+		}
+	}
+	return nil, ErrUnknownContentType
+}
+
+func getContentType(codecs map[string]codec.Codec, ext string) (string, error) {
+	if cts, ok := ExtToTypes[ext]; ok {
+		for _, t := range cts {
+			if _, ok = codecs[t]; ok {
+				return t, nil
+			}
+		}
+	}
+	return "", ErrUnknownContentType
+}
+
 func getExt(name string) string {
 	ext := filepath.Ext(name)
 	if len(ext) > 0 && ext[0] == '.' {
@@ -31,35 +209,6 @@ func getNameWithoutExt(name string) string {
 	return strings.TrimSuffix(name, filepath.Ext(name))
 }

-var ErrUnknownContentType = errors.New("unknown content type")
-
-type Extension struct {
-	Ext []string
-}
-
-var (
-	ExtToTypes = map[string][]string{
-		"json":  {"application/json", "application/grpc+json"},
-		"yaml":  {"application/yaml", "application/yml", "text/yaml", "text/yml"},
-		"yml":   {"application/yaml", "application/yml", "text/yaml", "text/yml"},
-		"proto": {"application/grpc", "application/grpc+proto", "application/proto"},
-	}
-	DefaultExts = []string{"csv", "json", "yaml", "yml", "proto"}
-)
-
-func clientCall(ctx context.Context, c client.Client, req client.Request, rsp interface{}) error {
-	return nil
-}
-
-func NewResponseFromFile(rspfile string) (*codec.Frame, error) {
-	rspbuf, err := os.ReadFile(rspfile)
-	if err != nil {
-		return nil, err
-	}
-	return &codec.Frame{Data: rspbuf}, nil
-}
-
 func NewRequestFromFile(c client.Client, reqfile string) (client.Request, error) {
 	reqbuf, err := os.ReadFile(reqfile)
 	if err != nil {
@@ -67,20 +216,14 @@ func NewRequestFromFile(c client.Client, reqfile string) (client.Request, error) {
 	}

 	endpoint := path.Base(path.Dir(reqfile))
-	if idx := strings.Index(endpoint, "_"); idx > 0 {
-		endpoint = endpoint[idx+1:]
-	}

 	ext := getExt(reqfile)
-	var ct string
-	if cts, ok := ExtToTypes[ext]; ok {
-		for _, t := range cts {
-			if _, ok = c.Options().Codecs[t]; ok {
-				ct = t
-				break
-			}
-		}
-	}
-	if ct == "" {
-		return nil, ErrUnknownContentType
+	ct, err := getContentType(c.Options().Codecs, ext)
+	if err != nil {
+		return nil, err
 	}

 	req := c.NewRequest("test", endpoint, &codec.Frame{Data: reqbuf}, client.RequestContentType(ct))
@@ -108,6 +251,8 @@ func SQLFromString(m sqlmock.Sqlmock, buf string) error {
 func SQLFromReader(m sqlmock.Sqlmock, r io.Reader) error {
 	var rows *sqlmock.Rows
 	var exp *sqlmock.ExpectedQuery
+	var columns []*sqlmock.Column

 	br := bufio.NewReader(r)
 	for {
@@ -115,6 +260,9 @@ func SQLFromReader(m sqlmock.Sqlmock, r io.Reader) error {
 		if err != nil && err != io.EOF {
 			return err
 		} else if err == io.EOF && len(s) == 0 {
+			if rows != nil && exp != nil {
+				exp.WillReturnRows(rows)
+			}
 			return nil
 		}
@@ -126,11 +274,14 @@ func SQLFromReader(m sqlmock.Sqlmock, r io.Reader) error {
 			if err != nil {
 				return err
 			}
-			if rows == nil {
-				rows = m.NewRows(records[0])
+			if rows == nil && len(columns) > 0 {
+				rows = m.NewRowsWithColumnDefinition(columns...)
 			} else {
 				for idx := 0; idx < len(records); idx++ {
-					rows.FromCSVString(strings.Join(records[idx], ","))
+					if len(columns) == 0 {
+						return fmt.Errorf("csv file not valid, does not have %q line", "# columns ")
+					}
+					rows = FromCSVString(columns, rows, strings.Join(records[idx], ","))
 				}
 			}
 			continue
@@ -142,6 +293,29 @@ func SQLFromReader(m sqlmock.Sqlmock, r io.Reader) error {
 		}

 		switch {
+		case strings.HasPrefix(strings.ToLower(s[2:]), "columns"):
+			for _, field := range strings.Split(s[2+len("columns")+1:], ",") {
+				args := strings.Split(field, "|")
+				column := sqlmock.NewColumn(args[0]).Nullable(false)
+				if len(args) > 1 {
+					for _, arg := range args {
+						switch arg {
+						case "BOOLEAN", "BOOL":
+							column = column.OfType("BOOL", false)
+						case "NUMBER", "DECIMAL":
+							column = column.OfType("DECIMAL", float64(0.0)).WithPrecisionAndScale(10, 4)
+						case "VARCHAR":
+							column = column.OfType("VARCHAR", nil)
+						case "NULL":
+							column = column.Nullable(true)
+						}
+					}
+				}
+				columns = append(columns, column)
+			}
 		case strings.HasPrefix(strings.ToLower(s[2:]), "begin"):
 			m.ExpectBegin()
 		case strings.HasPrefix(strings.ToLower(s[2:]), "commit"):
@@ -156,66 +330,118 @@
 	}
 }

-func RunWithClientExpectResults(ctx context.Context, c client.Client, m sqlmock.Sqlmock, dir string, exts []string) error {
+func Run(ctx context.Context, c client.Client, m sqlmock.Sqlmock, dir string, exts []string) error {
 	tcases, err := GetCases(dir, exts)
 	if err != nil {
 		return err
 	}

 	g, gctx := errgroup.WithContext(ctx)
 	if !strings.Contains(dir, "parallel") {
 		g.SetLimit(1)
 	}

 	for _, tcase := range tcases {
 		for _, dbfile := range tcase.dbfiles {
 			if err = SQLFromFile(m, dbfile); err != nil {
 				return err
 			}
 		}

-		for idx := 0; idx < len(tcase.reqfiles); idx++ {
-			g.TryGo(func() error {
-				req, err := NewRequestFromFile(c, tcase.reqfiles[idx])
-				if err != nil {
-					return err
-				}
-				rsp, err := NewResponseFromFile(tcase.rspfiles[idx])
-				if err != nil {
-					return err
-				}
-				data := &codec.Frame{}
-				err = c.Call(gctx, req, data, client.WithContentType(req.ContentType()))
-				if err != nil {
-					return err
-				}
-				if !bytes.Equal(rsp.Data, data.Data) {
-					return fmt.Errorf("rsp not equal test %s != %s", rsp.Data, data.Data)
-				}
-				return nil
-			})
-		}
-	}
-
-	return g.Wait()
-}
-
-func RunWithClientExpectErrors(ctx context.Context, c client.Client, dir string) error {
-	g, gctx := errgroup.WithContext(ctx)
-	if !strings.Contains(dir, "parallel") {
-		g.SetLimit(1)
-	}
-	_ = gctx
-	g.TryGo(func() error {
-		// rsp := &codec.Frame{}
-		// return c.Call(ctx, req, rsp, client.WithContentType(req.ContentType()))
-		return nil
-	})
+		tc := tcase
+		g.Go(func() error {
+			var xrid string
+			var gerr error
+
+			treq, err := NewRequestFromFile(c, tc.reqfile)
+			if err != nil {
+				gerr = fmt.Errorf("failed to read request from file %s err: %w", tc.reqfile, err)
+				return gerr
+			}
+
+			xrid = fmt.Sprintf("%s-%d", treq.Endpoint(), time.Now().Unix())
+
+			defer func() {
+				if gerr == nil {
+					fmt.Printf("test %s xrid: %s status: success\n", filepath.Dir(tc.reqfile), xrid)
+				} else {
+					fmt.Printf("test %s xrid: %s status: failure error: %v\n", filepath.Dir(tc.reqfile), xrid, err)
+				}
+			}()
+
+			data := &codec.Frame{}
+			md := metadata.New(1)
+			md.Set("X-Request-Id", xrid)
+			cerr := c.Call(metadata.NewOutgoingContext(gctx, md), treq, data, client.WithContentType(treq.ContentType()))
+
+			var rspfile string
+			if tc.errfile != "" {
+				rspfile = tc.errfile
+			} else if tc.rspfile != "" {
+				rspfile = tc.rspfile
+			} else {
+				gerr = fmt.Errorf("errfile and rspfile is empty")
+				return gerr
+			}
+
+			expectRsp, err := NewResponseFromFile(rspfile)
+			if err != nil {
+				gerr = fmt.Errorf("failed to read response from file %s err: %w", rspfile, err)
+				return gerr
+			}
+
+			testCodec, err := getCodec(Codecs, getExt(tc.reqfile))
+			if err != nil {
+				gerr = fmt.Errorf("failed to get response file codec err: %w", err)
+				return gerr
+			}
+
+			expectCodec, err := getCodec(Codecs, getExt(rspfile))
+			if err != nil {
+				gerr = fmt.Errorf("failed to get response file codec err: %w", err)
+				return gerr
+			}
+
+			if cerr == nil && tc.errfile != "" {
+				gerr = fmt.Errorf("expected err %s not happened", expectRsp.Data)
+				return gerr
+			} else if cerr != nil && tc.errfile != "" {
+				if err = ResponseCompareFunc(expectRsp.Data, cerr, expectCodec, testCodec); err != nil {
+					gerr = err
+					return gerr
+				}
+			} else if cerr != nil && tc.errfile == "" {
+				gerr = cerr
+				return gerr
+			} else if cerr == nil && tc.errfile == "" {
+				if err = ResponseCompareFunc(expectRsp.Data, data, expectCodec, testCodec); err != nil {
+					gerr = err
+					return gerr
+				}
+			}
+			/*
+				cf, err := getCodec(c.Options().Codecs, getExt(tc.rspfile))
+				if err != nil {
+					return err
+				}
+			*/
+			return nil
+		})
+	}
+
 	return g.Wait()
 }

 type Case struct {
 	dbfiles []string
-	reqfiles []string
-	rspfiles []string
+	reqfile string
+	rspfile string
+	errfile string
 }

 func GetCases(dir string, exts []string) ([]Case, error) {
@@ -230,7 +456,8 @@ func GetCases(dir string, exts []string) ([]Case, error) {
 	}

 	var dirs []string
-	var dbfiles, reqfiles, rspfiles []string
+	var dbfiles []string
+	var reqfile, rspfile, errfile string

 	for _, entry := range entries {
 		if entry.IsDir() {
@@ -250,16 +477,18 @@ func GetCases(dir string, exts []string) ([]Case, error) {
 				case strings.HasSuffix(name, "_db"):
 					dbfiles = append(dbfiles, filepath.Join(dir, entry.Name()))
 				case strings.HasSuffix(name, "_req"):
-					reqfiles = append(reqfiles, filepath.Join(dir, entry.Name()))
+					reqfile = filepath.Join(dir, entry.Name())
 				case strings.HasSuffix(name, "_rsp"):
-					rspfiles = append(rspfiles, filepath.Join(dir, entry.Name()))
+					rspfile = filepath.Join(dir, entry.Name())
+				case strings.HasSuffix(name, "_err"):
+					errfile = filepath.Join(dir, entry.Name())
 				}
 			}
 		}
 	}

-	if len(reqfiles) > 0 && len(rspfiles) > 0 {
-		tcases = append(tcases, Case{dbfiles: dbfiles, reqfiles: reqfiles, rspfiles: rspfiles})
+	if reqfile != "" && (rspfile != "" || errfile != "") {
+		tcases = append(tcases, Case{dbfiles: dbfiles, reqfile: reqfile, rspfile: rspfile, errfile: errfile})
 	}

 	for _, dir = range dirs {
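For illustration only, not part of the merged change: a minimal sketch of how the reworked Run helper might be driven from a Go test. The import path go.unistack.org/micro/v4/util/test is inferred from the PR title, and runCases, jsonCodec and the testdata directory are hypothetical names; the client.Client is assumed to be configured elsewhere with codecs matching the request content types.

package example_test // hypothetical package

import (
	"context"
	"testing"

	sqlmock "github.com/DATA-DOG/go-sqlmock"
	"go.unistack.org/micro/v4/client"
	"go.unistack.org/micro/v4/codec"
	test "go.unistack.org/micro/v4/util/test" // assumed package path
)

// runCases wires sqlmock and the test-case runner together; c and jsonCodec
// are expected to come from the surrounding test setup.
func runCases(t *testing.T, c client.Client, jsonCodec codec.Codec) {
	t.Helper()

	// db is the mock *sql.DB the service under test must use; mock is the
	// handle the *_db fixtures are replayed into via SQLFromFile.
	db, mock, err := sqlmock.New()
	if err != nil {
		t.Fatal(err)
	}
	_ = db // hand this to the service under test

	// Codecs is consulted by Run to pick a codec for *_req/*_rsp/*_err files
	// based on their extension (see ExtToTypes).
	test.Codecs = map[string]codec.Codec{"application/json": jsonCodec}

	// GetCases walks the directory tree recursively; every directory holding
	// a *_req file plus a *_rsp or *_err file becomes one Case.
	if err := test.Run(context.TODO(), c, mock, "testdata", test.DefaultExts); err != nil {
		t.Fatal(err)
	}
}

Note that Run serializes cases unless the directory path contains "parallel", and it tags each call with an X-Request-Id metadata value built from the endpoint name and a timestamp.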


@@ -1,5 +1,6 @@
 # begin
 # query select \* from test;
+# columns id|VARCHAR,name|VARCHAR
 id,name
 1,test
 # commit
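Also illustrative rather than part of the change: a hedged sketch of a *_db fixture that uses the new # columns directive, fed through SQLFromString (the string-based variant visible in the hunk context above). Column specs take the form name|TYPE with an optional |NULL; the people table, the query and loadFixture are made-up examples.

package example_test // hypothetical, as above

import (
	sqlmock "github.com/DATA-DOG/go-sqlmock"
	test "go.unistack.org/micro/v4/util/test" // assumed package path
)

// Hypothetical *_db fixture. NUMBER/DECIMAL columns are registered as DECIMAL
// with precision 10 and scale 4, BOOL/BOOLEAN as BOOL, and a trailing |NULL
// marks the column nullable; a literal NULL (or empty) CSV cell becomes a nil
// value. The first CSV line after "# columns" is the header and is discarded.
const fixture = `# begin
# query select id, age, note from people;
# columns id|VARCHAR,age|NUMBER,note|VARCHAR|NULL
id,age,note
1,42,hello
2,37,NULL
# commit
`

// loadFixture replays the fixture into a sqlmock handle, the same way Run
// replays every *_db file that belongs to a test case.
func loadFixture(mock sqlmock.Sqlmock) error {
	return test.SQLFromString(mock, fixture)
}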