From 5691238a6a25f0d1d8a21497035ace210e6abbda Mon Sep 17 00:00:00 2001
From: Vasiliy Tolstov
Date: Tue, 18 Apr 2023 23:47:12 +0300
Subject: [PATCH] util/test: add helper funcs

Signed-off-by: Vasiliy Tolstov
---
 util/test/sqlmock_test.go                     |  25 --
 util/test/test.go                             | 251 ++++++++++++++++--
 util/test/test_test.go                        |  72 +++++
 .../testdata/result/01_firstcase/Call_db.csv  |   5 +
 .../result/01_firstcase/Call_req.json         |   1 +
 .../result/01_firstcase/Call_rsp.json         |   1 +
 6 files changed, 302 insertions(+), 53 deletions(-)
 delete mode 100644 util/test/sqlmock_test.go
 create mode 100644 util/test/test_test.go
 create mode 100644 util/test/testdata/result/01_firstcase/Call_db.csv
 create mode 100644 util/test/testdata/result/01_firstcase/Call_req.json
 create mode 100644 util/test/testdata/result/01_firstcase/Call_rsp.json

diff --git a/util/test/sqlmock_test.go b/util/test/sqlmock_test.go
deleted file mode 100644
index 6d238f26..00000000
--- a/util/test/sqlmock_test.go
+++ /dev/null
@@ -1,25 +0,0 @@
-package test
-
-import (
-	"fmt"
-	"strings"
-	"testing"
-
-	"github.com/DATA-DOG/go-sqlmock"
-)
-
-func Test_NewSQLRowsFromFile(t *testing.T) {
-	db, c, err := sqlmock.New()
-	if err != nil {
-		t.Fatal(err)
-	}
-	defer db.Close()
-
-	rows, err := NewSQLRowsFromFile(c, "testdata/Call.csv")
-	if err != nil {
-		t.Fatal(err)
-	}
-	if !strings.Contains(fmt.Sprintf("%#+v", rows), `cols:[]string{"DepAgrId", "DepAgrNum", "DepAgrDate", "DepAgrCloseDate", "AccCur", "MainFinaccNum", "MainFinaccName", "MainFinaccId", "MainFinaccOpenDt", "DepAgrStatus", "MainFinaccBal", "DepartCode", "CardAccId"}`) {
-		t.Fatal("invalid cols after import csv")
-	}
-}
diff --git a/util/test/test.go b/util/test/test.go
index a92acc52..ab5e4a93 100644
--- a/util/test/test.go
+++ b/util/test/test.go
@@ -1,28 +1,63 @@
 package test
 
 import (
+	"bufio"
+	"bytes"
+	"context"
 	"encoding/csv"
 	"errors"
+	"fmt"
+	"io"
 	"os"
 	"path"
+	"path/filepath"
 	"strings"
 
 	"github.com/DATA-DOG/go-sqlmock"
 	"go.unistack.org/micro/v4/client"
 	"go.unistack.org/micro/v4/codec"
+	"golang.org/x/sync/errgroup"
 )
 
+func getExt(name string) string {
+	ext := filepath.Ext(name)
+	if len(ext) > 0 && ext[0] == '.' {
+		ext = ext[1:]
+	}
+	return ext
+}
+
+func getNameWithoutExt(name string) string {
+	return strings.TrimSuffix(name, filepath.Ext(name))
+}
+
 var ErrUnknownContentType = errors.New("unknown content type")
 
 type Extension struct {
 	Ext []string
 }
 
-var ExtToTypes = map[string][]string{
-	"json":  {"application/json", "application/grpc+json"},
-	"yaml":  {"application/yaml", "application/yml", "text/yaml", "text/yml"},
-	"yml":   {"application/yaml", "application/yml", "text/yaml", "text/yml"},
-	"proto": {"application/grpc", "application/grpc+proto", "application/proto"},
+var (
+	ExtToTypes = map[string][]string{
+		"json":  {"application/json", "application/grpc+json"},
+		"yaml":  {"application/yaml", "application/yml", "text/yaml", "text/yml"},
+		"yml":   {"application/yaml", "application/yml", "text/yaml", "text/yml"},
+		"proto": {"application/grpc", "application/grpc+proto", "application/proto"},
+	}
+
+	DefaultExts = []string{"csv", "json", "yaml", "yml", "proto"}
+)
+
+func clientCall(ctx context.Context, c client.Client, req client.Request, rsp interface{}) error {
+	return nil
+}
+
+func NewResponseFromFile(rspfile string) (*codec.Frame, error) {
+	rspbuf, err := os.ReadFile(rspfile)
+	if err != nil {
+		return nil, err
+	}
+	return &codec.Frame{Data: rspbuf}, nil
 }
 
 func NewRequestFromFile(c client.Client, reqfile string) (client.Request, error) {
@@ -32,10 +67,7 @@ func NewRequestFromFile(c client.Client, reqfile string) (client.Request, error)
 	}
 
 	endpoint := path.Base(path.Dir(reqfile))
-	ext := path.Ext(reqfile)
-	if len(ext) > 0 && ext[0] == '.' {
-		ext = ext[1:]
-	}
+	ext := getExt(reqfile)
 
 	var ct string
 	if cts, ok := ExtToTypes[ext]; ok {
@@ -56,26 +88,189 @@ func NewRequestFromFile(c client.Client, reqfile string) (client.Request, error)
 	return req, nil
 }
 
-func NewSQLRowsFromFile(c sqlmock.Sqlmock, file string) (*sqlmock.Rows, error) {
-	fp, err := os.Open(file)
+func SQLFromFile(m sqlmock.Sqlmock, name string) error {
+	fp, err := os.Open(name)
 	if err != nil {
-		return nil, err
+		return err
 	}
 	defer fp.Close()
-
-	r := csv.NewReader(fp)
-	r.Comma = '\t'
-	r.Comment = '#'
-
-	records, err := r.ReadAll()
-	if err != nil {
-		return nil, err
-	}
-
-	rows := c.NewRows(records[0])
-	for idx := 1; idx < len(records); idx++ {
-		rows.FromCSVString(strings.Join(records[idx], ";"))
-	}
-
-	return rows, nil
+	return SQLFromReader(m, fp)
+}
+
+func SQLFromBytes(m sqlmock.Sqlmock, buf []byte) error {
+	return SQLFromReader(m, bytes.NewReader(buf))
+}
+
+func SQLFromString(m sqlmock.Sqlmock, buf string) error {
+	return SQLFromReader(m, strings.NewReader(buf))
+}
+
+func SQLFromReader(m sqlmock.Sqlmock, r io.Reader) error {
+	var rows *sqlmock.Rows
+	var exp *sqlmock.ExpectedQuery
+	br := bufio.NewReader(r)
+
+	for {
+		s, err := br.ReadString('\n')
+		if err != nil && err != io.EOF {
+			return err
+		} else if err == io.EOF && len(s) == 0 {
+			return nil
+		}
+
+		if s[0] != '#' {
+			r := csv.NewReader(strings.NewReader(s))
+			r.Comma = ','
+			var records [][]string
+			records, err = r.ReadAll()
+			if err != nil {
+				return err
+			}
+			if rows == nil {
+				rows = m.NewRows(records[0])
+			} else {
+				for idx := 0; idx < len(records); idx++ {
+					rows.FromCSVString(strings.Join(records[idx], ","))
+				}
+			}
+			continue
+		}
+
+		if rows != nil {
+			exp.WillReturnRows(rows)
+			rows = nil
+		}
+
+		switch {
+		case strings.HasPrefix(strings.ToLower(s[2:]), "begin"):
+			m.ExpectBegin()
+		case strings.HasPrefix(strings.ToLower(s[2:]), "commit"):
+			m.ExpectCommit()
+		case strings.HasPrefix(strings.ToLower(s[2:]), "rollback"):
+			m.ExpectRollback()
+		case strings.HasPrefix(strings.ToLower(s[2:]), "exec "):
+			m.ExpectExec(s[2+len("exec "):])
+		case strings.HasPrefix(strings.ToLower(s[2:]), "query "):
+			exp = m.ExpectQuery(s[2+len("query "):])
+		}
+	}
+}
+
+func RunWithClientExpectResults(ctx context.Context, c client.Client, m sqlmock.Sqlmock, dir string, exts []string) error {
+	tcases, err := getFiles(dir, exts)
+	if err != nil {
+		return err
+	}
+	g, gctx := errgroup.WithContext(ctx)
+	if !strings.Contains(dir, "parallel") {
+		g.SetLimit(1)
+	}
+
+	for _, tcase := range tcases {
+		for _, dbfile := range tcase.dbfiles {
+			if err = SQLFromFile(m, dbfile); err != nil {
+				return err
+			}
+		}
+
+		for idx := 0; idx < len(tcase.reqfiles); idx++ {
+			g.TryGo(func() error {
+				req, err := NewRequestFromFile(c, tcase.reqfiles[idx])
+				if err != nil {
+					return err
+				}
+				rsp, err := NewResponseFromFile(tcase.rspfiles[idx])
+				if err != nil {
+					return err
+				}
+				data := &codec.Frame{}
+				err = c.Call(gctx, req, data, client.WithContentType(req.ContentType()))
+				if err != nil {
+					return err
+				}
+				if !bytes.Equal(rsp.Data, data.Data) {
+					return fmt.Errorf("rsp not equal test %s != %s", rsp.Data, data.Data)
+				}
+				return nil
+			})
+		}
+	}
+	return g.Wait()
+}
+
+func RunWithClientExpectErrors(ctx context.Context, c client.Client, dir string) error {
+	g, gctx := errgroup.WithContext(ctx)
+	if !strings.Contains(dir, "parallel") {
+		g.SetLimit(1)
+	}
+	_ = gctx
+	g.TryGo(func() error {
+		// rsp := &codec.Frame{}
+		// return c.Call(ctx, req, rsp, client.WithContentType(req.ContentType()))
+		return nil
+	})
+	return g.Wait()
+}
+
+type Case struct {
+	dbfiles  []string
+	reqfiles []string
+	rspfiles []string
+}
+
+func getFiles(dir string, exts []string) ([]Case, error) {
+	var tcases []Case
+	entries, err := os.ReadDir(dir)
+	if len(entries) == 0 && err != nil {
+		return tcases, err
+	}
+
+	if exts == nil {
+		exts = DefaultExts
+	}
+
+	var dirs []string
+	var dbfiles, reqfiles, rspfiles []string
+
+	for _, entry := range entries {
+		if entry.IsDir() {
+			dirs = append(dirs, filepath.Join(dir, entry.Name()))
+			continue
+		}
+		if info, err := entry.Info(); err != nil {
+			return tcases, err
+		} else if !info.Mode().IsRegular() {
+			continue
+		}
+
+		for _, ext := range exts {
+			if getExt(entry.Name()) == ext {
+				name := getNameWithoutExt(entry.Name())
+				switch {
+				case strings.HasSuffix(name, "_db"):
+					dbfiles = append(dbfiles, filepath.Join(dir, entry.Name()))
+				case strings.HasSuffix(name, "_req"):
+					reqfiles = append(reqfiles, filepath.Join(dir, entry.Name()))
+				case strings.HasSuffix(name, "_rsp"):
+					rspfiles = append(rspfiles, filepath.Join(dir, entry.Name()))
+				}
+			}
+		}
+	}
+
+	if len(reqfiles) > 0 && len(rspfiles) > 0 {
+		tcases = append(tcases, Case{dbfiles: dbfiles, reqfiles: reqfiles, rspfiles: rspfiles})
+	}
+
+	for _, dir = range dirs {
+		ntcases, err := getFiles(dir, exts)
+		if len(ntcases) == 0 && err != nil {
+			return tcases, err
+		} else if len(ntcases) == 0 {
+			continue
+		}
+		tcases = append(tcases, ntcases...)
+	}
+
+	return tcases, nil
 }
diff --git a/util/test/test_test.go b/util/test/test_test.go
new file mode 100644
index 00000000..58fa2b4b
--- /dev/null
+++ b/util/test/test_test.go
@@ -0,0 +1,72 @@
+package test
+
+import (
+	"context"
+	"testing"
+
+	"github.com/DATA-DOG/go-sqlmock"
+)
+
+func Test_SQLFromFile(t *testing.T) {
+	ctx := context.TODO()
+	db, c, err := sqlmock.New()
+	if err != nil {
+		t.Fatal(err)
+	}
+	defer db.Close()
+
+	if err = SQLFromFile(c, "testdata/result/01_firstcase/Call_db.csv"); err != nil {
+		t.Fatal(err)
+	}
+
+	tx, err := db.BeginTx(ctx, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	rows, err := tx.QueryContext(ctx, "select * from test;")
+	if err != nil {
+		t.Fatal(err)
+	}
+	for rows.Next() {
+		var id int64
+		var name string
+		err = rows.Scan(&id, &name)
+		if err != nil {
+			t.Fatal(err)
+		}
+		if id != 1 || name != "test" {
+			t.Fatalf("invalid rows %v %v", id, name)
+		}
+	}
+
+	if err = rows.Close(); err != nil {
+		t.Fatal(err)
+	}
+
+	if err = rows.Err(); err != nil {
+		t.Fatal(err)
+	}
+
+	if err = tx.Commit(); err != nil {
+		t.Fatal(err)
+	}
+	if err = c.ExpectationsWereMet(); err != nil {
+		t.Fatal(err)
+	}
+}
+
+func Test_getFiles(t *testing.T) {
+	files, err := getFiles("testdata/", nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if len(files) == 0 {
+		t.Fatalf("no files matching")
+	}
+
+	if n := len(files); n != 1 {
+		t.Fatalf("invalid number of test cases %d", n)
+	}
+}
diff --git a/util/test/testdata/result/01_firstcase/Call_db.csv b/util/test/testdata/result/01_firstcase/Call_db.csv
new file mode 100644
index 00000000..1d305162
--- /dev/null
+++ b/util/test/testdata/result/01_firstcase/Call_db.csv
@@ -0,0 +1,5 @@
+# begin
+# query select \* from test;
+id,name
+1,test
+# commit
\ No newline at end of file
diff --git a/util/test/testdata/result/01_firstcase/Call_req.json b/util/test/testdata/result/01_firstcase/Call_req.json
new file mode 100644
index 00000000..9e26dfee
--- /dev/null
+++ b/util/test/testdata/result/01_firstcase/Call_req.json
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/util/test/testdata/result/01_firstcase/Call_rsp.json b/util/test/testdata/result/01_firstcase/Call_rsp.json
new file mode 100644
index 00000000..9e26dfee
--- /dev/null
+++ b/util/test/testdata/result/01_firstcase/Call_rsp.json
@@ -0,0 +1 @@
+{}
\ No newline at end of file
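
A minimal usage sketch for the new helpers, assuming a pre-configured go.unistack.org/micro/v4 client whose handlers execute their SQL against the *sql.DB returned by sqlmock.New(). The example package name, the runResultCases helper, and the surrounding wiring are illustrative assumptions; only RunWithClientExpectResults, ExpectationsWereMet, and the testdata layout (*_req.json, *_rsp.json, *_db.csv under a case directory) come from the patch above.

package example

import (
	"context"
	"testing"

	"github.com/DATA-DOG/go-sqlmock"
	"go.unistack.org/micro/v4/client"

	mtest "go.unistack.org/micro/v4/util/test"
)

// runResultCases is an illustrative helper, not part of the patch.
// sqlmock provides both the *sql.DB that the service under test is assumed
// to use and the Sqlmock handle that receives the expectations parsed from
// the *_db.csv files by RunWithClientExpectResults.
func runResultCases(t *testing.T, c client.Client) {
	t.Helper()

	// db would be injected into the service handlers under test;
	// m collects the begin/query/exec/commit expectations.
	db, m, err := sqlmock.New()
	if err != nil {
		t.Fatal(err)
	}
	defer db.Close()

	// Passing nil for exts selects DefaultExts (csv, json, yaml, yml, proto).
	// When the dir argument does not contain "parallel", the errgroup inside
	// RunWithClientExpectResults is limited to a single goroutine.
	if err := mtest.RunWithClientExpectResults(context.TODO(), c, m, "testdata/result", nil); err != nil {
		t.Fatal(err)
	}

	// Verify that every expectation loaded from the CSV files was used.
	if err := m.ExpectationsWereMet(); err != nil {
		t.Fatal(err)
	}
}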