#19 #20
12
.gitea/pkgdashcli.yaml
Normal file
@@ -0,0 +1,12 @@
branches:
  - master
source:
  type: gitea
  apiurl: git.unistack.org
  repository: pkgdash
  owner: kgorbunov
update_opt:
  pre: false
  major: false
  up_major: false
  cached: true
27
.gitignore
vendored
@@ -1,7 +1,6 @@
# ---> Go
# If you prefer the allow list template instead of the deny list, see community template:
# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
#
# Mac OS X files
*.DS_Store

# Binaries for programs and plugins
*.exe
*.exe~
@@ -9,15 +8,25 @@
*.so
*.dylib

# Test binary, built with `go test -c`
# Test binary, build with `go test -c`
*.test

# Output of the go coverage tool, specifically when used with LiteIDE
*.out

# Dependency directories (remove the comment below to include it)
# vendor/
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
.glide/

# Go workspace file
go.work
# Dependency directories
vendor/

bin/
.idea
.vscode
.env
tmp/

cmd/pkgdash/pkgdash
cmd/pkgdashcli/pkgdashcli
*.sqlite
*.db
22
Makefile
Normal file
@@ -0,0 +1,22 @@
.PHONY: build
build:
	GOWORK=off CGO_ENABLED=0 go build -o bin/pkgdash -mod=readonly go.unistack.org/pkgdash/cmd/pkgdash
	GOWORK=off CGO_ENABLED=0 go build -o bin/pkgdashcli -mod=readonly go.unistack.org/pkgdash/cmd/pkgdashcli

.PHONY: buildcli
buildcli:
	CGO_ENABLED=0 go build -o bin/app -mod=readonly go.unistack.org/pkgdash/cmd/pkgdashcli

.PHONY: cli
cli:
	go install go.unistack.org/pkgdash/cmd/pkgdashcli

.PHONY: test
test:
	go test -v ./... -race -cover

.PHONY: lint
lint:
	# Install the binary release of golangci-lint
	# https://github.com/golangci/golangci-lint#install
	golangci-lint run
45
README.md
@@ -1,2 +1,47 @@
# pkgdash

# pkgdashcli

## Installation

Use any of the following for a pain-free installation:

* If you have [`go`](https://go.dev/doc/install) installed, you can run:
```shell
go install git.unistack.org/unistack-org/pkgdash/cmd/pkgdashcli@latest
```
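
* Alternatively, from a clone of this repository you can use the Makefile targets added in this change (a sketch; the clone URL is assumed to match the module path):
```shell
git clone https://git.unistack.org/unistack-org/pkgdash.git   # assumed repository URL
cd pkgdash
make cli    # runs: go install go.unistack.org/pkgdash/cmd/pkgdashcli
make build  # builds ./bin/pkgdash and ./bin/pkgdashcli
```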

## Config

Before starting, fill out the configuration file. It must be placed in the
`.gitea`, `.gitlab`, or `.github` directory, depending on the version control
system you are using.

- `branches` - list of main branches
- `source` - information about the version control system
- `source.type` - type of version control system
- `source.apiurl` - base URL of the version control system API
- `source.repository` - repository name
- `source.owner` - username of the repository owner
- `update_opt` - dependency update options
- `update_opt.pre` - update to pre-release versions
- `update_opt.major` - compare only major versions of modules
- `update_opt.up_major` - allow raising the major version
- `update_opt.cached` - enable caching

### Config example

```yaml
# .gitea/pkgdashcli.yaml
branches: [master]
source:
  type: gitea
  apiurl: git.unistack.org
  repository: pkgdash
  owner: unistack
update_opt:
  pre: false
  major: false
  up_major: false
  cached: true
```
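
## Usage

The commands below mirror the help text that `pkgdashcli` prints when run without arguments (see `cmd/pkgdashcli/main.go`); the module path passed to `--path` is only an example:

```shell
pkgdashcli --command checkupdate                            # list dependencies that have newer versions
pkgdashcli --command update                                 # open PRs updating all outdated dependencies
pkgdashcli --command update --path go.unistack.org/micro/v3 # open a PR for a single dependency
pkgdashcli --command list                                   # list dependency-update PRs for this repository
pkgdashcli --command close --path go.unistack.org/micro/v3  # close the PR for a single dependency
```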
BIN
cmd/pkgdash/assets/ui/favicon.ico
Normal file
Binary file not shown.
16
cmd/pkgdash/assets/ui/index.html
Normal file
@@ -0,0 +1,16 @@
<!doctype html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <title>UI</title>
  <base href="/ui/">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <link rel="icon" type="image/x-icon" href="favicon.ico">
  <link rel="preconnect" href="https://fonts.gstatic.com">
  <link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500&display=swap" rel="stylesheet">
  <link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
  <link rel="stylesheet" href="styles.css"></head>
<body class="mat-typography">
  <app-root></app-root>
  <script src="runtime.js" type="module"></script><script src="polyfills.js" type="module"></script><script src="vendor.js" type="module"></script><script src="main.js" type="module"></script></body>
</html>
2386
cmd/pkgdash/assets/ui/main.js
Normal file
File diff suppressed because it is too large
1
cmd/pkgdash/assets/ui/main.js.map
Normal file
File diff suppressed because one or more lines are too long
3398
cmd/pkgdash/assets/ui/polyfills.js
Normal file
File diff suppressed because it is too large
1
cmd/pkgdash/assets/ui/polyfills.js.map
Normal file
File diff suppressed because one or more lines are too long
163
cmd/pkgdash/assets/ui/runtime.js
Normal file
@ -0,0 +1,163 @@
|
||||
/******/ (() => { // webpackBootstrap
|
||||
/******/ "use strict";
|
||||
/******/ var __webpack_modules__ = ({});
|
||||
/************************************************************************/
|
||||
/******/ // The module cache
|
||||
/******/ var __webpack_module_cache__ = {};
|
||||
/******/
|
||||
/******/ // The require function
|
||||
/******/ function __webpack_require__(moduleId) {
|
||||
/******/ // Check if module is in cache
|
||||
/******/ var cachedModule = __webpack_module_cache__[moduleId];
|
||||
/******/ if (cachedModule !== undefined) {
|
||||
/******/ return cachedModule.exports;
|
||||
/******/ }
|
||||
/******/ // Create a new module (and put it into the cache)
|
||||
/******/ var module = __webpack_module_cache__[moduleId] = {
|
||||
/******/ // no module.id needed
|
||||
/******/ // no module.loaded needed
|
||||
/******/ exports: {}
|
||||
/******/ };
|
||||
/******/
|
||||
/******/ // Execute the module function
|
||||
/******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
|
||||
/******/
|
||||
/******/ // Return the exports of the module
|
||||
/******/ return module.exports;
|
||||
/******/ }
|
||||
/******/
|
||||
/******/ // expose the modules object (__webpack_modules__)
|
||||
/******/ __webpack_require__.m = __webpack_modules__;
|
||||
/******/
|
||||
/************************************************************************/
|
||||
/******/ /* webpack/runtime/chunk loaded */
|
||||
/******/ (() => {
|
||||
/******/ var deferred = [];
|
||||
/******/ __webpack_require__.O = (result, chunkIds, fn, priority) => {
|
||||
/******/ if(chunkIds) {
|
||||
/******/ priority = priority || 0;
|
||||
/******/ for(var i = deferred.length; i > 0 && deferred[i - 1][2] > priority; i--) deferred[i] = deferred[i - 1];
|
||||
/******/ deferred[i] = [chunkIds, fn, priority];
|
||||
/******/ return;
|
||||
/******/ }
|
||||
/******/ var notFulfilled = Infinity;
|
||||
/******/ for (var i = 0; i < deferred.length; i++) {
|
||||
/******/ var [chunkIds, fn, priority] = deferred[i];
|
||||
/******/ var fulfilled = true;
|
||||
/******/ for (var j = 0; j < chunkIds.length; j++) {
|
||||
/******/ if ((priority & 1 === 0 || notFulfilled >= priority) && Object.keys(__webpack_require__.O).every((key) => (__webpack_require__.O[key](chunkIds[j])))) {
|
||||
/******/ chunkIds.splice(j--, 1);
|
||||
/******/ } else {
|
||||
/******/ fulfilled = false;
|
||||
/******/ if(priority < notFulfilled) notFulfilled = priority;
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ if(fulfilled) {
|
||||
/******/ deferred.splice(i--, 1)
|
||||
/******/ var r = fn();
|
||||
/******/ if (r !== undefined) result = r;
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ return result;
|
||||
/******/ };
|
||||
/******/ })();
|
||||
/******/
|
||||
/******/ /* webpack/runtime/compat get default export */
|
||||
/******/ (() => {
|
||||
/******/ // getDefaultExport function for compatibility with non-harmony modules
|
||||
/******/ __webpack_require__.n = (module) => {
|
||||
/******/ var getter = module && module.__esModule ?
|
||||
/******/ () => (module['default']) :
|
||||
/******/ () => (module);
|
||||
/******/ __webpack_require__.d(getter, { a: getter });
|
||||
/******/ return getter;
|
||||
/******/ };
|
||||
/******/ })();
|
||||
/******/
|
||||
/******/ /* webpack/runtime/define property getters */
|
||||
/******/ (() => {
|
||||
/******/ // define getter functions for harmony exports
|
||||
/******/ __webpack_require__.d = (exports, definition) => {
|
||||
/******/ for(var key in definition) {
|
||||
/******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
|
||||
/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ };
|
||||
/******/ })();
|
||||
/******/
|
||||
/******/ /* webpack/runtime/hasOwnProperty shorthand */
|
||||
/******/ (() => {
|
||||
/******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
|
||||
/******/ })();
|
||||
/******/
|
||||
/******/ /* webpack/runtime/make namespace object */
|
||||
/******/ (() => {
|
||||
/******/ // define __esModule on exports
|
||||
/******/ __webpack_require__.r = (exports) => {
|
||||
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
|
||||
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
|
||||
/******/ }
|
||||
/******/ Object.defineProperty(exports, '__esModule', { value: true });
|
||||
/******/ };
|
||||
/******/ })();
|
||||
/******/
|
||||
/******/ /* webpack/runtime/jsonp chunk loading */
|
||||
/******/ (() => {
|
||||
/******/ // no baseURI
|
||||
/******/
|
||||
/******/ // object to store loaded and loading chunks
|
||||
/******/ // undefined = chunk not loaded, null = chunk preloaded/prefetched
|
||||
/******/ // [resolve, reject, Promise] = chunk loading, 0 = chunk loaded
|
||||
/******/ var installedChunks = {
|
||||
/******/ "runtime": 0
|
||||
/******/ };
|
||||
/******/
|
||||
/******/ // no chunk on demand loading
|
||||
/******/
|
||||
/******/ // no prefetching
|
||||
/******/
|
||||
/******/ // no preloaded
|
||||
/******/
|
||||
/******/ // no HMR
|
||||
/******/
|
||||
/******/ // no HMR manifest
|
||||
/******/
|
||||
/******/ __webpack_require__.O.j = (chunkId) => (installedChunks[chunkId] === 0);
|
||||
/******/
|
||||
/******/ // install a JSONP callback for chunk loading
|
||||
/******/ var webpackJsonpCallback = (parentChunkLoadingFunction, data) => {
|
||||
/******/ var [chunkIds, moreModules, runtime] = data;
|
||||
/******/ // add "moreModules" to the modules object,
|
||||
/******/ // then flag all "chunkIds" as loaded and fire callback
|
||||
/******/ var moduleId, chunkId, i = 0;
|
||||
/******/ if(chunkIds.some((id) => (installedChunks[id] !== 0))) {
|
||||
/******/ for(moduleId in moreModules) {
|
||||
/******/ if(__webpack_require__.o(moreModules, moduleId)) {
|
||||
/******/ __webpack_require__.m[moduleId] = moreModules[moduleId];
|
||||
/******/ }
|
||||
/******/ }
|
||||
/******/ if(runtime) var result = runtime(__webpack_require__);
|
||||
/******/ }
|
||||
/******/ if(parentChunkLoadingFunction) parentChunkLoadingFunction(data);
|
||||
/******/ for(;i < chunkIds.length; i++) {
|
||||
/******/ chunkId = chunkIds[i];
|
||||
/******/ if(__webpack_require__.o(installedChunks, chunkId) && installedChunks[chunkId]) {
|
||||
/******/ installedChunks[chunkId][0]();
|
||||
/******/ }
|
||||
/******/ installedChunks[chunkId] = 0;
|
||||
/******/ }
|
||||
/******/ return __webpack_require__.O(result);
|
||||
/******/ }
|
||||
/******/
|
||||
/******/ var chunkLoadingGlobal = self["webpackChunkui"] = self["webpackChunkui"] || [];
|
||||
/******/ chunkLoadingGlobal.forEach(webpackJsonpCallback.bind(null, 0));
|
||||
/******/ chunkLoadingGlobal.push = webpackJsonpCallback.bind(null, chunkLoadingGlobal.push.bind(chunkLoadingGlobal));
|
||||
/******/ })();
|
||||
/******/
|
||||
/************************************************************************/
|
||||
/******/
|
||||
/******/
|
||||
/******/ })()
|
||||
;
|
||||
//# sourceMappingURL=runtime.js.map
|
1
cmd/pkgdash/assets/ui/runtime.js.map
Normal file
File diff suppressed because one or more lines are too long
644
cmd/pkgdash/assets/ui/styles.css
Normal file
File diff suppressed because one or more lines are too long
1
cmd/pkgdash/assets/ui/styles.css.map
Normal file
File diff suppressed because one or more lines are too long
121153
cmd/pkgdash/assets/ui/vendor.js
Normal file
File diff suppressed because one or more lines are too long
1
cmd/pkgdash/assets/ui/vendor.js.map
Normal file
File diff suppressed because one or more lines are too long
237
cmd/pkgdash/main.go
Normal file
@ -0,0 +1,237 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"embed"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
jsoncodec "go.unistack.org/micro-codec-json/v3"
|
||||
jsonpbcodec "go.unistack.org/micro-codec-jsonpb/v3"
|
||||
yamlcodec "go.unistack.org/micro-codec-yaml/v3"
|
||||
envconfig "go.unistack.org/micro-config-env/v3"
|
||||
fileconfig "go.unistack.org/micro-config-file/v3"
|
||||
vaultconfig "go.unistack.org/micro-config-vault/v3"
|
||||
victoriameter "go.unistack.org/micro-meter-victoriametrics/v3"
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
healthhandler "go.unistack.org/micro-server-http/v3/handler/health"
|
||||
meterhandler "go.unistack.org/micro-server-http/v3/handler/meter"
|
||||
spahandler "go.unistack.org/micro-server-http/v3/handler/spa"
|
||||
swaggerui "go.unistack.org/micro-server-http/v3/handler/swagger-ui"
|
||||
"go.unistack.org/micro/v3"
|
||||
"go.unistack.org/micro/v3/config"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
slog "go.unistack.org/micro/v3/logger/slog"
|
||||
"go.unistack.org/micro/v3/meter"
|
||||
"go.unistack.org/micro/v3/server"
|
||||
rutil "go.unistack.org/micro/v3/util/reflect"
|
||||
appconfig "go.unistack.org/pkgdash/internal/config"
|
||||
"go.unistack.org/pkgdash/internal/database"
|
||||
"go.unistack.org/pkgdash/internal/handler"
|
||||
"go.unistack.org/pkgdash/internal/storage"
|
||||
_ "go.unistack.org/pkgdash/internal/storage/sqlite"
|
||||
"go.unistack.org/pkgdash/internal/worker"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
const appName = "pkgdash"
|
||||
|
||||
var (
|
||||
BuildDate  string = "now"    // filled in at build time
AppVersion string = "latest" // filled in at build time
|
||||
)
|
||||
|
||||
//go:generate rm -rf assets
|
||||
//go:generate mkdir assets
|
||||
//go:generate cp -vr ../../ui/dist/ui assets/
|
||||
//go:embed assets/*
|
||||
var assets embed.FS
|
||||
|
||||
func main() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
log := slog.NewLogger(logger.WithLevel(logger.DebugLevel))
|
||||
if err := log.Init(); err != nil {
|
||||
log.Fatal(ctx, "failed to init logger")
|
||||
}
|
||||
|
||||
cfg := appconfig.NewConfig(appName, AppVersion) // create new empty config
|
||||
vc := vaultconfig.NewConfig(
|
||||
config.AllowFail(true), // the vault config may not exist
|
||||
config.Struct(cfg), // load from vault
|
||||
config.Codec(jsoncodec.NewCodec()), // vault config in json
|
||||
config.BeforeLoad(func(ctx context.Context, c config.Config) error {
|
||||
return c.Init(
|
||||
vaultconfig.HTTPClient(&http.Client{
|
||||
Transport: &http.Transport{
|
||||
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
|
||||
},
|
||||
}),
|
||||
vaultconfig.Address(cfg.Vault.Addr),
|
||||
vaultconfig.Timeout(5*time.Second),
|
||||
vaultconfig.Token(cfg.Vault.Token),
|
||||
vaultconfig.Path(cfg.Vault.Path),
|
||||
)
|
||||
}),
|
||||
)
|
||||
|
||||
if err := config.Load(ctx,
|
||||
[]config.Config{
|
||||
config.NewConfig( // load from defaults
|
||||
config.Struct(cfg), // pass config struct
|
||||
),
|
||||
fileconfig.NewConfig( // load from file
|
||||
config.AllowFail(true), // the file may not exist
|
||||
config.Struct(cfg), // pass config struct
|
||||
config.Codec(yamlcodec.NewCodec()), // file config in yaml
|
||||
fileconfig.Path("./local.yaml"), // nearby file
|
||||
),
|
||||
envconfig.NewConfig( // load from environment
|
||||
config.Struct(cfg), // pass config struct
|
||||
),
|
||||
vc,
|
||||
}, config.LoadOverride(true),
|
||||
); err != nil {
|
||||
log.Fatal(ctx, "failed to load config: %v", err)
|
||||
}
|
||||
|
||||
if err := config.Validate(ctx, cfg); err != nil {
|
||||
log.Fatal(ctx, "failed to validate config: %v", err)
|
||||
}
|
||||
|
||||
swaggerui.Config["url"] = "../service.swagger.yaml"
|
||||
|
||||
meter.DefaultMeter = victoriameter.NewMeter(
|
||||
meter.Path(cfg.Meter.Path),
|
||||
meter.WriteFDMetrics(true),
|
||||
meter.WriteProcessMetrics(true),
|
||||
meter.Address(cfg.Meter.Addr),
|
||||
)
|
||||
|
||||
svc := micro.NewService()
|
||||
|
||||
if err := svc.Init(
|
||||
micro.Server(httpsrv.NewServer()),
|
||||
micro.Name(cfg.Server.Name),
|
||||
micro.Version(cfg.Server.Version),
|
||||
); err != nil {
|
||||
log.Fatal(ctx, "failed to init service: %v", err)
|
||||
}
|
||||
|
||||
assetsUI, err := fs.Sub(assets, "assets/ui")
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to get assets: %v", err)
|
||||
}
|
||||
|
||||
if err := svc.Server("http").Init(
|
||||
server.Address(cfg.Server.Addr),
|
||||
server.Name(cfg.Server.Name),
|
||||
server.Version(cfg.Server.Version),
|
||||
server.Codec("application/json", jsonpbcodec.NewCodec()),
|
||||
httpsrv.PathHandler(http.MethodGet, "/ui/*", spahandler.Handler("/ui/", assetsUI)),
|
||||
httpsrv.PathHandler(http.MethodHead, "/ui/*", spahandler.Handler("/ui/", assetsUI)),
|
||||
httpsrv.PathHandler(http.MethodGet, "/swagger-ui/*", swaggerui.Handler("/swagger-ui")),
|
||||
); err != nil {
|
||||
log.Fatal(ctx, "failed to init service: %v", err)
|
||||
}
|
||||
|
||||
if err := database.ParseDSN(cfg.Database); err != nil {
|
||||
log.Fatal(ctx, "failed to init database: %v", err)
|
||||
}
|
||||
|
||||
db, err := database.Connect(ctx, cfg.Database, log)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to connect database: %v", err)
|
||||
}
|
||||
|
||||
store, err := storage.NewStorage(cfg.Database.Type, log, db)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to init storage: %v", err)
|
||||
}
|
||||
|
||||
h, err := handler.NewHandler(log, store)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to create handler: %v", err)
|
||||
}
|
||||
|
||||
if err := svc.Init(
|
||||
micro.Logger(
|
||||
log.Clone(logger.WithLevel(logger.ParseLevel(cfg.Server.LoggerLevel))),
|
||||
),
|
||||
); err != nil {
|
||||
log.Fatal(ctx, "failed to init service: %v", err)
|
||||
}
|
||||
|
||||
if err := pb.RegisterPkgdashServer(svc.Server("http"), h); err != nil {
|
||||
log.Fatal(ctx, "failed to register handler: %v", err)
|
||||
}
|
||||
|
||||
intsvc := httpsrv.NewServer(
|
||||
server.Codec("application/json", jsoncodec.NewCodec()),
|
||||
server.Address(cfg.Meter.Addr),
|
||||
)
|
||||
|
||||
if err := intsvc.Init(); err != nil {
|
||||
log.Fatal(ctx, "failed to init http srv: %v", err)
|
||||
}
|
||||
|
||||
if err := healthhandler.RegisterHealthServiceServer(intsvc, healthhandler.NewHandler()); err != nil {
|
||||
log.Fatal(ctx, "failed to set http handler: %v", err)
|
||||
}
|
||||
|
||||
if err := meterhandler.RegisterMeterServiceServer(intsvc, meterhandler.NewHandler()); err != nil {
|
||||
log.Fatal(ctx, "failed to set http handler: %v", err)
|
||||
}
|
||||
|
||||
if err := intsvc.Start(); err != nil {
|
||||
log.Fatal(ctx, "failed to run http srv: %v", err)
|
||||
}
|
||||
|
||||
cw, err := vc.Watch(ctx, config.WatchCoalesce(true), config.WatchInterval(1*time.Second, 5*time.Second))
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to watch config: %v", err)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := cw.Stop(); err != nil {
|
||||
log.Error(ctx, err.Error())
|
||||
}
|
||||
}()
|
||||
|
||||
go func() {
|
||||
for {
|
||||
changes, err := cw.Next()
|
||||
if err != nil {
|
||||
log.Error(ctx, "failed to get config update: %v", err)
|
||||
}
|
||||
for k, v := range changes {
|
||||
if err = rutil.SetFieldByPath(cfg, v, k); err != nil {
|
||||
log.Error(ctx, "failed to set config update: %v", err)
|
||||
break
|
||||
}
|
||||
}
|
||||
if err == nil {
|
||||
for k := range changes {
|
||||
switch k {
|
||||
case "Server.LoggerLevel":
|
||||
if lvl, ok := changes[k].(string); ok {
|
||||
log.Info(ctx, "logger level changed to %s", lvl)
|
||||
log.Level(logger.ParseLevel(lvl))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
go func() {
|
||||
worker.Run(ctx, log, store, time.Duration(cfg.App.CheckInterval))
|
||||
}()
|
||||
|
||||
if err = svc.Run(); err != nil {
|
||||
log.Fatal(ctx, "failed to run svc: %v", err)
|
||||
}
|
||||
}
|
756
cmd/pkgdashcli/main.go
Normal file
@ -0,0 +1,756 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
stdslog "log/slog"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/user"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
gitconfig "github.com/go-git/go-git/v5/config"
|
||||
"github.com/go-git/go-git/v5/plumbing"
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
httpauth "github.com/go-git/go-git/v5/plumbing/transport/http"
|
||||
"github.com/jdx/go-netrc"
|
||||
yamlcodec "go.unistack.org/micro-codec-yaml/v3"
|
||||
envconfig "go.unistack.org/micro-config-env/v3"
|
||||
fileconfig "go.unistack.org/micro-config-file/v3"
|
||||
microflag "go.unistack.org/micro-config-flag/v3"
|
||||
"go.unistack.org/micro/v3/config"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/micro/v3/logger/slog"
|
||||
"go.unistack.org/pkgdash/internal/configcli"
|
||||
"go.unistack.org/pkgdash/internal/modules"
|
||||
"go.unistack.org/pkgdash/internal/source"
|
||||
"golang.org/x/mod/modfile"
|
||||
"golang.org/x/mod/semver"
|
||||
)
|
||||
|
||||
// https://docs.github.com/ru/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
|
||||
|
||||
var initMsg = `
|
||||
Pkgdashcli lets you pick a dependency version update and open
merge requests in version control systems.
|
||||
|
||||
Usage:
|
||||
pkgdashcli --command {{command}} --path {{name of dep}}
|
||||
|
||||
Commands:
|
||||
checkupdate | Collects the list of dependencies that have newer versions available.
|
||||
list | Returns the list of dependency-update PRs for this repository.
|
||||
update --path {{name of one dep or empty for update all dep}} | Creates a PR with the specified dependency update, or creates PRs with dependency updates for all modules if path is empty.
|
||||
close --path {{name of one dep or empty for close all pr}} | Closes the PR for the specified dependency, or closes all PRs with dependency updates if path is empty.
|
||||
|
||||
Flags:
|
||||
--command | The command to execute
|
||||
--path | The name of the module to create/close the PR, if empty, the command is executed for all modules.
|
||||
`
|
||||
|
||||
var (
|
||||
DefaultPullRequestTitle = `Bump {{.Name}} from {{.VersionOld}} to {{.VersionNew}}`
|
||||
DefaultPullRequestBody = `Bumps {{.Name}} from {{.VersionOld}} to {{.VersionNew}}`
|
||||
)
|
||||
|
||||
var (
|
||||
configFiles = []string{
|
||||
"dependabot.yml",
|
||||
"pkgdashcli.yml",
|
||||
"pkgdashcli.yaml",
|
||||
}
|
||||
configDirs = []string{
|
||||
".gitea",
|
||||
".github",
|
||||
".gitlab",
|
||||
}
|
||||
repoMgmt = map[string]string{
|
||||
".gitea": "gitea",
|
||||
".gogs": "gogs",
|
||||
".github": "github",
|
||||
".gitlab": "gitlab",
|
||||
}
|
||||
repoAPI = map[string]string{
|
||||
".gitea": "git.unistack.org",
|
||||
".gogs": "gogs",
|
||||
".github": "github.com/unistack-org",
|
||||
".gitlab": "gitlab.mtsbank.ru",
|
||||
}
|
||||
)
|
||||
|
||||
type Data struct {
|
||||
Modules map[string]modules.Update
|
||||
}
|
||||
|
||||
func main() {
|
||||
var err error
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
log := slog.NewLogger(slog.WithHandlerFunc(stdslog.NewTextHandler))
|
||||
|
||||
if err = log.Init(logger.WithLevel(logger.DebugLevel)); err != nil {
|
||||
log.Error(ctx, fmt.Sprintf("logger init error: %v", err))
|
||||
}
|
||||
|
||||
cfg := configcli.NewConfig()
|
||||
|
||||
if err = config.Load(ctx,
|
||||
[]config.Config{
|
||||
config.NewConfig(
|
||||
config.Struct(cfg),
|
||||
),
|
||||
envconfig.NewConfig(
|
||||
config.Struct(cfg),
|
||||
),
|
||||
},
|
||||
config.LoadOverride(true),
|
||||
); err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("failed to load config: %v", err))
|
||||
}
|
||||
|
||||
for _, configDir := range configDirs {
|
||||
for _, configFile := range configFiles {
|
||||
path := filepath.Join(configDir, configFile)
|
||||
if _, err = os.Stat(path); os.IsNotExist(err) {
|
||||
continue
|
||||
}
|
||||
|
||||
c := fileconfig.NewConfig(
|
||||
config.AllowFail(false),
|
||||
config.Struct(cfg),
|
||||
config.Codec(yamlcodec.NewCodec()),
|
||||
fileconfig.Path(path),
|
||||
)
|
||||
err = c.Init()
|
||||
if err != nil {
|
||||
log.Error(ctx, fmt.Sprintf("failed to init config: %v", err))
|
||||
}
|
||||
if err = c.Load(ctx, config.LoadOverride(true)); err != nil {
|
||||
log.Error(ctx, fmt.Sprintf("failed to load config: %v", err))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if cfg.PullRequestBody == "" {
|
||||
cfg.PullRequestBody = DefaultPullRequestBody
|
||||
}
|
||||
|
||||
if cfg.PullRequestTitle == "" {
|
||||
cfg.PullRequestTitle = DefaultPullRequestTitle
|
||||
}
|
||||
|
||||
cliCfg := &configcli.Cli{}
|
||||
c := microflag.NewConfig(config.Struct(cliCfg), microflag.FlagErrorHandling(flag.ContinueOnError))
|
||||
|
||||
if err = c.Init(); err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("init cli cfg failed: %v", err))
|
||||
}
|
||||
|
||||
if err = c.Load(ctx); err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("load cli cfg failed: %v", err))
|
||||
}
|
||||
|
||||
if cliCfg.Path == "" && cliCfg.Command == "" {
|
||||
fmt.Print(initMsg)
|
||||
return
|
||||
}
|
||||
|
||||
path := "."
|
||||
if len(os.Args) > 1 {
|
||||
path = os.Args[1]
|
||||
}
|
||||
|
||||
name, err := modules.FindModFile(path)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf, err := os.ReadFile(name)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to read file", err)
|
||||
}
|
||||
mfile, err := modfile.Parse(name, buf, nil)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to parse file", err)
|
||||
}
|
||||
|
||||
mvs := make(map[string]modules.Update)
|
||||
|
||||
updateOptions := modules.UpdateOptions{
|
||||
Pre: cfg.UpdateOpt.Pre,
|
||||
Major: cfg.UpdateOpt.Major,
|
||||
UpMajor: cfg.UpdateOpt.UpMajor,
|
||||
Cached: cfg.UpdateOpt.Cached,
|
||||
OnUpdate: func(u modules.Update) {
|
||||
var modpath string // new mod path with major
|
||||
if u.Err != nil {
|
||||
log.Error(ctx, fmt.Sprintf("%s: failed: %v", u.Module.Path, u.Err))
|
||||
return
|
||||
}
|
||||
modpath = u.Module.Path
|
||||
v := semver.Major(u.Version)
|
||||
p := modules.ModPrefix(modpath)
|
||||
if !strings.HasPrefix(u.Module.Version, v) && v != "v1" && v != "v0" {
|
||||
switch strings.HasPrefix(u.Module.Path, "gopkg.in") {
|
||||
case true:
|
||||
modpath = p + "." + v
|
||||
case false:
|
||||
modpath = p + "/" + v
|
||||
}
|
||||
}
|
||||
mvs[modpath] = u
|
||||
},
|
||||
}
|
||||
|
||||
for _, req := range mfile.Require {
|
||||
updateOptions.Modules = append(updateOptions.Modules, req.Mod)
|
||||
}
|
||||
|
||||
modules.Updates(updateOptions)
|
||||
|
||||
if err = getRepoMgmt(ctx, log, cfg); err != nil { // Filling in empty config fields.
|
||||
log.Error(ctx, err.Error())
|
||||
}
|
||||
|
||||
if len(cfg.Branches) == 0 {
|
||||
branchName, err := getCurrentBranch(ctx)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to get current branch", err)
|
||||
}
|
||||
cfg.Branches = append(cfg.Branches, branchName)
|
||||
}
|
||||
|
||||
if cfg.Source.Owner == "" {
|
||||
owner, err := getOwnerRepository(ctx)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to get current repository", err)
|
||||
}
|
||||
cfg.Source.Owner = owner
|
||||
}
|
||||
|
||||
if cfg.Source.Repository == "" {
|
||||
repository, err := getCurrentRepository(ctx)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to get current repository", err)
|
||||
}
|
||||
cfg.Source.Repository = repository
|
||||
}
|
||||
|
||||
gitSource := source.NewSourceControl(*cfg, log)
|
||||
|
||||
Execute(ctx, log, gitSource, mvs, *cliCfg, *cfg)
|
||||
|
||||
log.Info(ctx, "Pkgdash successfully updated dependencies")
|
||||
}
|
||||
|
||||
func Execute(ctx context.Context, log logger.Logger, gitSource source.SourceControl, mvs map[string]modules.Update, cliCfg configcli.Cli, cfg configcli.Config) {
|
||||
var mod modules.Update
|
||||
var ok bool
|
||||
var path string
|
||||
prList := make(map[string]map[string]string)
|
||||
|
||||
switch cliCfg.Command {
|
||||
case "checkupdate":
|
||||
js, err := json.Marshal(mvs)
|
||||
fmt.Printf("Modules get update: %s, err: %v\n", js, err)
|
||||
case "open":
|
||||
if cliCfg.Path != "" { // update one dep
|
||||
path = cliCfg.Path
|
||||
if mod, ok = mvs[path]; !ok {
|
||||
log.Fatal(ctx, fmt.Sprintf("no update exists for %s", path))
|
||||
}
|
||||
log.Debug(ctx, fmt.Sprintf("Start update %s from %s to %s", path, mod.Module.Version, mod.Version))
|
||||
for _, branch := range cfg.Branches {
|
||||
if err := gitSource.RequestOpen(ctx, branch, path, mod); err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("failed to create pr: %v", err))
|
||||
}
|
||||
}
|
||||
log.Debug(ctx, fmt.Sprintf("Update successful for %s", path))
|
||||
return
|
||||
}
|
||||
for _, branch := range cfg.Branches { // update all dep
|
||||
for path, mod = range mvs {
|
||||
log.Debug(ctx, fmt.Sprintf("Start update %s from %s to %s", path, mod.Module.Version, mod.Version))
|
||||
err := gitSource.RequestOpen(ctx, branch, path, mod)
|
||||
if err != nil {
|
||||
if strings.Contains(err.Error(), "already exists") {
|
||||
log.Debug(ctx, fmt.Sprintf("skip %s, branch already exists", path))
|
||||
continue
|
||||
}
|
||||
log.Fatal(ctx, fmt.Sprintf("failed to create pr: %v", err))
|
||||
}
|
||||
log.Debug(ctx, fmt.Sprintf("Update successful for %s", path))
|
||||
}
|
||||
}
|
||||
case "update":
|
||||
if cliCfg.Path != "" { // update one dep
|
||||
path = cliCfg.Path
|
||||
if mod, ok = mvs[path]; !ok {
|
||||
log.Fatal(ctx, fmt.Sprintf("no update exists for %s", path))
|
||||
}
|
||||
log.Debug(ctx, fmt.Sprintf("Start update %s from %s to %s", path, mod.Module.Version, mod.Version))
|
||||
for _, branch := range cfg.Branches {
|
||||
if err := gitSource.RequestUpdate(ctx, branch, path, mod); err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("failed to create pr: %v", err))
|
||||
}
|
||||
}
|
||||
log.Debug(ctx, fmt.Sprintf("Update successful for %s", path))
|
||||
return
|
||||
}
|
||||
for _, branch := range cfg.Branches { // update all dep
|
||||
for path, mod = range mvs {
|
||||
log.Debug(ctx, fmt.Sprintf("Start update %s from %s to %s", path, mod.Module.Version, mod.Version))
|
||||
err := gitSource.RequestUpdate(ctx, branch, path, mod)
|
||||
if err != nil {
|
||||
if strings.Contains(err.Error(), "already exists") {
|
||||
log.Debug(ctx, fmt.Sprintf("skip %s, branch already exists", path))
|
||||
continue
|
||||
}
|
||||
log.Fatal(ctx, fmt.Sprintf("failed to create pr: %v", err))
|
||||
}
|
||||
log.Debug(ctx, fmt.Sprintf("Update successful for %s", path))
|
||||
}
|
||||
}
|
||||
case "close":
|
||||
if cliCfg.Path != "" { // close one dep
|
||||
path = cliCfg.Path
|
||||
log.Debug(ctx, fmt.Sprintf("Start close for %s", path))
|
||||
for _, branch := range cfg.Branches {
|
||||
if err := gitSource.RequestClose(ctx, branch, path); err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("failed to close pr: %v", err))
|
||||
}
|
||||
}
|
||||
log.Debug(ctx, fmt.Sprintf("Close successful for %s", path))
|
||||
return
|
||||
}
|
||||
for _, branch := range cfg.Branches {
|
||||
log.Info(ctx, fmt.Sprintf("Start getting pr for %s", branch))
|
||||
rMap, err := gitSource.RequestList(ctx, branch)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("Error with getting pr list for branch: %s", branch))
|
||||
}
|
||||
|
||||
log.Info(ctx, fmt.Sprintf("for %s:\n%s", branch, rMap))
|
||||
log.Info(ctx, fmt.Sprintf("Start close pr for base branch %s", branch))
|
||||
|
||||
for path = range rMap {
|
||||
log.Debug(ctx, fmt.Sprintf("Start close for %s", path))
|
||||
if err = gitSource.RequestClose(ctx, branch, path); err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("failed to close pr: %v", err))
|
||||
}
|
||||
log.Debug(ctx, fmt.Sprintf("Close successful for %s", path))
|
||||
}
|
||||
}
|
||||
case "list":
|
||||
for _, branch := range cfg.Branches {
|
||||
rMap, err := gitSource.RequestList(ctx, branch)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("RequestList: error %s", err))
|
||||
}
|
||||
|
||||
prList[branch] = rMap
|
||||
}
|
||||
js, err := json.Marshal(prList)
|
||||
if err != nil {
|
||||
log.Error(ctx, fmt.Sprintf("error: %s", err))
|
||||
}
|
||||
fmt.Printf("for %s:\n%s\n", cfg.Source.Repository, js)
|
||||
default:
|
||||
fmt.Print(initMsg)
|
||||
}
|
||||
}
|
||||
|
||||
func getCurrentRepository(ctx context.Context) (string, error) {
|
||||
wd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
p := filepath.Clean(wd)
|
||||
|
||||
repo, err := git.PlainOpen(p)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
cfg, err := repo.Config()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
for k, v := range cfg.Remotes {
|
||||
if k != "origin" {
|
||||
continue
|
||||
}
|
||||
|
||||
u, err := url.Parse(v.URLs[0])
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
return filepath.Base(u.Path), nil
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("failed to get remotes")
|
||||
}
|
||||
|
||||
func getOwnerRepository(ctx context.Context) (string, error) {
|
||||
wd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
p := filepath.Clean(wd)
|
||||
|
||||
repo, err := git.PlainOpen(p)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
cfg, err := repo.Config()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
for k, v := range cfg.Remotes {
|
||||
if k != "origin" {
|
||||
continue
|
||||
}
|
||||
|
||||
u, err := url.Parse(v.URLs[0])
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
return filepath.Base(filepath.Dir(u.Path)), nil
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("failed to get remotes")
|
||||
}
|
||||
|
||||
func getCurrentBranch(ctx context.Context) (string, error) {
|
||||
wd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
p := filepath.Clean(wd)
|
||||
|
||||
repo, err := git.PlainOpen(p)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
ref, err := repo.Head()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return ref.Name().Short(), nil
|
||||
}
|
||||
|
||||
func getRepoMgmt(ctx context.Context, log logger.Logger, cfg *configcli.Config) error {
|
||||
wd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
p := filepath.Clean(wd)
|
||||
for _, configDir := range configDirs {
|
||||
_, err := os.Stat(filepath.Join(p, configDir))
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
log.Info(ctx, fmt.Sprintf("check config dir %s", configDir))
|
||||
if name, ok := repoMgmt[configDir]; ok && cfg.Source.TypeGit == "" {
|
||||
cfg.Source.TypeGit = name
|
||||
}
|
||||
if api, ok := repoAPI[configDir]; ok && cfg.Source.APIURL == "" {
|
||||
cfg.Source.APIURL = api
|
||||
}
|
||||
}
|
||||
if p == "/" && cfg.Source.TypeGit == "" && cfg.Source.APIURL == "" {
|
||||
return fmt.Errorf("unknown")
|
||||
}
|
||||
// p = filepath.Clean(filepath.Join(p, ".."))
|
||||
|
||||
usr, err := user.Current()
|
||||
if err != nil {
|
||||
log.Fatal(ctx, fmt.Sprintf("pkgdash/main can't get info about user: %s", err))
|
||||
}
|
||||
|
||||
log.Info(ctx, fmt.Sprintf("try to configure scm source %v", cfg.Source))
|
||||
|
||||
netrcfile := filepath.Join(usr.HomeDir, ".netrc")
|
||||
log.Info(ctx, "try to parse netrc file "+netrcfile)
|
||||
n, err := netrc.Parse(netrcfile)
|
||||
if err != nil {
|
||||
log.Error(ctx, "pkgdash/main can't parse .netrc: %s", err)
|
||||
}
|
||||
|
||||
log.Info(ctx, "try to configure scm for "+cfg.Source.APIURL)
|
||||
if cfg.Source.Username == "" {
|
||||
cfg.Source.Username = n.Machine(cfg.Source.APIURL).Get("login")
|
||||
}
|
||||
if cfg.Source.Password == "" {
|
||||
cfg.Source.Password = n.Machine(cfg.Source.APIURL).Get("password")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func giteaPullRequest(ctx context.Context, log logger.Logger, cfg *configcli.Config, branch string, mods map[string]modules.Update) error {
|
||||
envAPIURL := os.Getenv("GITHUB_API_URL")
|
||||
envREPOSITORY := os.Getenv("GITHUB_REPOSITORY")
|
||||
envTOKEN := os.Getenv("GITHUB_TOKEN")
|
||||
|
||||
var buf []byte
|
||||
var err error
|
||||
|
||||
tplTitle, err := template.New("pull_request_title").Parse(cfg.PullRequestTitle)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to parse template: %v", err)
|
||||
}
|
||||
|
||||
wTitle := bytes.NewBuffer(nil)
|
||||
|
||||
tplBody, err := template.New("pull_request_body").Parse(cfg.PullRequestBody)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to parse template: %v", err)
|
||||
}
|
||||
|
||||
wBody := bytes.NewBuffer(nil)
|
||||
|
||||
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to open repo: %v", err)
|
||||
}
|
||||
|
||||
if err = repo.FetchContext(ctx, &git.FetchOptions{
|
||||
Auth: &httpauth.BasicAuth{Username: envTOKEN, Password: envTOKEN},
|
||||
Force: true,
|
||||
}); err != nil && err != git.NoErrAlreadyUpToDate {
|
||||
log.Fatal(ctx, "failed to fetch repo: %v", err)
|
||||
}
|
||||
|
||||
var headRef *plumbing.Reference
|
||||
refIter, err := repo.Branches()
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to get branches: %v", err)
|
||||
}
|
||||
for {
|
||||
ref, err := refIter.Next()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
if ref.Name().String() == branch {
|
||||
headRef = ref
|
||||
break
|
||||
}
|
||||
}
|
||||
refIter.Close()
|
||||
|
||||
if headRef == nil {
|
||||
log.Fatal(ctx, "failed to get repo branch head")
|
||||
}
|
||||
|
||||
log.Info(ctx, "repo head %s", headRef)
|
||||
|
||||
wtree, err := repo.Worktree()
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to get worktree: %v", err)
|
||||
}
|
||||
|
||||
type giteaPull struct {
|
||||
URL string `json:"url"`
|
||||
Title string `json:"title"`
|
||||
Base struct {
|
||||
Ref string `json:"ref"`
|
||||
} `json:"base"`
|
||||
ID int64 `json:"id"`
|
||||
}
|
||||
|
||||
var pulls []*giteaPull
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, envAPIURL+"/repos/"+envREPOSITORY+"/pulls?state=open&token="+envTOKEN, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
|
||||
rsp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
buf, _ = io.ReadAll(rsp.Body)
|
||||
if rsp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("unknown error: %s", buf)
|
||||
}
|
||||
|
||||
if err = json.Unmarshal(buf, &pulls); err != nil {
|
||||
log.Fatal(ctx, "failed to decode response %s err: %v", buf, err)
|
||||
}
|
||||
|
||||
for path := range mods {
|
||||
for _, pull := range pulls {
|
||||
if strings.Contains(pull.Title, path) && pull.Base.Ref == branch {
|
||||
log.Info(ctx, "skip %s as pr already exists %s", path, pull.URL)
|
||||
delete(mods, path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for path, mod := range mods {
|
||||
wTitle.Reset()
|
||||
wBody.Reset()
|
||||
|
||||
log.Info(ctx, "update %s from %s to %s", path, mod.Module.Version, mod.Version)
|
||||
|
||||
log.Info(ctx, "reset worktree")
|
||||
if err = wtree.Reset(&git.ResetOptions{Mode: git.HardReset}); err != nil {
|
||||
log.Fatal(ctx, "failed to reset repo branch: %v", err)
|
||||
}
|
||||
|
||||
if err = wtree.PullContext(ctx, &git.PullOptions{
|
||||
Auth: &httpauth.BasicAuth{Username: envTOKEN, Password: envTOKEN},
|
||||
Depth: 1,
|
||||
// RemoteURL :
|
||||
Force: true,
|
||||
RemoteName: "origin",
|
||||
}); err != nil && err != git.NoErrAlreadyUpToDate {
|
||||
log.Fatal(ctx, "failed to pull repo: %v", err)
|
||||
}
|
||||
|
||||
log.Info(ctx, "checkout ref %s", headRef)
|
||||
if err = wtree.Checkout(&git.CheckoutOptions{
|
||||
Hash: headRef.Hash(),
|
||||
Branch: plumbing.NewBranchReferenceName(fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)),
|
||||
Create: true,
|
||||
Force: true,
|
||||
}); err != nil {
|
||||
log.Fatal(ctx, "failed to checkout tree: %v", err)
|
||||
}
|
||||
|
||||
epath, err := exec.LookPath("go")
|
||||
if errors.Is(err, exec.ErrDot) {
|
||||
err = nil
|
||||
}
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to find go command: %v", err)
|
||||
}
|
||||
|
||||
var cmd *exec.Cmd
|
||||
var out []byte
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version))
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
log.Fatal(ctx, "failed to run go mod edit: %s err: %v", out, err)
|
||||
}
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "tidy")
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
log.Fatal(ctx, "failed to run go mod tidy: %s err: %v", out, err)
|
||||
}
|
||||
|
||||
log.Info(ctx, "worktree add go.mod")
|
||||
if _, err = wtree.Add("go.mod"); err != nil {
|
||||
log.Fatal(ctx, "failed to add file: %v", err)
|
||||
}
|
||||
|
||||
log.Info(ctx, "worktree add go.sum")
|
||||
if _, err = wtree.Add("go.sum"); err != nil {
|
||||
log.Fatal(ctx, "failed to add file: %v", err)
|
||||
}
|
||||
|
||||
log.Info(ctx, "worktree commit")
|
||||
_, err = wtree.Commit(wTitle.String(), &git.CommitOptions{
|
||||
Parents: []plumbing.Hash{headRef.Hash()},
|
||||
Author: &object.Signature{
|
||||
Name: "gitea-actions",
|
||||
Email: "info@unistack.org",
|
||||
When: time.Now(),
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "failed to commit: %v", err)
|
||||
}
|
||||
|
||||
// newref := plumbing.NewHashReference(plumbing.ReferenceName(fmt.Sprintf("refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version)), headRef.Hash())
|
||||
|
||||
/*
|
||||
if err = repo.Storer.SetReference(newref); err != nil {
|
||||
log.Fatal(ctx, "failed to create repo branch: %v", err)
|
||||
}
|
||||
*/
|
||||
|
||||
refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version))
|
||||
|
||||
log.Info(ctx, "try to push refspec %s", refspec)
|
||||
|
||||
if err = repo.PushContext(ctx, &git.PushOptions{
|
||||
RefSpecs: []gitconfig.RefSpec{refspec},
|
||||
Auth: &httpauth.BasicAuth{Username: envTOKEN, Password: envTOKEN},
|
||||
Force: true,
|
||||
}); err != nil {
|
||||
log.Fatal(ctx, "failed to push repo branch: %v", err)
|
||||
}
|
||||
|
||||
data := map[string]string{
|
||||
"Name": path,
|
||||
"VersionOld": mod.Module.Version,
|
||||
"VersionNew": mod.Version,
|
||||
}
|
||||
|
||||
if err = tplTitle.Execute(wTitle, data); err != nil {
|
||||
log.Fatal(ctx, "failed to execute template: %v", err)
|
||||
}
|
||||
if err = tplBody.Execute(wBody, data); err != nil {
|
||||
log.Fatal(ctx, "failed to execute template: %v", err)
|
||||
}
|
||||
|
||||
body := map[string]string{
|
||||
"base": branch,
|
||||
"body": wBody.String(),
|
||||
"head": fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version),
|
||||
"title": wTitle.String(),
|
||||
}
|
||||
log.Info(ctx, "raw body: %#+v", body)
|
||||
|
||||
buf, err = json.Marshal(body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Info(ctx, "marshal body: %s", buf)
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, envAPIURL+"/repos/"+envREPOSITORY+"/pulls?token="+envTOKEN, bytes.NewReader(buf))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
|
||||
rsp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if rsp.StatusCode != http.StatusCreated {
|
||||
buf, _ = io.ReadAll(rsp.Body)
|
||||
return fmt.Errorf("unknown error: %s", buf)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
12
generate.go
Normal file
@@ -0,0 +1,12 @@
//go:build tools
// +build tools

package main

//go:generate ./generate.sh

import (
	_ "github.com/envoyproxy/protoc-gen-validate"
	_ "go.unistack.org/micro-proto/v3"
	_ "go.unistack.org/protoc-gen-go-micro/v3"
)
15
generate.sh
Executable file
@@ -0,0 +1,15 @@
#!/bin/sh -ex

PROTO_ARGS=" \
  --proto_path=$(go list -f '{{ .Dir }}' -m github.com/envoyproxy/protoc-gen-validate) \
  --proto_path=$(go list -f '{{ .Dir }}' -m go.unistack.org/micro-proto/v3) \
  --go_out=paths=source_relative:./proto \
  --go-micro_out=paths=source_relative,components=micro|http,standalone=false:./proto \
  --validate_out=paths=source_relative,lang=go:./proto \
  --go-micro_out=components="openapiv3",openapi_file=./apidocs.swagger.yaml,debug=true,paths=source_relative:./proto
"

find ./proto -type f -name "*.pb.go" -delete
protoc -I./proto $PROTO_ARGS ./proto/*.proto || find ./proto -type f -name "*.pb.go" -delete

#./ui/node_modules/.bin/ng-openapi-gen -i ./proto/apidocs.swagger.yaml -o ./ui/src/app/api --removeStaleFiles true --ignoreUnusedModels false
125
go.mod
Normal file
@ -0,0 +1,125 @@
|
||||
module go.unistack.org/pkgdash
|
||||
|
||||
go 1.22.7
|
||||
|
||||
toolchain go1.23.3
|
||||
|
||||
require (
|
||||
github.com/envoyproxy/protoc-gen-validate v1.1.0
|
||||
github.com/go-git/go-git/v5 v5.12.1-0.20241206065855-b2aea86f9eef
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/jackc/pgx/v5 v5.5.4
|
||||
github.com/jdx/go-netrc v1.0.0
|
||||
github.com/jmoiron/sqlx v1.3.5
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/stretchr/testify v1.10.0
|
||||
go.unistack.org/micro-client-http/v3 v3.9.14
|
||||
go.unistack.org/micro-codec-json/v3 v3.10.1
|
||||
go.unistack.org/micro-codec-jsonpb/v3 v3.10.3
|
||||
go.unistack.org/micro-codec-yaml/v3 v3.10.2
|
||||
go.unistack.org/micro-config-env/v3 v3.8.7
|
||||
go.unistack.org/micro-config-file/v3 v3.8.10
|
||||
go.unistack.org/micro-config-flag/v3 v3.8.11
|
||||
go.unistack.org/micro-config-vault/v3 v3.8.9
|
||||
go.unistack.org/micro-meter-victoriametrics/v3 v3.8.9
|
||||
go.unistack.org/micro-proto/v3 v3.4.1
|
||||
go.unistack.org/micro-server-http/v3 v3.11.37
|
||||
go.unistack.org/micro/v3 v3.11.12
|
||||
go.unistack.org/protoc-gen-go-micro/v3 v3.10.10
|
||||
golang.org/x/mod v0.22.0
|
||||
golang.org/x/sync v0.10.0
|
||||
golang.org/x/tools v0.28.0
|
||||
google.golang.org/protobuf v1.35.2
|
||||
modernc.org/sqlite v1.29.5
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/KimMachineGun/automemlimit v0.6.1 // indirect
|
||||
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
|
||||
github.com/cilium/ebpf v0.9.1 // indirect
|
||||
github.com/containerd/cgroups/v3 v3.0.1 // indirect
|
||||
github.com/coreos/go-systemd/v22 v22.3.2 // indirect
|
||||
github.com/cyphar/filepath-securejoin v0.3.5 // indirect
|
||||
github.com/docker/go-units v0.5.0 // indirect
|
||||
github.com/go-jose/go-jose/v4 v4.0.4 // indirect
|
||||
github.com/godbus/dbus/v5 v5.0.4 // indirect
|
||||
github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49 // indirect
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||
github.com/jackc/pgx/v4 v4.18.3 // indirect
|
||||
github.com/jackc/puddle/v2 v2.2.1 // indirect
|
||||
github.com/lib/pq v1.10.9 // indirect
|
||||
github.com/opencontainers/runtime-spec v1.0.2 // indirect
|
||||
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect
|
||||
github.com/silas/dag v0.0.0-20220518035006-a7e85ada93c5 // indirect
|
||||
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||
go.uber.org/automaxprocs v1.6.0 // indirect
|
||||
go.unistack.org/metrics v0.0.1 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20241118233622-e639e219e697 // indirect
|
||||
google.golang.org/grpc v1.68.0 // indirect
|
||||
modernc.org/gc/v3 v3.0.0-20240304020402-f0dba7c97c2b // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
dario.cat/mergo v1.0.1 // indirect
|
||||
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||
github.com/ProtonMail/go-crypto v1.1.3 // indirect
|
||||
github.com/cloudflare/circl v1.5.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/emirpasic/gods v1.18.1 // indirect
|
||||
github.com/fatih/structtag v1.2.0 // indirect
|
||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
|
||||
github.com/go-git/go-billy/v5 v5.6.0 // indirect
|
||||
github.com/golang-migrate/migrate/v4 v4.18.1
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
|
||||
github.com/google/gnostic v0.7.0 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
|
||||
github.com/hashicorp/go-rootcerts v1.0.2 // indirect
|
||||
github.com/hashicorp/go-secure-stdlib/parseutil v0.1.8 // indirect
|
||||
github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 // indirect
|
||||
github.com/hashicorp/go-sockaddr v1.0.7 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/hashicorp/vault/api v1.15.0 // indirect
|
||||
github.com/iancoleman/strcase v0.3.0 // indirect
|
||||
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
|
||||
github.com/jackc/pgconn v1.14.3 // indirect
|
||||
github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 // indirect
|
||||
github.com/jackc/pgio v1.0.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgproto3/v2 v2.3.3 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20231201235250-de7065d80cb9 // indirect
|
||||
github.com/jackc/pgtype v1.14.3 // indirect
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
|
||||
github.com/kevinburke/ssh_config v1.2.0 // indirect
|
||||
github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/pjbgf/sha1cd v0.3.0 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/ryanuber/go-glob v1.0.0 // indirect
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
|
||||
github.com/skeema/knownhosts v1.3.0 // indirect
|
||||
github.com/spf13/afero v1.10.0 // indirect
|
||||
github.com/valyala/fastrand v1.1.0 // indirect
|
||||
github.com/valyala/histogram v1.2.0 // indirect
|
||||
github.com/xanzy/ssh-agent v0.3.3 // indirect
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
golang.org/x/crypto v0.30.0 // indirect
|
||||
golang.org/x/net v0.32.0 // indirect
|
||||
golang.org/x/sys v0.28.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
golang.org/x/time v0.7.0 // indirect
|
||||
gopkg.in/warnings.v0 v0.1.2 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
modernc.org/libc v1.49.0 // indirect
|
||||
modernc.org/mathutil v1.6.0 // indirect
|
||||
modernc.org/memory v1.7.2 // indirect
|
||||
modernc.org/strutil v1.2.0 // indirect
|
||||
modernc.org/token v1.1.0 // indirect
|
||||
)
|
95
internal/analyzer/coverage/coverage.go
Normal file
@@ -0,0 +1,95 @@
package coverage

import (
	"context"
	"fmt"
	"io"
	"strings"

	"go.unistack.org/pkgdash/internal/models"
	"golang.org/x/tools/cover"
)

func Analyze(ctx context.Context, dataCoverage io.Reader, pack models.Package) (float64, error) {
	calculated, err := calculateFiles(dataCoverage)
	if err != nil {
		return 0, err
	}

	mapCover := make(map[string]float64)
	{
		tree, err := GetTreeFromGit(ctx, pack.URL)
		if err != nil {
			return 0, err
		}
		list, err := tree.GoFileList("")
		if err != nil {
			return 0, err
		}

		for _, f := range list {
			mapCover[f] = 0.0
		}
	}

	cur := len(mapCover)

	for _, d := range calculated.Files {
		file := strings.TrimPrefix(d.Name, pack.Name+"/")
		mapCover[file] = d.Coverage
	}

	// sanity check: the profile should not reference files missing from the git tree
	if len(mapCover) != cur {
		fmt.Printf("add new keys, was: %d, has: %d\n", cur, len(mapCover))
	}

	// TODO: calculate the aggregate coverage for the whole package

	return 0, nil
}

type Data struct {
	Files []*calculateFile
	Set   bool
}

type calculateFile struct {
	Name     string
	Coverage float64
}

func calculateFiles(coverSrc io.Reader) (d *Data, err error) {
	profiles, err := cover.ParseProfilesFromReader(coverSrc)
	if err != nil {
		return nil, err
	}

	d = new(Data)
	for _, profile := range profiles {
		fn := profile.FileName
		if profile.Mode == "set" {
			d.Set = true
		}

		d.Files = append(d.Files, &calculateFile{
			Name:     fn,
			Coverage: percentCovered(profile),
		})
	}
	return d, err
}

func percentCovered(p *cover.Profile) float64 {
	var total, covered int64
	for _, b := range p.Blocks {
		total += int64(b.NumStmt)
		if b.Count > 0 {
			covered += int64(b.NumStmt)
		}
	}
	if total == 0 {
		return 0
	}
	return float64(covered) / float64(total) * 100
}
37
internal/analyzer/coverage/coverage_test.go
Normal file
@@ -0,0 +1,37 @@
package coverage

import (
	"context"
	"os"
	"testing"

	"github.com/stretchr/testify/assert"
	"go.unistack.org/pkgdash/internal/models"
)

func Test_Calculate(t *testing.T) {
	file, err := os.Open("cover_test.out")
	assert.Nil(t, err)
	defer func() {
		assert.Nil(t, file.Close())
	}()

	dataFiles, err := calculateFiles(file)
	assert.Nil(t, err)
	assert.NotNil(t, dataFiles)
}

func Test_Analyze(t *testing.T) {
	file, err := os.Open("cover_test.out")
	assert.Nil(t, err)
	defer func() {
		assert.Nil(t, file.Close())
	}()

	analyze, err := Analyze(context.Background(), file, models.Package{
		Name: "go.unistack.org/micro/v3",
		URL:  "https://git.unistack.org/unistack-org/micro.git",
	})
	assert.Nil(t, err)
	assert.Equal(t, analyze, 0.0)
}
70
internal/analyzer/coverage/git.go
Normal file
70
internal/analyzer/coverage/git.go
Normal file
@ -0,0 +1,70 @@
|
||||
package coverage
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/go-git/go-git/v5"
|
||||
"github.com/go-git/go-git/v5/plumbing/filemode"
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
"github.com/go-git/go-git/v5/storage/memory"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
fileNil = errors.New("file pointer is nil")
|
||||
)
|
||||
|
||||
func GetTreeFromGit(ctx context.Context, url string) (*Tree, error) {
|
||||
cloneOpts := &git.CloneOptions{
|
||||
URL: url,
|
||||
Progress: os.Stdout,
|
||||
}
|
||||
|
||||
repo, err := git.CloneContext(ctx, memory.NewStorage(), nil, cloneOpts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ref, err := repo.Head()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get head: %v", err)
|
||||
}
|
||||
|
||||
commit, err := repo.CommitObject(ref.Hash())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get commit: %v", err)
|
||||
}
|
||||
|
||||
tree, err := commit.Tree()
|
||||
|
||||
return &Tree{tree}, err
|
||||
}
|
||||
|
||||
type Tree struct {
|
||||
*object.Tree
|
||||
}
|
||||
|
||||
func (t Tree) GoFileList(pattern string) ([]string, error) {
|
||||
matcher, err := regexp.Compile(pattern)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var list []string
|
||||
err = t.Files().ForEach(func(file *object.File) error {
|
||||
if file == nil {
|
||||
return fileNil
|
||||
}
|
||||
if file.Mode == filemode.Regular && strings.HasSuffix(file.Name, ".go") && !strings.HasSuffix(file.Name, "_test.go") && matcher.MatchString(file.Name) {
|
||||
list = append(list, file.Name)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return list, err
|
||||
}
|
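A short sketch of how the helper above can be used on its own: clone a repository into memory and list its regular, non-test Go files. The repository URL is an example, not a required value.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"go.unistack.org/pkgdash/internal/analyzer/coverage"
)

func main() {
	// GetTreeFromGit performs an in-memory clone and returns the tree of HEAD.
	tree, err := coverage.GetTreeFromGit(context.Background(), "https://git.unistack.org/unistack-org/micro.git")
	if err != nil {
		log.Fatal(err)
	}

	// An empty pattern compiles to a regexp that matches every name,
	// so this lists all regular .go files except *_test.go.
	files, err := tree.GoFileList("")
	if err != nil {
		log.Fatal(err)
	}
	for _, f := range files {
		fmt.Println(f)
	}
}
```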
70
internal/config/config.go
Normal file
70
internal/config/config.go
Normal file
@ -0,0 +1,70 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
mtime "go.unistack.org/micro/v3/util/time"
|
||||
)
|
||||
|
||||
type AppConfig struct {
|
||||
CheckInterval mtime.Duration `json:"check_interval" yaml:"check_interval" default:"1d"`
|
||||
}
|
||||
|
||||
type ServerConfig struct {
|
||||
Name string `json:"name" yaml:"name"`
|
||||
Version string `json:"-" yaml:"-"`
|
||||
Addr string `json:"addr" yaml:"addr" default:":9090"`
|
||||
Crt string `json:"crt" yaml:"crt"`
|
||||
Key string `json:"key" yaml:"key"`
|
||||
ID string `json:"-" yaml:"-" default:"micro:generate uuid"`
|
||||
LoggerLevel string `json:"logger_level" yaml:"logger_level"`
|
||||
}
|
||||
|
||||
type TracerConfig struct {
|
||||
Metadata map[string]string `json:"metadata" yaml:"metadata"`
|
||||
AgentHost string `env:"JAEGER_AGENT_HOST" json:"host" yaml:"host" default:"127.0.0.1"`
|
||||
AgentPort string `env:"JAEGER_AGENT_PORT" json:"port" yaml:"port" default:"6831"`
|
||||
Collector string `env:"JAEGER_ENDPOINT,TRACER_ENDPOINT" json:"endpoint" yaml:"endpoint"`
|
||||
}
|
||||
|
||||
type VaultConfig struct {
|
||||
Addr string `env:"VAULT_ADDR" json:"addr" yaml:"addr" default:"http://127.0.0.1:8200"`
|
||||
Token string `env:"VAULT_TOKEN" json:"-" yaml:"-"`
|
||||
Path string `env:"VAULT_PATH" json:"-" yaml:"-" default:"pkgdash/data/pkgdash"`
|
||||
}
|
||||
|
||||
type MeterConfig struct {
|
||||
Addr string `json:"addr" yaml:"addr" default:"0.0.0.0:8080"`
|
||||
Path string `json:"path" yaml:"path" default:"/metrics"`
|
||||
}
|
||||
|
||||
type DatabaseConfig struct {
|
||||
DSN string `json:"dsn" yaml:"dsn"`
|
||||
Type string `json:"-" yaml:"-"`
|
||||
Migrate string `json:"-" yaml:"-"`
|
||||
ConnStr string `json:"-" yaml:"-"`
|
||||
MaxOpenConns int `json:"-" yaml:"-"`
|
||||
MaxIdleConns int `json:"-" yaml:"-"`
|
||||
ConnMaxLifetime time.Duration `json:"-" yaml:"-"`
|
||||
ConnMaxIdleTime time.Duration `json:"-" yaml:"-"`
|
||||
MigrateForce bool `json:"-" yaml:"-"`
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
App *AppConfig `json:"app" yaml:"app"`
|
||||
Database *DatabaseConfig `json:"database" yaml:"database"`
|
||||
Server *ServerConfig `json:"server" yaml:"server"`
|
||||
Meter *MeterConfig `json:"meter" yaml:"meter"`
|
||||
Vault *VaultConfig `json:"-" yaml:"-"`
|
||||
Tracer *TracerConfig `json:"tracer" yaml:"tracer"`
|
||||
}
|
||||
|
||||
func NewConfig(name, version string) *Config {
|
||||
return &Config{
|
||||
App: &AppConfig{},
|
||||
Server: &ServerConfig{Name: name, Version: version},
|
||||
Tracer: &TracerConfig{},
|
||||
Meter: &MeterConfig{},
|
||||
Vault: &VaultConfig{},
|
||||
}
|
||||
}
|
37
internal/configcli/config.go
Normal file
37
internal/configcli/config.go
Normal file
@ -0,0 +1,37 @@
|
||||
package configcli
|
||||
|
||||
type Config struct {
|
||||
PullRequestTitle string `json:"pull_request_title" yaml:"pull_request_title"`
|
||||
PullRequestBody string `json:"pull_request_body" yaml:"pull_request_body"`
|
||||
Branches []string `json:"branches" yaml:"branches"`
|
||||
Source *Source `json:"source" yaml:"source"`
|
||||
UpdateOpt *UpdateOpt `json:"update_opt" yaml:"update_opt"`
|
||||
}
|
||||
|
||||
type Source struct {
|
||||
TypeGit string `json:"type" yaml:"type" env:"GIT_TYPE"`
|
||||
Username string `json:"username" yaml:"username" env:"GIT_USERNAME"`
|
||||
Password string `json:"password" yaml:"password" env:"GIT_PASSWORD,GIT_TOKEN"`
|
||||
APIURL string `json:"apiurl" yaml:"apiurl" env:"GIT_API"`
|
||||
Repository string `json:"repository" yaml:"repository" env:"GIT_REPO"`
|
||||
Owner string `json:"owner" yaml:"owner" env:"GIT_OWNER"`
|
||||
}
|
||||
|
||||
type UpdateOpt struct {
|
||||
Pre bool `json:"pre" yaml:"pre" default:"false"`
|
||||
Major bool `json:"major" yaml:"major" default:"false"`
|
||||
UpMajor bool `json:"up_major" yaml:"up_major" default:"false"`
|
||||
Cached bool `json:"cached" yaml:"cached" default:"true"`
|
||||
}
|
||||
|
||||
type Cli struct {
|
||||
Command string `flag:"name=command,desc='choose command (update, close, checkupdate, list)',default=''"`
|
||||
Path string `flag:"name=path,desc='path to the module',default=''"`
|
||||
}
|
||||
|
||||
func NewConfig() *Config {
|
||||
return &Config{
|
||||
Source: &Source{},
|
||||
UpdateOpt: &UpdateOpt{},
|
||||
}
|
||||
}
|
253
internal/database/database.go
Normal file
253
internal/database/database.go
Normal file
@ -0,0 +1,253 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/golang-migrate/migrate/v4"
|
||||
"github.com/golang-migrate/migrate/v4/database"
|
||||
mpgx "github.com/golang-migrate/migrate/v4/database/pgx"
|
||||
msqlite "github.com/golang-migrate/migrate/v4/database/sqlite"
|
||||
"github.com/golang-migrate/migrate/v4/source/iofs"
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/stdlib"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
appconfig "go.unistack.org/pkgdash/internal/config"
|
||||
_ "modernc.org/sqlite"
|
||||
)
|
||||
|
||||
func ParseDSN(cfg *appconfig.DatabaseConfig) error {
|
||||
var err error
|
||||
|
||||
u, err := url.Parse(cfg.DSN)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
values := u.Query()
|
||||
var value string
|
||||
|
||||
if value = values.Get("conn_max"); value != "" {
|
||||
values.Del("conn_max")
|
||||
maxOpenConns, err := strconv.Atoi(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.MaxOpenConns = maxOpenConns
|
||||
cfg.MaxIdleConns = maxOpenConns / 2
|
||||
}
|
||||
|
||||
if value = values.Get("conn_maxidle"); value != "" {
|
||||
values.Del("conn_maxidle")
|
||||
maxIdleConns, err := strconv.Atoi(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.MaxIdleConns = maxIdleConns
|
||||
}
|
||||
|
||||
if value = values.Get("conn_lifetime"); value != "" {
|
||||
values.Del("conn_lifetime")
|
||||
connMaxLifetime, err := time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.ConnMaxLifetime = connMaxLifetime
|
||||
}
|
||||
|
||||
if value = values.Get("conn_maxidletime"); value != "" {
|
||||
values.Del("conn_maxidletime")
|
||||
connMaxIdleTime, err := time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cfg.ConnMaxIdleTime = connMaxIdleTime
|
||||
}
|
||||
|
||||
if mtype := values.Get("migrate"); mtype != "" {
|
||||
values.Del("migrate")
|
||||
cfg.Migrate = mtype
|
||||
}
|
||||
|
||||
switch u.Scheme {
|
||||
case "postgres", "pgsql", "postgresql":
|
||||
u.Scheme = "postgres"
|
||||
case "sqlite", "sqlite3":
|
||||
u.Scheme = "sqlite"
|
||||
default:
|
||||
return fmt.Errorf("unknown database %s", u.Scheme)
|
||||
}
|
||||
|
||||
cfg.Type = u.Scheme
|
||||
u.RawQuery = values.Encode()
|
||||
|
||||
cfg.ConnStr = u.String()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func connect(ctx context.Context, cfg *appconfig.DatabaseConfig, log logger.Logger) (*sqlx.DB, error) {
|
||||
var db *sqlx.DB
|
||||
var err error
|
||||
|
||||
log.Info(ctx, "connect to %s", cfg.Type)
|
||||
switch cfg.Type {
|
||||
case "postgres", "pgsql", "postgresql":
|
||||
db, err = connectPostgres(ctx, cfg.ConnStr)
|
||||
cfg.Type = "postgres"
|
||||
case "sqlite", "sqlite3":
|
||||
db, err = connectSqlite(ctx, cfg.ConnStr)
|
||||
cfg.Type = "sqlite"
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown database type %s", cfg.Type)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
func Connect(ctx context.Context, cfg *appconfig.DatabaseConfig, log logger.Logger) (*sqlx.DB, error) {
|
||||
db, err := connect(ctx, cfg, log)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
m, err := migratePrepare(ctx, db, log, cfg.Type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch cfg.Migrate {
|
||||
case "":
|
||||
break
|
||||
case "up":
|
||||
log.Info(ctx, "migrate up")
|
||||
err = m.Up()
|
||||
case "down":
|
||||
log.Info(ctx, "migrate down")
|
||||
err = m.Down()
|
||||
case "seed":
|
||||
log.Info(ctx, "migrate seed")
|
||||
if err = m.Drop(); err == nil {
|
||||
err = m.Up()
|
||||
}
|
||||
default:
|
||||
log.Info(ctx, "migrate version")
|
||||
v, verr := strconv.ParseUint(cfg.Migrate, 10, 64)
|
||||
if verr != nil {
|
||||
return nil, verr
|
||||
}
|
||||
err = m.Migrate(uint(v))
|
||||
}
|
||||
|
||||
if err == nil || err == migrate.ErrNoChange {
|
||||
srcerr, dberr := m.Close()
|
||||
if srcerr != nil {
|
||||
err = srcerr
|
||||
} else if dberr != nil {
|
||||
err = dberr
|
||||
} else {
|
||||
err = nil
|
||||
}
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
db, err = connect(ctx, cfg, log)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
db.SetConnMaxIdleTime(cfg.ConnMaxIdleTime)
|
||||
db.SetConnMaxLifetime(cfg.ConnMaxLifetime)
|
||||
db.SetMaxIdleConns(cfg.MaxIdleConns)
|
||||
db.SetMaxOpenConns(cfg.MaxOpenConns)
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
func connectSqlite(ctx context.Context, connstr string) (*sqlx.DB, error) {
|
||||
if !strings.Contains(connstr, ":memory:") {
|
||||
return sqlx.ConnectContext(ctx, "sqlite", "file:"+connstr[9:])
|
||||
}
|
||||
return sqlx.ConnectContext(ctx, "sqlite", connstr[9:])
|
||||
}
|
||||
|
||||
func connectPostgres(ctx context.Context, connstr string) (*sqlx.DB, error) {
|
||||
// parse connection string
|
||||
dbConf, err := pgx.ParseConfig(connstr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// needed for pgbouncer
|
||||
dbConf.RuntimeParams = map[string]string{
|
||||
"standard_conforming_strings": "on",
|
||||
"application_name": "authn",
|
||||
}
|
||||
|
||||
// may be needed for pbbouncer, needs to check
|
||||
// dbConf.PreferSimpleProtocol = true
|
||||
// register pgx conn
|
||||
connStr := stdlib.RegisterConnConfig(dbConf)
|
||||
|
||||
db, err := sqlx.ConnectContext(ctx, "pgx", connStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
func migratePrepare(ctx context.Context, db *sqlx.DB, log logger.Logger, dbtype string) (*migrate.Migrate, error) {
|
||||
var driver database.Driver
|
||||
var err error
|
||||
|
||||
switch dbtype {
|
||||
case "postgres":
|
||||
driver, err = mpgx.WithInstance(db.DB, &mpgx.Config{
|
||||
DatabaseName: "pkgdash",
|
||||
MigrationsTable: "schema_migrations",
|
||||
})
|
||||
case "sqlite":
|
||||
driver, err = msqlite.WithInstance(db.DB, &msqlite.Config{
|
||||
DatabaseName: "pkgdash",
|
||||
MigrationsTable: "schema_migrations",
|
||||
})
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
source, err := iofs.New(assets, "migrations/"+dbtype)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
m, err := migrate.NewWithInstance("fs", source, "apigw", driver)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
m.Log = &mLog{ctx: ctx, l: log}
|
||||
return m, nil
|
||||
}
|
||||
|
||||
type mLog struct {
|
||||
ctx context.Context
|
||||
l logger.Logger
|
||||
}
|
||||
|
||||
func (l *mLog) Verbose() bool {
|
||||
return l.l.V(logger.DebugLevel)
|
||||
}
|
||||
|
||||
func (l *mLog) Printf(format string, v ...interface{}) {
|
||||
l.l.Info(l.ctx, format, v...)
|
||||
}
|
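The DSN accepted by ParseDSN carries the pool and migration settings as query parameters (conn_max, conn_maxidle, conn_lifetime, conn_maxidletime, migrate). A minimal sketch with placeholder credentials, assuming the default logger exposed by the micro logger package:

```go
package main

import (
	"context"
	"log"

	"go.unistack.org/micro/v3/logger"
	appconfig "go.unistack.org/pkgdash/internal/config"
	"go.unistack.org/pkgdash/internal/database"
)

func main() {
	cfg := &appconfig.DatabaseConfig{
		// pool options and the migration mode are passed in the query string
		DSN: "postgres://user:pass@127.0.0.1:5432/pkgdash?conn_max=10&conn_lifetime=5m&migrate=up",
	}

	// ParseDSN fills Type, ConnStr, the pool limits and Migrate from the DSN.
	if err := database.ParseDSN(cfg); err != nil {
		log.Fatal(err)
	}

	// Connect opens the database, runs the requested migrations and applies the pool settings.
	db, err := database.Connect(context.Background(), cfg, logger.DefaultLogger)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
}
```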
8
internal/database/embed.go
Normal file
8
internal/database/embed.go
Normal file
@ -0,0 +1,8 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"embed"
|
||||
)
|
||||
|
||||
//go:embed migrations
|
||||
var assets embed.FS
|
@ -0,0 +1 @@
|
||||
drop table if exists dashboard, package, module, issue, comment;
|
@ -0,0 +1,39 @@
|
||||
create table if not exists dashboard (
|
||||
id serial not null unique primary key ,
|
||||
"uuid" uuid not null unique default gen_random_uuid() ,
|
||||
package integer[] default '{}'::integer[]
|
||||
);
|
||||
|
||||
create table if not exists comment (
|
||||
id serial not null unique primary key ,
|
||||
"text" text ,
|
||||
package integer not null,
|
||||
created timestamp not null default current_timestamp ,
|
||||
updated timestamp default current_timestamp
|
||||
);
|
||||
|
||||
create table if not exists module (
|
||||
id serial not null unique primary key ,
|
||||
name varchar not null ,
|
||||
version varchar not null
|
||||
);
|
||||
|
||||
create table if not exists issue (
|
||||
id serial not null unique primary key ,
|
||||
--package integer references package(id) ,
|
||||
modules integer[] default '{}'::integer[],
|
||||
status integer default 0 ,
|
||||
"desc" varchar
|
||||
);
|
||||
|
||||
create table if not exists package (
|
||||
id serial not null unique primary key ,
|
||||
name varchar not null ,
|
||||
url varchar ,
|
||||
modules integer[] default '{}'::integer[],
|
||||
issues integer[] default '{}'::integer[],
|
||||
comments integer[] default '{}'::integer[]
|
||||
);
|
||||
|
||||
create unique index module_info on module(name, version);
|
||||
|
@ -0,0 +1,5 @@
|
||||
drop table if exists packages;
|
||||
drop table if exists modules;
|
||||
drop table if exists issues;
|
||||
drop table if exists comments;
|
||||
drop table if exists handlers;
|
@ -0,0 +1,57 @@
|
||||
create table if not exists comments (
|
||||
id integer primary key autoincrement not null,
|
||||
comment text,
|
||||
package integer not null,
|
||||
created timestamp not null default current_timestamp,
|
||||
updated timestamp not null default current_timestamp
|
||||
);
|
||||
|
||||
|
||||
create table if not exists issues (
|
||||
id integer primary key autoincrement not null,
|
||||
status integer default 0,
|
||||
comment varchar,
|
||||
created timestamp not null default current_timestamp,
|
||||
updated timestamp not null default current_timestamp
|
||||
);
|
||||
|
||||
create table if not exists handlers (
|
||||
id integer primary key autoincrement not null,
|
||||
package integer not null,
|
||||
name varchar,
|
||||
coverage number default 0
|
||||
);
|
||||
|
||||
create table if not exists packages (
|
||||
id integer primary key autoincrement not null,
|
||||
name varchar not null,
|
||||
url varchar not null,
|
||||
description varchar,
|
||||
modules integer default 0,
|
||||
issues integer default 0,
|
||||
comments integer default 0,
|
||||
coverage number default 0,
|
||||
created timestamp not null default current_timestamp,
|
||||
updated timestamp not null default current_timestamp,
|
||||
status integer default 1,
|
||||
last_check timestamp
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS unique_idx_url on packages (url);
|
||||
|
||||
create table if not exists modules (
|
||||
id integer primary key autoincrement not null,
|
||||
name varchar not null,
|
||||
version varchar not null,
|
||||
last_check timestamp not null default current_timestamp
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS unique_idx_name_version on modules (name,version);
|
||||
|
||||
create table if not exists packages_modules (
|
||||
id integer primary key autoincrement not null,
|
||||
package integer,
|
||||
module integer not null
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS unique_idx_package_module on packages_modules (package,module);
|
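For illustration only, a self-contained sketch that applies a trimmed copy of the sqlite schema above and links one package to one module through packages_modules; the package and module values are arbitrary examples, not project data.

```go
package main

import (
	"log"

	"github.com/jmoiron/sqlx"
	_ "modernc.org/sqlite"
)

func main() {
	db, err := sqlx.Connect("sqlite", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// trimmed copy of the migration above, applied statement by statement
	for _, stmt := range []string{
		`create table if not exists packages (id integer primary key autoincrement not null, name varchar not null, url varchar not null)`,
		`create table if not exists modules (id integer primary key autoincrement not null, name varchar not null, version varchar not null)`,
		`create table if not exists packages_modules (id integer primary key autoincrement not null, package integer, module integer not null)`,
		`create unique index if not exists unique_idx_package_module on packages_modules (package, module)`,
	} {
		if _, err := db.Exec(stmt); err != nil {
			log.Fatal(err)
		}
	}

	res, err := db.Exec(`insert into packages (name, url) values (?, ?)`,
		"go.unistack.org/pkgdash", "https://git.unistack.org/unistack-org/pkgdash")
	if err != nil {
		log.Fatal(err)
	}
	pkgID, _ := res.LastInsertId()

	res, err = db.Exec(`insert into modules (name, version) values (?, ?)`, "golang.org/x/mod", "v0.13.0")
	if err != nil {
		log.Fatal(err)
	}
	modID, _ := res.LastInsertId()

	// the unique index keeps the package/module link free of duplicates
	if _, err := db.Exec(`insert into packages_modules (package, module) values (?, ?)`, pkgID, modID); err != nil {
		log.Fatal(err)
	}
}
```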
39
internal/handler/comment_create.go
Normal file
39
internal/handler/comment_create.go
Normal file
@ -0,0 +1,39 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"errors"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) CommentCreate(ctx context.Context, req *pb.CommentCreateReq, rsp *pb.CommentCreateRsp) error {
|
||||
h.logger.Debug(ctx, "Start AddComment")
|
||||
|
||||
err := req.Validate()
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "validation error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusBadRequest)
|
||||
return httpsrv.SetError(NewValidationError(err))
|
||||
}
|
||||
|
||||
var com *models.Comment
|
||||
if com, err = h.store.CommentCreate(ctx, req); err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
httpsrv.SetRspCode(ctx, http.StatusNotFound)
|
||||
return httpsrv.SetError(NewNotFoundError(err))
|
||||
}
|
||||
h.logger.Error(ctx, "comment create error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
rsp.Comment = models.NewComment(com)
|
||||
|
||||
h.logger.Debug(ctx, "Success finish addComment")
|
||||
return nil
|
||||
}
|
36
internal/handler/comment_delete.go
Normal file
36
internal/handler/comment_delete.go
Normal file
@ -0,0 +1,36 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"errors"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) CommentDelete(ctx context.Context, req *pb.CommentDeleteReq, rsp *pb.CommentDeleteRsp) error {
|
||||
h.logger.Debug(ctx, "Start AddComment")
|
||||
|
||||
err := req.Validate()
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "validate error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusBadRequest)
|
||||
return httpsrv.SetError(NewValidationError(err))
|
||||
}
|
||||
|
||||
if err = h.store.CommentDelete(ctx, req); err != nil {
|
||||
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
httpsrv.SetRspCode(ctx, http.StatusNotFound)
|
||||
return httpsrv.SetError(NewNotFoundError(err))
|
||||
}
|
||||
h.logger.Error(ctx, "comment delete error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
h.logger.Debug(ctx, "Success finish addComment")
|
||||
return nil
|
||||
}
|
35
internal/handler/comment_list.go
Normal file
35
internal/handler/comment_list.go
Normal file
@ -0,0 +1,35 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) CommentList(ctx context.Context, req *pb.CommentListReq, rsp *pb.CommentListRsp) error {
|
||||
h.logger.Debug(ctx, "Start GetModule")
|
||||
|
||||
err := req.Validate()
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "validate error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusBadRequest)
|
||||
return httpsrv.SetError(NewValidationError(err))
|
||||
}
|
||||
|
||||
comments, err := h.store.CommentList(ctx, req)
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "comment list error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
for _, com := range comments {
|
||||
rsp.Comments = append(rsp.Comments, models.NewComment(com))
|
||||
}
|
||||
|
||||
h.logger.Debug(ctx, "Success finish getModule")
|
||||
return nil
|
||||
}
|
11
internal/handler/comment_lookup.go
Normal file
11
internal/handler/comment_lookup.go
Normal file
@ -0,0 +1,11 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) CommentLookup(ctx context.Context, req *pb.CommentLookupReq, rsp *pb.CommentLookupRsp) error {
|
||||
return nil
|
||||
}
|
56
internal/handler/handler.go
Normal file
56
internal/handler/handler.go
Normal file
@ -0,0 +1,56 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/google/uuid"
|
||||
jsonpbcodec "go.unistack.org/micro-codec-jsonpb/v3"
|
||||
"go.unistack.org/micro/v3/codec"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/storage"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
type Handler struct {
|
||||
logger logger.Logger
|
||||
store storage.Storage
|
||||
codec codec.Codec
|
||||
}
|
||||
|
||||
func NewNotFoundError(err error) *pb.ErrorRsp {
|
||||
return &pb.ErrorRsp{
|
||||
Code: strconv.Itoa(http.StatusNotFound),
|
||||
Title: "NotFound",
|
||||
Uuid: uuid.New().String(),
|
||||
Details: err.Error(),
|
||||
}
|
||||
}
|
||||
|
||||
func NewInternalError(err error) *pb.ErrorRsp {
|
||||
return &pb.ErrorRsp{
|
||||
Code: strconv.Itoa(http.StatusInternalServerError),
|
||||
Title: "InternalServerError",
|
||||
Uuid: uuid.New().String(),
|
||||
Details: err.Error(),
|
||||
}
|
||||
}
|
||||
|
||||
func NewValidationError(err error) *pb.ErrorRsp {
|
||||
return &pb.ErrorRsp{
|
||||
Code: strconv.Itoa(http.StatusBadRequest),
|
||||
Title: "BadRequest",
|
||||
Uuid: uuid.New().String(),
|
||||
Details: err.Error(),
|
||||
}
|
||||
}
|
||||
|
||||
func NewHandler(log logger.Logger, store storage.Storage) (*Handler, error) {
|
||||
h := &Handler{
|
||||
logger: log,
|
||||
codec: jsonpbcodec.NewCodec(),
|
||||
store: store,
|
||||
}
|
||||
|
||||
return h, nil
|
||||
}
|
27
internal/handler/handler_list.go
Normal file
27
internal/handler/handler_list.go
Normal file
@ -0,0 +1,27 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) HandlerList(ctx context.Context, req *pb.HandlerListReq, rsp *pb.HandlerListRsp) error {
|
||||
h.logger.Debug(ctx, "HandlerList handler start")
|
||||
|
||||
packages, err := h.store.HandlerList(ctx, req)
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "error db response: %v", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
for _, hdlr := range packages {
|
||||
rsp.Handlers = append(rsp.Handlers, models.NewHandler(hdlr))
|
||||
}
|
||||
h.logger.Debug(ctx, "HandlerList handler stop")
|
||||
return nil
|
||||
}
|
34
internal/handler/modules_list.go
Normal file
34
internal/handler/modules_list.go
Normal file
@ -0,0 +1,34 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) ModuleList(ctx context.Context, req *pb.ModuleListReq, rsp *pb.ModuleListRsp) error {
|
||||
h.logger.Debug(ctx, "Start GetModule")
|
||||
|
||||
err := req.Validate()
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "validate error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusBadRequest)
|
||||
return httpsrv.SetError(NewValidationError(err))
|
||||
}
|
||||
|
||||
modules, err := h.store.ModuleList(ctx, req)
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "module list error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
for _, mod := range modules {
|
||||
rsp.Modules = append(rsp.Modules, models.NewModule(mod))
|
||||
}
|
||||
h.logger.Debug(ctx, "Success finish getModule")
|
||||
return nil
|
||||
}
|
32
internal/handler/package_create.go
Normal file
32
internal/handler/package_create.go
Normal file
@ -0,0 +1,32 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) PackageCreate(ctx context.Context, req *pb.PackageCreateReq, rsp *pb.PackageCreateRsp) error {
|
||||
h.logger.Debug(ctx, "PackagesCreate handler start")
|
||||
|
||||
if err := req.Validate(); err != nil {
|
||||
h.logger.Error(ctx, "validate error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusBadRequest)
|
||||
return httpsrv.SetError(NewValidationError(err))
|
||||
}
|
||||
|
||||
pkg, err := h.store.PackageCreate(ctx, req)
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "package create error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusBadRequest)
|
||||
return httpsrv.SetError(NewValidationError(err))
|
||||
}
|
||||
|
||||
rsp.Package = models.NewPackage(pkg)
|
||||
|
||||
h.logger.Debug(ctx, "PackagesCreate handler stop")
|
||||
return nil
|
||||
}
|
28
internal/handler/package_delete.go
Normal file
28
internal/handler/package_delete.go
Normal file
@ -0,0 +1,28 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) PackageDelete(ctx context.Context, req *pb.PackageDeleteReq, rsp *pb.PackageDeleteRsp) error {
|
||||
h.logger.Debug(ctx, "Start UpdatePackage")
|
||||
|
||||
if err := req.Validate(); err != nil {
|
||||
h.logger.Error(ctx, "validate error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusBadRequest)
|
||||
return httpsrv.SetError(NewValidationError(err))
|
||||
}
|
||||
|
||||
if err := h.store.PackageDelete(ctx, req); err != nil {
|
||||
h.logger.Error(ctx, "package delete error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
h.logger.Debug(ctx, "Success finish UpdatePackage")
|
||||
return nil
|
||||
}
|
27
internal/handler/package_list.go
Normal file
27
internal/handler/package_list.go
Normal file
@ -0,0 +1,27 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) PackageList(ctx context.Context, req *pb.PackageListReq, rsp *pb.PackageListRsp) error {
|
||||
h.logger.Debug(ctx, "PackagesList handler start")
|
||||
|
||||
packages, err := h.store.PackageList(ctx, req)
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "error db response: %v", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
for _, pkg := range packages {
|
||||
rsp.Packages = append(rsp.Packages, models.NewPackage(pkg))
|
||||
}
|
||||
h.logger.Debug(ctx, "PackagesList handler stop")
|
||||
return nil
|
||||
}
|
32
internal/handler/package_lookup.go
Normal file
32
internal/handler/package_lookup.go
Normal file
@ -0,0 +1,32 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) PackageLookup(ctx context.Context, req *pb.PackageLookupReq, rsp *pb.PackageLookupRsp) error {
|
||||
h.logger.Debug(ctx, "Start PackagesLookup")
|
||||
|
||||
if err := req.Validate(); err != nil {
|
||||
h.logger.Error(ctx, "validate error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusBadRequest)
|
||||
return httpsrv.SetError(NewValidationError(err))
|
||||
}
|
||||
|
||||
pkg, err := h.store.PackageLookup(ctx, req)
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "package lookup", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
rsp.Package = models.NewPackage(pkg)
|
||||
|
||||
h.logger.Debug(ctx, "Success finish PackagesLookup")
|
||||
return nil
|
||||
}
|
27
internal/handler/package_modules.go
Normal file
27
internal/handler/package_modules.go
Normal file
@ -0,0 +1,27 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) PackageModules(ctx context.Context, req *pb.PackageModulesReq, rsp *pb.PackageModulesRsp) error {
|
||||
h.logger.Debug(ctx, "PackageModules handler start")
|
||||
|
||||
modules, err := h.store.PackageModules(ctx, req)
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "error db response: %v", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
for _, mod := range modules {
|
||||
rsp.Modules = append(rsp.Modules, models.NewModule(mod))
|
||||
}
|
||||
h.logger.Debug(ctx, "PackagesModules handler stop")
|
||||
return nil
|
||||
}
|
32
internal/handler/package_update.go
Normal file
32
internal/handler/package_update.go
Normal file
@ -0,0 +1,32 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
httpsrv "go.unistack.org/micro-server-http/v3"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func (h *Handler) PackageUpdate(ctx context.Context, req *pb.PackageUpdateReq, rsp *pb.PackageUpdateRsp) error {
|
||||
h.logger.Debug(ctx, "Start UpdatePackage")
|
||||
|
||||
if err := req.Validate(); err != nil {
|
||||
h.logger.Error(ctx, "validate error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusBadRequest)
|
||||
return httpsrv.SetError(NewValidationError(err))
|
||||
}
|
||||
|
||||
pkg, err := h.store.PackageUpdate(ctx, req)
|
||||
if err != nil {
|
||||
h.logger.Error(ctx, "package update error", err)
|
||||
httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
|
||||
return httpsrv.SetError(NewInternalError(err))
|
||||
}
|
||||
|
||||
rsp.Package = models.NewPackage(pkg)
|
||||
|
||||
h.logger.Debug(ctx, "Success finish UpdatePackage")
|
||||
return nil
|
||||
}
|
126
internal/models/models.go
Normal file
126
internal/models/models.go
Normal file
@ -0,0 +1,126 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"time"
|
||||
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
"google.golang.org/protobuf/types/known/timestamppb"
|
||||
)
|
||||
|
||||
type Handler struct {
|
||||
ID uint64 `db:"id"`
|
||||
Package uint64 `db:"package"`
|
||||
Name string `db:"name"`
|
||||
Coverage sql.NullFloat64 `db:"coverage"`
|
||||
}
|
||||
|
||||
func NewHandler(hdlr *Handler) *pb.Handler {
|
||||
if hdlr == nil {
|
||||
return nil
|
||||
}
|
||||
rsp := &pb.Handler{
|
||||
Id: hdlr.ID,
|
||||
Package: hdlr.Package,
|
||||
Name: hdlr.Name,
|
||||
}
|
||||
if hdlr.Coverage.Valid {
|
||||
rsp.Coverage = hdlr.Coverage.Float64
|
||||
}
|
||||
return rsp
|
||||
}
|
||||
|
||||
type Package struct {
|
||||
Created time.Time `db:"created"`
|
||||
Updated time.Time `db:"updated"`
|
||||
LastCheck sql.NullTime `db:"last_check"`
|
||||
Type string `db:"type"`
|
||||
Name string `db:"name"`
|
||||
URL string `db:"url"`
|
||||
Description sql.NullString `db:"description"`
|
||||
Coverage sql.NullFloat64 `db:"coverage"`
|
||||
Modules uint64 `db:"modules"`
|
||||
ID uint64 `db:"id"`
|
||||
Status uint64 `db:"status"`
|
||||
Comments uint64 `db:"comments"`
|
||||
Issues uint64 `db:"issues"`
|
||||
}
|
||||
|
||||
func NewPackage(pkg *Package) *pb.Package {
|
||||
if pkg == nil {
|
||||
return nil
|
||||
}
|
||||
rsp := &pb.Package{
|
||||
Name: pkg.Name,
|
||||
Url: pkg.URL,
|
||||
Modules: pkg.Modules,
|
||||
Issues: pkg.Issues,
|
||||
Comments: pkg.Comments,
|
||||
Id: pkg.ID,
|
||||
Created: timestamppb.New(pkg.Created),
|
||||
Updated: timestamppb.New(pkg.Updated),
|
||||
Type: pkg.Type,
|
||||
}
|
||||
if rsp.Type == "" {
|
||||
rsp.Type = "package"
|
||||
}
|
||||
if pkg.Description.Valid {
|
||||
rsp.Description = pkg.Description.String
|
||||
}
|
||||
if pkg.LastCheck.Valid {
|
||||
rsp.LastCheck = timestamppb.New(pkg.LastCheck.Time)
|
||||
}
|
||||
if pkg.Coverage.Valid {
|
||||
rsp.Coverage = pkg.Coverage.Float64
|
||||
}
|
||||
return rsp
|
||||
}
|
||||
|
||||
type Module struct {
|
||||
LastCheck sql.NullTime `db:"last_check"`
|
||||
Name string `db:"name"`
|
||||
Version string `db:"version"`
|
||||
ID uint64 `db:"id"`
|
||||
}
|
||||
|
||||
func NewModule(mod *Module) *pb.Module {
|
||||
if mod == nil {
|
||||
return nil
|
||||
}
|
||||
rsp := &pb.Module{
|
||||
Name: mod.Name,
|
||||
Version: mod.Version,
|
||||
Id: mod.ID,
|
||||
}
|
||||
if mod.LastCheck.Valid {
|
||||
rsp.LastCheck = timestamppb.New(mod.LastCheck.Time)
|
||||
}
|
||||
return rsp
|
||||
}
|
||||
|
||||
type Issue struct {
|
||||
Comment string `db:"comment"`
|
||||
Modules []int64 `db:"modules"`
|
||||
ID uint64 `db:"id"`
|
||||
Status uint64 `db:"status"`
|
||||
Package uint64 `db:"package"`
|
||||
}
|
||||
|
||||
type Comment struct {
|
||||
Created time.Time `db:"created"`
|
||||
Updated time.Time `db:"updated"`
|
||||
Comment string `db:"comment"`
|
||||
ID uint64 `db:"id"`
|
||||
}
|
||||
|
||||
func NewComment(com *Comment) *pb.Comment {
|
||||
if com == nil {
|
||||
return nil
|
||||
}
|
||||
return &pb.Comment{
|
||||
Id: com.ID,
|
||||
Comment: com.Comment,
|
||||
Created: timestamppb.New(com.Created),
|
||||
Updated: timestamppb.New(com.Updated),
|
||||
}
|
||||
}
|
300
internal/modules/modproxy.go
Normal file
300
internal/modules/modproxy.go
Normal file
@ -0,0 +1,300 @@
|
||||
package modules
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/mod/module"
|
||||
"golang.org/x/mod/semver"
|
||||
"golang.org/x/sync/errgroup"
|
||||
)
|
||||
|
||||
// Module contains the module path and versions
|
||||
type Module struct {
|
||||
Path string
|
||||
Versions []string
|
||||
}
|
||||
|
||||
// MaxVersion returns the latest version.
|
||||
// If there are no versions, the empty string is returned.
|
||||
// Prefix can be used to filter the versions based on a prefix.
|
||||
// If pre is false, pre-release versions are excluded.
|
||||
func (m *Module) MaxVersion(prefix string, pre bool) string {
|
||||
var max string
|
||||
for _, v := range m.Versions {
|
||||
if !semver.IsValid(v) || !strings.HasPrefix(v, prefix) {
|
||||
continue
|
||||
}
|
||||
if !pre && semver.Prerelease(v) != "" {
|
||||
continue
|
||||
}
|
||||
max = MaxVersion(v, max)
|
||||
}
|
||||
return max
|
||||
}
|
||||
|
||||
// IsNewerVersion returns true if newversion is greater than oldversion in terms of semver.
|
||||
// If major is true, then newversion must be a major version ahead of oldversion to be considered newer.
|
||||
func IsNewerVersion(oldversion, newversion string, major bool) bool {
|
||||
if major {
|
||||
return semver.Compare(semver.Major(oldversion), semver.Major(newversion)) < 0
|
||||
}
|
||||
return semver.Compare(oldversion, newversion) < 0
|
||||
}
|
||||
|
||||
// MaxVersion returns the larger of two versions according to semantic version precedence.
|
||||
// Incompatible versions are considered lower than non-incompatible ones.
|
||||
// Invalid versions are considered lower than valid ones.
|
||||
// If both versions are invalid, the empty string is returned.
|
||||
func MaxVersion(v, w string) string {
|
||||
// sort by validity
|
||||
vValid := semver.IsValid(v)
|
||||
wValid := semver.IsValid(w)
|
||||
if !vValid && !wValid {
|
||||
return ""
|
||||
}
|
||||
if vValid != wValid {
|
||||
if vValid {
|
||||
return v
|
||||
}
|
||||
return w
|
||||
}
|
||||
// sort by compatibility
|
||||
vIncompatible := strings.HasSuffix(semver.Build(v), "+incompatible")
|
||||
wIncompatible := strings.HasSuffix(semver.Build(w), "+incompatible")
|
||||
if vIncompatible != wIncompatible {
|
||||
if wIncompatible {
|
||||
return v
|
||||
}
|
||||
return w
|
||||
}
|
||||
// sort by semver
|
||||
if semver.Compare(v, w) == 1 {
|
||||
return v
|
||||
}
|
||||
return w
|
||||
}
|
||||
|
||||
// NextMajor returns the next major version after the provided version
|
||||
func NextMajor(version string) (string, error) {
|
||||
major, err := strconv.Atoi(strings.TrimPrefix(semver.Major(version), "v"))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
major++
|
||||
return fmt.Sprintf("v%d", major), nil
|
||||
}
|
||||
|
||||
// WithMajorPath returns the module path for the provided version
|
||||
func (m *Module) WithMajorPath(version string) string {
|
||||
prefix := ModPrefix(m.Path)
|
||||
return JoinPath(prefix, version, "")
|
||||
}
|
||||
|
||||
// NextMajorPath returns the module path of the next major version
|
||||
func (m *Module) NextMajorPath() (string, bool) {
|
||||
latest := m.MaxVersion("", true)
|
||||
if latest == "" {
|
||||
return "", false
|
||||
}
|
||||
if semver.Major(latest) == "v0" {
|
||||
return "", false
|
||||
}
|
||||
next, err := NextMajor(latest)
|
||||
if err != nil {
|
||||
return "", false
|
||||
}
|
||||
return m.WithMajorPath(next), true
|
||||
}
|
||||
|
||||
// Query the module proxy for all versions of a module.
|
||||
// If the module does not exist, the second return parameter will be false
|
||||
// cached sets the Disable-Module-Fetch: true header
|
||||
func Query(modpath string, cached bool) (*Module, bool, error) {
|
||||
escaped, err := module.EscapePath(modpath)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
url := fmt.Sprintf("https://proxy.golang.org/%s/@v/list", escaped)
|
||||
req, err := http.NewRequest(http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
req.Header.Set("User-Agent", "GoMajor/1.0")
|
||||
if cached {
|
||||
req.Header.Set("Disable-Module-Fetch", "true")
|
||||
}
|
||||
res, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(res.Body)
|
||||
if res.StatusCode == http.StatusNotFound && bytes.HasPrefix(body, []byte("not found:")) {
|
||||
return nil, false, nil
|
||||
}
|
||||
msg := string(body)
|
||||
if msg == "" {
|
||||
msg = res.Status
|
||||
}
|
||||
return nil, false, fmt.Errorf("proxy: %s", msg)
|
||||
}
|
||||
var mod Module
|
||||
mod.Path = modpath
|
||||
sc := bufio.NewScanner(res.Body)
|
||||
for sc.Scan() {
|
||||
mod.Versions = append(mod.Versions, sc.Text())
|
||||
}
|
||||
if err := sc.Err(); err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
return &mod, true, nil
|
||||
}
|
||||
|
||||
// Latest finds the latest major version of a module
|
||||
// cached sets the Disable-Module-Fetch: true header
|
||||
func Latest(modpath string, cached bool) (*Module, error) {
|
||||
latest, ok, err := Query(modpath, cached)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("module not found: %s", modpath)
|
||||
}
|
||||
for i := 0; i < 100; i++ {
|
||||
nextpath, ok := latest.NextMajorPath()
|
||||
if !ok {
|
||||
return latest, nil
|
||||
}
|
||||
next, ok, err := Query(nextpath, cached)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !ok {
|
||||
// handle the case where a project switched to modules
|
||||
// without incrementing the major version
|
||||
version := latest.MaxVersion("", true)
|
||||
if semver.Build(version) == "+incompatible" {
|
||||
nextpath = latest.WithMajorPath(semver.Major(version))
|
||||
if nextpath != latest.Path {
|
||||
next, ok, err = Query(nextpath, cached)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if !ok {
|
||||
return latest, nil
|
||||
}
|
||||
latest = next
|
||||
}
|
||||
return nil, fmt.Errorf("request limit exceeded")
|
||||
}
|
||||
|
||||
// QueryPackage tries to find the module path for the provided package path
|
||||
// it does so by repeatedly chopping off the last path element and trying to
|
||||
// use it as a module path.
|
||||
func QueryPackage(pkgpath string, cached bool) (*Module, error) {
|
||||
prefix := pkgpath
|
||||
for prefix != "" {
|
||||
if module.CheckPath(prefix) == nil {
|
||||
mod, ok, err := Query(prefix, cached)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ok {
|
||||
modprefix := ModPrefix(mod.Path)
|
||||
if modpath, pkgdir, ok := SplitPath(modprefix, pkgpath); ok && modpath != mod.Path {
|
||||
if major, ok := ModMajor(modpath); ok {
|
||||
if v := mod.MaxVersion(major, false); v != "" {
|
||||
spec := JoinPath(modprefix, "", pkgdir) + "@" + v
|
||||
return nil, fmt.Errorf("%s doesn't support import versioning; use %s", major, spec)
|
||||
}
|
||||
return nil, fmt.Errorf("failed to find module for package: %s", pkgpath)
|
||||
}
|
||||
}
|
||||
return mod, nil
|
||||
}
|
||||
}
|
||||
remaining, last := path.Split(prefix)
|
||||
if last == "" {
|
||||
break
|
||||
}
|
||||
prefix = strings.TrimSuffix(remaining, "/")
|
||||
}
|
||||
return nil, fmt.Errorf("failed to find module for package: %s", pkgpath)
|
||||
}
|
||||
|
||||
// Update reports a newer version of a module.
|
||||
// The Err field will be set if an error occurred.
|
||||
type Update struct {
|
||||
Err error
|
||||
Module module.Version
|
||||
Version string
|
||||
}
|
||||
|
||||
// UpdateOptions specifies a set of modules to check for updates.
|
||||
// The OnUpdate callback will be invoked with any updates found.
|
||||
type UpdateOptions struct {
|
||||
OnUpdate func(Update)
|
||||
Modules []module.Version
|
||||
Pre bool
|
||||
Cached bool
|
||||
Major bool // Major true compare only major
|
||||
UpMajor bool // UpMajor module up with major
|
||||
}
|
||||
|
||||
// Updates finds updates for a set of specified modules.
|
||||
func Updates(opt UpdateOptions) {
|
||||
ch := make(chan Update)
|
||||
go func() {
|
||||
defer close(ch)
|
||||
private := os.Getenv("GOPRIVATE")
|
||||
var group errgroup.Group
|
||||
if opt.Cached {
|
||||
group.SetLimit(3)
|
||||
} else {
|
||||
group.SetLimit(1)
|
||||
}
|
||||
for _, m := range opt.Modules {
|
||||
m := m
|
||||
if module.MatchPrefixPatterns(private, m.Path) {
|
||||
continue
|
||||
}
|
||||
group.Go(func() error {
|
||||
mod, err := Latest(m.Path, opt.Cached)
|
||||
if err != nil {
|
||||
ch <- Update{Module: m, Err: err}
|
||||
return nil
|
||||
}
|
||||
major := semver.Major(m.Version)
|
||||
var v string
|
||||
switch opt.UpMajor {
|
||||
case true:
|
||||
v = mod.MaxVersion("", opt.Pre)
|
||||
case false:
|
||||
v = mod.MaxVersion(major, opt.Pre)
|
||||
}
|
||||
if IsNewerVersion(m.Version, v, opt.Major) {
|
||||
ch <- Update{Module: m, Version: v}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
_ = group.Wait()
|
||||
}()
|
||||
for u := range ch {
|
||||
if opt.OnUpdate != nil {
|
||||
opt.OnUpdate(u)
|
||||
}
|
||||
}
|
||||
}
|
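A sketch of how the proxy helpers above fit together: query the highest existing major version of a module, then run the bulk update check; the module paths and versions here are only examples.

```go
package main

import (
	"fmt"
	"log"

	"golang.org/x/mod/module"

	"go.unistack.org/pkgdash/internal/modules"
)

func main() {
	// Latest walks the /vN major versions via the module proxy
	// and returns the highest one that exists.
	mod, err := modules.Latest("github.com/jackc/chunkreader", true)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(mod.Path, mod.MaxVersion("", false))

	// Updates checks a set of required modules concurrently and reports
	// anything newer through the OnUpdate callback.
	modules.Updates(modules.UpdateOptions{
		Pre:    false,
		Cached: true,
		Modules: []module.Version{
			{Path: "golang.org/x/mod", Version: "v0.10.0"},
		},
		OnUpdate: func(u modules.Update) {
			if u.Err != nil {
				log.Printf("%s: %v", u.Module.Path, u.Err)
				return
			}
			fmt.Printf("%s %s -> %s\n", u.Module.Path, u.Module.Version, u.Version)
		},
	})
}
```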
115
internal/modules/packages.go
Normal file
115
internal/modules/packages.go
Normal file
@ -0,0 +1,115 @@
|
||||
package modules
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/mod/module"
|
||||
"golang.org/x/mod/semver"
|
||||
)
|
||||
|
||||
// ModPrefix returns the module path without the semantic import versioning (SIV) suffix
|
||||
func ModPrefix(modpath string) string {
|
||||
prefix, _, ok := module.SplitPathVersion(modpath)
|
||||
if !ok {
|
||||
prefix = modpath
|
||||
}
|
||||
return prefix
|
||||
}
|
||||
|
||||
// ModMajor returns the major version in vN format
|
||||
func ModMajor(modpath string) (string, bool) {
|
||||
_, major, ok := module.SplitPathVersion(modpath)
|
||||
if ok {
|
||||
major = strings.TrimPrefix(major, "/")
|
||||
major = strings.TrimPrefix(major, ".")
|
||||
}
|
||||
return major, ok
|
||||
}
|
||||
|
||||
// SplitPath splits the package path into the module path and the package subdirectory.
|
||||
// It requires a module path prefix to figure this out.
|
||||
func SplitPath(modprefix, pkgpath string) (modpath, pkgdir string, ok bool) {
|
||||
if !strings.HasPrefix(pkgpath, modprefix) {
|
||||
return "", "", false
|
||||
}
|
||||
modpathlen := len(modprefix)
|
||||
if rest := pkgpath[modpathlen:]; len(rest) > 0 && rest[0] != '/' && rest[0] != '.' {
|
||||
return "", "", false
|
||||
}
|
||||
if strings.HasPrefix(pkgpath[modpathlen:], "/") {
|
||||
modpathlen++
|
||||
}
|
||||
if idx := strings.Index(pkgpath[modpathlen:], "/"); idx >= 0 {
|
||||
modpathlen += idx
|
||||
} else {
|
||||
modpathlen = len(pkgpath)
|
||||
}
|
||||
modpath = modprefix
|
||||
if major, ok := ModMajor(pkgpath[:modpathlen]); ok {
|
||||
modpath = JoinPath(modprefix, major, "")
|
||||
}
|
||||
pkgdir = strings.TrimPrefix(pkgpath[len(modpath):], "/")
|
||||
return modpath, pkgdir, true
|
||||
}
|
||||
|
||||
// SplitSpec splits the path/to/package@query format strings
|
||||
func SplitSpec(spec string) (path, query string) {
|
||||
parts := strings.SplitN(spec, "@", 2)
|
||||
if len(parts) == 2 {
|
||||
path = parts[0]
|
||||
query = parts[1]
|
||||
} else {
|
||||
path = spec
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// JoinPath creates a full package path given a module prefix, version, and package directory.
|
||||
func JoinPath(modprefix, version, pkgdir string) string {
|
||||
version = strings.TrimPrefix(version, ".")
|
||||
version = strings.TrimPrefix(version, "/")
|
||||
major := semver.Major(version)
|
||||
pkgpath := modprefix
|
||||
switch {
|
||||
case strings.HasPrefix(pkgpath, "gopkg.in"):
|
||||
pkgpath += "." + major
|
||||
case major != "" && major != "v0" && major != "v1" && !strings.Contains(version, "+incompatible"):
|
||||
if !strings.HasSuffix(pkgpath, "/") {
|
||||
pkgpath += "/"
|
||||
}
|
||||
pkgpath += major
|
||||
}
|
||||
if pkgdir != "" {
|
||||
pkgpath += "/" + pkgdir
|
||||
}
|
||||
return pkgpath
|
||||
}
|
||||
|
||||
// FindModFile recursively searches up the directory structure until it
|
||||
// finds the go.mod, reaches the root of the directory tree, or encounters
|
||||
// an error.
|
||||
func FindModFile(dir string) (string, error) {
|
||||
var err error
|
||||
dir, err = filepath.Abs(dir)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
for {
|
||||
name := filepath.Join(dir, "go.mod")
|
||||
_, err := os.Stat(name)
|
||||
if err == nil {
|
||||
return name, nil
|
||||
}
|
||||
if !os.IsNotExist(err) {
|
||||
return "", err
|
||||
}
|
||||
parent := filepath.Dir(dir)
|
||||
if parent == dir {
|
||||
return "", fmt.Errorf("cannot find go.mod")
|
||||
}
|
||||
dir = parent
|
||||
}
|
||||
}
|
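A small sketch of the path helpers above, showing how a versioned package path is split into its module path and package directory, and how the same directory is re-joined against another major version; the paths are examples only.

```go
package main

import (
	"fmt"

	"go.unistack.org/pkgdash/internal/modules"
)

func main() {
	modprefix := modules.ModPrefix("github.com/jackc/chunkreader/v2") // "github.com/jackc/chunkreader"

	// Split a package path that lives inside the v2 module.
	modpath, pkgdir, ok := modules.SplitPath(modprefix, "github.com/jackc/chunkreader/v2/internal")
	fmt.Println(modpath, pkgdir, ok) // github.com/jackc/chunkreader/v2 internal true

	// Re-join the same package directory against another major version.
	fmt.Println(modules.JoinPath(modprefix, "v3.0.0", pkgdir)) // github.com/jackc/chunkreader/v3/internal
}
```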
55
internal/modules/packages_test.go
Normal file
55
internal/modules/packages_test.go
Normal file
@ -0,0 +1,55 @@
|
||||
package modules
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestModMajor(t *testing.T) {
|
||||
type args struct {
|
||||
modpath string
|
||||
}
|
||||
var tests = []struct {
|
||||
name string
|
||||
args args
|
||||
want string
|
||||
want1 bool
|
||||
}{
|
||||
{"Test #1",
|
||||
args{
|
||||
"github.com/jackc/chunkreader/v2",
|
||||
},
|
||||
"v2",
|
||||
true,
|
||||
},
|
||||
{"Test #2",
|
||||
args{
|
||||
"github.com/jackc/chunkreader",
|
||||
},
|
||||
"",
|
||||
true,
|
||||
},
|
||||
{"Test #3",
|
||||
args{
|
||||
"gopkg.in/yaml.v2",
|
||||
},
|
||||
"v2",
|
||||
true,
|
||||
},
|
||||
{"Test #4",
|
||||
args{
|
||||
"github.com/jackc/chunkreader/v1",
|
||||
},
|
||||
"",
|
||||
false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, got1 := ModMajor(tt.args.modpath)
|
||||
if got != tt.want {
|
||||
t.Error("ModMajor() got = %v, want %v", got, tt.want)
|
||||
}
|
||||
if got1 != tt.want1 {
|
||||
t.Error("ModMajor() got1 = %v, want %v", got1, tt.want1)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
48
internal/source/git/gogit.go
Normal file
48
internal/source/git/gogit.go
Normal file
@ -0,0 +1,48 @@
|
||||
//go:build gogit
|
||||
|
||||
package git
|
||||
|
||||
/*
|
||||
import "context"
|
||||
|
||||
type Repository interface {
|
||||
Checkout(ctx context.Context, hash string) error
|
||||
}
|
||||
|
||||
type repository struct {
|
||||
path string
|
||||
}
|
||||
|
||||
func NewRepositoryFromURL(ctx context.Context, url string) (Repository, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
|
||||
Branches() {
|
||||
refIter, err := repo.Branches() // get the branch references
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "failed to get branches", err)
|
||||
return err
|
||||
}
|
||||
|
||||
for {
|
||||
ref, err := refIter.Next()
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
g.logger.Error(ctx, "ref iter error", err)
|
||||
return err
|
||||
}
|
||||
g.logger.Info(ctx, fmt.Sprintf("check %s == %s", ref.Name().Short(), branch))
|
||||
if ref.Name().Short() == branch {
|
||||
headRef = plumbing.NewHashReference(ref.Name(), ref.Hash())
|
||||
g.logger.Info(ctx, "headRef set to "+headRef.String())
|
||||
break
|
||||
}
|
||||
} // iterate over the branches and, once the required branch is found, store it in headRef and leave the loop
|
||||
|
||||
refIter.Close()
|
||||
}
|
||||
|
||||
*/
|
262
internal/source/git/nogogit.go
Normal file
262
internal/source/git/nogogit.go
Normal file
@ -0,0 +1,262 @@
|
||||
//go:build !gogit
|
||||
|
||||
package git
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"os/exec"
|
||||
"strings"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
"github.com/go-git/go-git/v5/plumbing"
|
||||
)
|
||||
|
||||
type Repository interface {
|
||||
Branches() ([]*plumbing.Reference, error)
|
||||
// Auth(username string, password string) error
|
||||
FetchContext(ctx context.Context, opts *git.FetchOptions) error
|
||||
PushContext(ctx context.Context, opts *git.PushOptions) error
|
||||
Head() (*plumbing.Reference, error)
|
||||
Worktree() (Worktree, error)
|
||||
}
|
||||
|
||||
type Worktree interface {
|
||||
Checkout(*git.CheckoutOptions) error
|
||||
PullContext(ctx context.Context, opts *git.PullOptions) error
|
||||
Status() (git.Status, error)
|
||||
AddWithOptions(opts *git.AddOptions) error
|
||||
Commit(msg string, opts *git.CommitOptions) (plumbing.Hash, error)
|
||||
Reset(opts *git.ResetOptions) error
|
||||
}
|
||||
|
||||
type repository struct {
|
||||
gocmd string
|
||||
path string
|
||||
// authUsername string
|
||||
// authPassword string
|
||||
}
|
||||
|
||||
func PlainOpenWithOptions(path string, opts *git.PlainOpenOptions) (Repository, error) {
|
||||
gopath, err := exec.LookPath("git")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &repository{path: path, gocmd: gopath}, nil
|
||||
}
|
||||
|
||||
/*
|
||||
func (r *repository) Auth(username string, password string) error {
|
||||
r.authUsername = username
|
||||
r.authPassword = password
|
||||
return nil
|
||||
}
|
||||
*/
|
||||
|
||||
func (r *repository) Branches() ([]*plumbing.Reference, error) {
|
||||
var branches []*plumbing.Reference
|
||||
cmd := exec.Command(r.gocmd, "show-ref", "--branches")
|
||||
buf, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("output %s error %w", buf, err)
|
||||
}
|
||||
br := bytes.NewBuffer(buf)
|
||||
for {
|
||||
line, err := br.ReadString('\n')
|
||||
if err != nil {
|
||||
if err == io.EOF && line == "" {
|
||||
break
|
||||
} else if err != io.EOF && line == "" {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
fields := strings.Fields(line)
|
||||
if len(fields) != 2 {
|
||||
return nil, fmt.Errorf("invalid fields %s", line)
|
||||
}
|
||||
branches = append(branches, plumbing.NewReferenceFromStrings(fields[1], fields[0]))
|
||||
}
|
||||
return branches, nil
|
||||
}
|
||||
|
||||
func (r *repository) FetchContext(ctx context.Context, opts *git.FetchOptions) error {
|
||||
args := []string{"fetch"}
|
||||
if opts.Force {
|
||||
args = append(args, "-f")
|
||||
}
|
||||
cmd := exec.CommandContext(ctx, r.gocmd, args...)
|
||||
buf, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return fmt.Errorf("output %s error %w", buf, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *repository) PushContext(ctx context.Context, opts *git.PushOptions) error {
|
||||
args := []string{"push"}
|
||||
|
||||
if opts.Force {
|
||||
args = append(args, "-f")
|
||||
}
|
||||
|
||||
/* TODO
|
||||
var refs []string
|
||||
for _, ref := range opts.RefSpecs {
|
||||
refs = append(refs, ref.String())
|
||||
}
|
||||
|
||||
args = append(args, strings.Join(refs, " "))
|
||||
*/
|
||||
cmd := exec.CommandContext(ctx, r.gocmd, args...)
|
||||
buf, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return fmt.Errorf("output %s error %w", buf, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *repository) Head() (*plumbing.Reference, error) {
|
||||
var head *plumbing.Reference
|
||||
cmd := exec.Command(r.gocmd, "symbolic-ref", "--short", "HEAD")
|
||||
buf, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("output %s error %w", buf, err)
|
||||
}
|
||||
br := bytes.NewBuffer(buf)
|
||||
for {
|
||||
line, err := br.ReadString('\n')
|
||||
if err != nil {
|
||||
if err == io.EOF && line == "" {
|
||||
break
|
||||
} else if err != io.EOF && line == "" {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
fields := strings.Fields(line)
|
||||
if len(fields) != 2 {
|
||||
return nil, fmt.Errorf("invalid fields %s", line)
|
||||
}
|
||||
head = plumbing.NewReferenceFromStrings("HEAD", fields[0])
|
||||
}
|
||||
return head, nil
|
||||
}
|
||||
|
||||
type worktree struct {
|
||||
gocmd string
|
||||
}
|
||||
|
||||
func (r *repository) Worktree() (Worktree, error) {
|
||||
return &worktree{gocmd: r.gocmd}, nil
|
||||
}
|
||||
|
||||
func (w *worktree) Checkout(opts *git.CheckoutOptions) error {
|
||||
args := []string{"checkout"}
|
||||
if opts.Create {
|
||||
args = append(args, "-b", opts.Branch.Short())
|
||||
}
|
||||
if opts.Force {
|
||||
args = append(args, "-f")
|
||||
}
|
||||
if opts.Hash.IsZero() {
|
||||
args = append(args, opts.Branch.Short())
|
||||
} else {
|
||||
args = append(args, opts.Hash.String())
|
||||
}
|
||||
cmd := exec.Command(w.gocmd, args...)
|
||||
buf, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return fmt.Errorf("output %s error %w", buf, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *worktree) Status() (git.Status, error) {
|
||||
return git.Status{}, nil
|
||||
}
|
||||
|
||||
func (w *worktree) Reset(opts *git.ResetOptions) error {
|
||||
args := []string{"reset"}
|
||||
if opts.Mode == git.HardReset {
|
||||
args = append(args, "--hard")
|
||||
}
|
||||
|
||||
args = append(args, opts.Commit.String())
|
||||
|
||||
cmd := exec.Command(w.gocmd, args...)
|
||||
buf, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_ = buf
|
||||
return nil
|
||||
}
|
||||
|
||||
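// Commit runs git commit with the given message, author and date, then reads the new HEAD hash back via git show-ref.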
func (w *worktree) Commit(msg string, opts *git.CommitOptions) (plumbing.Hash, error) {
|
||||
cmd := exec.Command(w.gocmd, `commit`,
|
||||
fmt.Sprintf(`--author="%s <%s>"`, opts.Author.Name, opts.Author.Email),
|
||||
"-m", msg,
|
||||
fmt.Sprintf(`--date="%s"`, opts.Author.When.Format(`Mon Jan _2 15:04:05 2006 -0700`)),
|
||||
)
|
||||
buf, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return plumbing.ZeroHash, fmt.Errorf("output %s error %w", buf, err)
|
||||
}
|
||||
|
||||
var head *plumbing.Reference
|
||||
cmd = exec.Command(w.gocmd, "show-ref", "HEAD")
|
||||
buf, err = cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return plumbing.ZeroHash, err
|
||||
}
|
||||
br := bytes.NewBuffer(buf)
|
||||
for {
|
||||
line, err := br.ReadString('\n')
|
||||
if err != nil {
|
||||
if err == io.EOF && line == "" {
|
||||
break
|
||||
} else if err != io.EOF && line == "" {
|
||||
return plumbing.ZeroHash, err
|
||||
}
|
||||
}
|
||||
fields := strings.Fields(line)
|
||||
if len(fields) != 2 {
|
||||
return plumbing.ZeroHash, fmt.Errorf("invalid fields %s", line)
|
||||
}
|
||||
head = plumbing.NewReferenceFromStrings("HEAD", fields[0])
|
||||
}
|
||||
|
||||
return head.Hash(), nil
|
||||
}
|
||||
|
||||
func (w *worktree) PullContext(ctx context.Context, opts *git.PullOptions) error {
|
||||
args := []string{"pull"}
|
||||
if opts.Force {
|
||||
args = append(args, "-f")
|
||||
}
|
||||
if opts.Depth != 0 {
|
||||
args = append(args, fmt.Sprintf("--depth=%d", opts.Depth))
|
||||
}
|
||||
|
||||
cmd := exec.CommandContext(ctx, w.gocmd, args...)
|
||||
buf, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return fmt.Errorf("output %s error %w", buf, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *worktree) AddWithOptions(opts *git.AddOptions) error {
|
||||
cmd := exec.Command(w.gocmd, "add", opts.Path)
|
||||
buf, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return fmt.Errorf("output %s error %w", buf, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
676
internal/source/gitea/gitea.go
Normal file
@ -0,0 +1,676 @@
|
||||
package gitea
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os/exec"
|
||||
"regexp"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
gitconfig "github.com/go-git/go-git/v5/config"
|
||||
"github.com/go-git/go-git/v5/plumbing"
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
httpauth "github.com/go-git/go-git/v5/plumbing/transport/http"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/configcli"
|
||||
"go.unistack.org/pkgdash/internal/modules"
|
||||
//gogit "go.unistack.org/pkgdash/internal/source/git"
|
||||
)
|
||||
|
||||
var ErrPRNotExist = errors.New("pull request does not exist")
|
||||
|
||||
type Gitea struct {
|
||||
logger logger.Logger
|
||||
URL string
|
||||
Username string
|
||||
Password string
|
||||
PRTitle string
|
||||
PRBody string
|
||||
Repository string
|
||||
Owner string
|
||||
pulls []*giteaPull
|
||||
}
|
||||
|
||||
func NewGitea(cfg configcli.Config, log logger.Logger) *Gitea {
|
||||
return &Gitea{
|
||||
logger: log,
|
||||
URL: cfg.Source.APIURL,
|
||||
Username: cfg.Source.Username,
|
||||
Password: cfg.Source.Password,
|
||||
PRTitle: cfg.PullRequestTitle,
|
||||
PRBody: cfg.PullRequestBody,
|
||||
Repository: cfg.Source.Repository,
|
||||
Owner: cfg.Source.Owner,
|
||||
}
|
||||
}
|
||||
|
||||
type giteaPull struct {
|
||||
URL string `json:"url"`
|
||||
Title string `json:"title"`
|
||||
Base struct {
|
||||
Ref string `json:"ref"`
|
||||
} `json:"base"`
|
||||
Head struct {
|
||||
Ref string `json:"ref"`
|
||||
} `json:"head"`
|
||||
ID int64 `json:"id"`
|
||||
}
|
||||
|
||||
func (g *Gitea) Name() string {
|
||||
return "gitea"
|
||||
}
|
||||
|
||||
func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestOpen start, mod title: %s", path))
|
||||
|
||||
var buf []byte
|
||||
var err error
|
||||
// build the pull request title template
|
||||
tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle)
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
|
||||
}
|
||||
|
||||
wTitle := bytes.NewBuffer(nil)
|
||||
// build the pull request body template
|
||||
tplBody, err := template.New("pull_request_body").Parse(g.PRBody)
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
|
||||
}
|
||||
|
||||
wBody := bytes.NewBuffer(nil)
|
||||
|
||||
data := map[string]string{
|
||||
"Name": path,
|
||||
"VersionOld": mod.Module.Version,
|
||||
"VersionNew": mod.Version,
|
||||
}
|
||||
|
||||
if err = tplTitle.Execute(wTitle, data); err != nil {
|
||||
g.logger.Error(ctx, "failed to execute template", err)
|
||||
return err
|
||||
}
|
||||
if err = tplBody.Execute(wBody, data); err != nil {
|
||||
g.logger.Error(ctx, "failed to execute template", err)
|
||||
return err
|
||||
}
|
||||
|
||||
// open the git repository, walking up the directory tree to locate .git
|
||||
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
|
||||
}
|
||||
|
||||
wtree, headRef, err := g.fetchCheckout(ctx, repo, branch, path, mod)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "failed to checkout", err)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
_ = g.checkout(wtree, headRef)
|
||||
}()
|
||||
|
||||
if err = g.scopeUpdateDep(ctx, path, mod); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree add go.mod")
|
||||
if err = wtree.AddWithOptions(&git.AddOptions{Path: "go.mod"}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to add file: %v", err))
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree add go.sum")
|
||||
if err = wtree.AddWithOptions(&git.AddOptions{Path: "go.sum"}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to add file: %v", err))
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree commit")
|
||||
_, err = wtree.Commit(wTitle.String(), &git.CommitOptions{
|
||||
Parents: []plumbing.Hash{headRef.Hash()},
|
||||
Author: &object.Signature{
|
||||
Name: "gitea-actions",
|
||||
Email: "info@unistack.org",
|
||||
When: time.Now(),
|
||||
},
|
||||
}) // commit the changes
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to commit: %v", err))
|
||||
return err
|
||||
}
|
||||
|
||||
refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) // TODO: this refspec probably needs rework
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec))
|
||||
|
||||
if err = repo.PushContext(ctx, &git.PushOptions{
|
||||
RefSpecs: []gitconfig.RefSpec{refspec},
|
||||
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
Force: true,
|
||||
}); err != nil {
|
||||
g.logger.Error(ctx, "failed to push repo branch", err)
|
||||
return err
|
||||
} // push the changes
|
||||
|
||||
rsp, err := g.postPullRequest(ctx, wBody, wTitle, branch, path, mod)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// the pull request create call must return 201 Created
|
||||
if rsp.StatusCode != http.StatusCreated {
|
||||
buf, _ = io.ReadAll(rsp.Body)
|
||||
return fmt.Errorf("unknown error: %s", buf)
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("PR create for %s-%s", path, mod.Version))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Gitea) RequestClose(ctx context.Context, branch string, path string) error {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestClose start, mod title: %s", path))
|
||||
var err error
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
|
||||
return err
|
||||
}
|
||||
|
||||
prExist := false
|
||||
var b string // Name of the branch to be deleted
|
||||
for _, pull := range g.pulls {
|
||||
if strings.Contains(pull.Title, path) && pull.Base.Ref == branch {
|
||||
g.logger.Info(ctx, fmt.Sprintf("PR for %s exists: %s", path, pull.URL))
|
||||
prExist = true
|
||||
b = pull.Head.Ref
|
||||
}
|
||||
}
|
||||
if !prExist {
|
||||
g.logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path))
|
||||
return ErrPRNotExist
|
||||
}
|
||||
|
||||
req, err := g.DeleteBranch(ctx, g.URL, g.Owner, g.Repository, b, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to create request for delete the branch: %s, err: %s", branch, err))
|
||||
return err
|
||||
}
|
||||
rsp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to do request for delete the branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode))
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("Delete branch for %s successful", path))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Gitea) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestUpdate start, mod title: %s", path))
|
||||
var err error
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
|
||||
return err
|
||||
}
|
||||
|
||||
prExist := false
|
||||
var pullId int64
|
||||
var targetBranch plumbing.ReferenceName
|
||||
|
||||
for _, pull := range g.pulls {
|
||||
if strings.Contains(pull.Title, path) && pull.Base.Ref == branch {
|
||||
g.logger.Info(ctx, fmt.Sprintf("don't skip %s since pr exist %s", path, pull.URL)) // todo
|
||||
tVersion := getVersions(pull.Head.Ref) // take the latest version straight from the branch name
|
||||
if !modules.IsNewerVersion(tVersion, mod.Version, false) {
|
||||
g.logger.Debug(ctx, "The existing PR is relevant")
|
||||
return nil
|
||||
}
|
||||
prExist = true
|
||||
pullId = pull.ID
|
||||
targetBranch = plumbing.ReferenceName(pull.Head.Ref)
|
||||
}
|
||||
}
|
||||
if !prExist {
|
||||
g.logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path))
|
||||
return ErrPRNotExist
|
||||
}
|
||||
|
||||
// build the pull request title template
|
||||
tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle)
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
|
||||
}
|
||||
|
||||
wTitle := bytes.NewBuffer(nil)
|
||||
// build the pull request body template
|
||||
tplBody, err := template.New("pull_request_body").Parse(g.PRBody)
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
|
||||
}
|
||||
|
||||
wBody := bytes.NewBuffer(nil)
|
||||
|
||||
data := map[string]string{
|
||||
"Name": path,
|
||||
"VersionOld": mod.Module.Version,
|
||||
"VersionNew": mod.Version,
|
||||
}
|
||||
|
||||
if err = tplTitle.Execute(wTitle, data); err != nil {
|
||||
g.logger.Error(ctx, "failed to execute template", err)
|
||||
return err
|
||||
}
|
||||
if err = tplBody.Execute(wBody, data); err != nil {
|
||||
g.logger.Error(ctx, "failed to execute template", err)
|
||||
return err
|
||||
}
|
||||
|
||||
// open the git repository, walking up the directory tree to locate .git
|
||||
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
|
||||
}
|
||||
|
||||
wtree, headRef, err := g.fetchCheckout(ctx, repo, targetBranch.Short(), path, mod)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "failed to checkout", err)
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
_ = g.checkout(wtree, headRef)
|
||||
}()
|
||||
|
||||
if err = g.scopeUpdateDep(ctx, path, mod); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree add go.mod")
|
||||
if err = wtree.AddWithOptions(&git.AddOptions{Path: "go.mod"}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to add file: %v", err))
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree add go.sum")
|
||||
if err = wtree.AddWithOptions(&git.AddOptions{Path: "go.sum"}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to add file: %v", err))
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree commit")
|
||||
_, err = wtree.Commit(wTitle.String(), &git.CommitOptions{
|
||||
Parents: []plumbing.Hash{headRef.Hash()},
|
||||
Author: &object.Signature{
|
||||
Name: "gitea-actions",
|
||||
Email: "info@unistack.org",
|
||||
When: time.Now(),
|
||||
},
|
||||
}) // commit the changes
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to commit: %v", err))
|
||||
return err
|
||||
}
|
||||
|
||||
refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) // TODO: this refspec probably needs rework
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec))
|
||||
|
||||
if err = repo.PushContext(ctx, &git.PushOptions{
|
||||
//RefSpecs: []gitconfig.RefSpec{refspec},
|
||||
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
Force: true,
|
||||
}); err != nil {
|
||||
g.logger.Error(ctx, "failed to push repo branch", err)
|
||||
return err
|
||||
} // push the changes
|
||||
|
||||
err = g.patchPullRequest(ctx, wBody, wTitle, pullId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("PR update for %s-%s", path, mod.Version))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Gitea) RequestList(ctx context.Context, branch string) (map[string]string, error) {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestList for %s", branch))
|
||||
var err error
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var path string
|
||||
rMap := make(map[string]string)
|
||||
|
||||
for _, pull := range g.pulls {
|
||||
if !strings.HasPrefix(pull.Title, "Bump ") || pull.Base.Ref != branch { // keep only the bot's module update pull requests
|
||||
continue
|
||||
}
|
||||
path = strings.Split(pull.Title, " ")[1] // TODO: works only with the default title template
|
||||
rMap[path] = pull.Title
|
||||
}
|
||||
return rMap, nil
|
||||
}
|
||||
|
||||
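// getVersions extracts the first semantic version (vX.Y.Z) found in s, typically a branch name.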
func getVersions(s string) string {
|
||||
re := regexp.MustCompile("[vV][0-9]+\\.[0-9]+\\.[0-9]+")
|
||||
|
||||
version := re.FindString(s)
|
||||
|
||||
return version
|
||||
}
|
||||
|
||||
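// DeleteBranch builds an authenticated DELETE request against the Gitea branches API; the caller is expected to execute it.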
func (g *Gitea) DeleteBranch(ctx context.Context, url, owner, repo, branch, password string) (*http.Request, error) {
|
||||
var buf []byte
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodDelete, fmt.Sprintf("https://%s/api/v1/repos/%s/%s/branches/%s", url, owner, repo, branch), bytes.NewReader(buf))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", "Bearer "+password)
|
||||
return req, err
|
||||
}
|
||||
|
||||
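// GetPulls pages through the Gitea API and returns every open pull request of the repository.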
func (g *Gitea) GetPulls(ctx context.Context, url, owner, repo, password string) ([]*giteaPull, error) {
|
||||
var pullsAll []*giteaPull
|
||||
page := 1
|
||||
|
||||
for {
|
||||
pulls := make([]*giteaPull, 0, 10)
|
||||
req, err := http.NewRequestWithContext(
|
||||
ctx,
|
||||
http.MethodGet,
|
||||
fmt.Sprintf("https://%s/api/v1/repos/%s/%s/pulls?state=open&page=%v", url, owner, repo, page),
|
||||
nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} // build the request to the repository API
|
||||
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", "Bearer "+password)
|
||||
|
||||
rsp, err := http.DefaultClient.Do(req) // execute the request
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
buf, _ := io.ReadAll(rsp.Body)
|
||||
|
||||
switch rsp.StatusCode {
|
||||
case http.StatusOK:
|
||||
if err = json.Unmarshal(buf, &pulls); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err))
|
||||
return nil, err
|
||||
}
|
||||
pullsAll = append(pullsAll, pulls...)
|
||||
page++
|
||||
case http.StatusNotFound:
|
||||
g.logger.Info(ctx, fmt.Sprintf("pull-request is not exist for %s", repo))
|
||||
return nil, ErrPRNotExist
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown error: %s", buf)
|
||||
}
|
||||
|
||||
if len(pulls) == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return pullsAll, nil
|
||||
}
|
||||
|
||||
func (g *Gitea) checkout(w *git.Worktree, ref *plumbing.Reference) error {
|
||||
ctx := context.Background()
|
||||
g.logger.Debug(ctx, "checkout: "+ref.String())
|
||||
|
||||
if err := w.Checkout(&git.CheckoutOptions{
|
||||
Branch: ref.Name(),
|
||||
Create: false,
|
||||
Force: true,
|
||||
Keep: false,
|
||||
}); err != nil {
|
||||
g.logger.Error(ctx, "failed to reset", err)
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
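// fetchCheckout fetches the remote, resets the worktree and checks out either the existing PR branch or a new pkgdash/go_modules/<module>-<version> branch.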
func (g *Gitea) fetchCheckout(ctx context.Context, repo *git.Repository, branch, path string, mod modules.Update) (*git.Worktree, *plumbing.Reference, error) {
|
||||
// fetch remote branches
|
||||
if err := repo.FetchContext(ctx, &git.FetchOptions{
|
||||
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
Force: true,
|
||||
}); err != nil && err != git.NoErrAlreadyUpToDate {
|
||||
g.logger.Error(ctx, "failed to fetch repo", err)
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
var headRef *plumbing.Reference
|
||||
|
||||
branches, err := repo.Branches()
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "cant get repo branch", err)
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
for {
|
||||
ref, err := branches.Next()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
if ref.Name().Short() == branch {
|
||||
// found the reference to the target branch
|
||||
headRef = ref
|
||||
g.logger.Info(ctx, "headRef set to "+headRef.String())
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if headRef == nil {
|
||||
g.logger.Error(ctx, "failed to get repo branch head")
|
||||
return nil, nil, err
|
||||
} // the target branch was not found
|
||||
|
||||
g.logger.Info(ctx, "repo head "+headRef.String())
|
||||
|
||||
wtree, err := repo.Worktree()
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "failed to get worktree", err)
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if err = wtree.Reset(&git.ResetOptions{
|
||||
Mode: git.HardReset,
|
||||
}); err != nil {
|
||||
g.logger.Error(ctx, "reset work_tree error: ", err)
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
|
||||
if err != nil && err != ErrPRNotExist {
|
||||
g.logger.Error(ctx, "GetPulls error", err)
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
var pullExist bool
|
||||
for _, pull := range g.pulls {
|
||||
if strings.Contains(pull.Title, path) && (strings.Contains(pull.Base.Ref, branch) || strings.Contains(pull.Head.Ref, branch)) {
|
||||
pullExist = true
|
||||
} // check whether a pull request already exists for this branch
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("update %s from %s to %s", path, mod.Module.Version, mod.Version))
|
||||
|
||||
wstatus, err := wtree.Status()
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "failed to get worktree status", err)
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree status "+wstatus.String())
|
||||
|
||||
if err = wtree.PullContext(ctx, &git.PullOptions{
|
||||
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
// Depth: 1,
|
||||
// RemoteURL :
|
||||
ReferenceName: headRef.Name(),
|
||||
Force: true,
|
||||
RemoteName: "origin",
|
||||
}); err != nil && err != git.NoErrAlreadyUpToDate {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to pull repo: %v", err)) // подтягиваем изменения с удаленого репозитория
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("checkout ref %s", headRef))
|
||||
|
||||
if pullExist {
|
||||
if err = wtree.Checkout(&git.CheckoutOptions{
|
||||
Branch: headRef.Name(),
|
||||
Create: false,
|
||||
Force: true,
|
||||
}); err != nil && err != git.ErrBranchExists {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err))
|
||||
return nil, nil, err
|
||||
} // switch to the existing branch
|
||||
} else {
|
||||
if err = wtree.Checkout(&git.CheckoutOptions{
|
||||
Hash: headRef.Hash(),
|
||||
Branch: plumbing.NewBranchReferenceName(fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)),
|
||||
Create: true,
|
||||
Force: true,
|
||||
}); err != nil && err != git.ErrBranchExists {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err))
|
||||
return nil, nil, err
|
||||
} // create a new branch
|
||||
}
|
||||
return wtree, headRef, nil
|
||||
}
|
||||
|
||||
func (g *Gitea) postPullRequest(ctx context.Context, wBody, wTitle *bytes.Buffer, branch, path string, mod modules.Update) (*http.Response, error) {
|
||||
body := map[string]string{
|
||||
"base": branch,
|
||||
"body": wBody.String(),
|
||||
"head": fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version),
|
||||
"title": wTitle.String(),
|
||||
}
|
||||
g.logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body))
|
||||
|
||||
buf, err := json.Marshal(body)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "failed to marshal", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf))
|
||||
req, err := http.NewRequestWithContext(
|
||||
ctx,
|
||||
http.MethodPost,
|
||||
fmt.Sprintf("https://%s/api/v1/repos/%s/%s/pulls", g.URL, g.Owner, g.Repository),
|
||||
bytes.NewReader(buf),
|
||||
)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "http request error", err)
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", "Bearer "+g.Password)
|
||||
|
||||
rsp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "failed to call http request", err)
|
||||
return rsp, err
|
||||
}
|
||||
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (g *Gitea) patchPullRequest(ctx context.Context, wBody, wTitle *bytes.Buffer, indexPR int64) error {
|
||||
body := map[string]string{
|
||||
"body": wBody.String(),
|
||||
"title": wTitle.String(),
|
||||
}
|
||||
g.logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body))
|
||||
|
||||
buf, err := json.Marshal(body)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "failed to marshal", err)
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf))
|
||||
req, err := http.NewRequestWithContext(
|
||||
ctx,
|
||||
http.MethodPatch,
|
||||
fmt.Sprintf("https://%s/api/v1/repos/%s/%s/pulls/%d", g.URL, g.Owner, g.Repository, indexPR),
|
||||
bytes.NewReader(buf),
|
||||
)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "http request error", err)
|
||||
return err
|
||||
}
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", "Bearer "+g.Password)
|
||||
|
||||
_, err = http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, "failed to call http request", err)
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
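// scopeUpdateDep bumps the dependency by rewriting go.mod with go mod edit and then running go mod tidy.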
func (g *Gitea) scopeUpdateDep(ctx context.Context, path string, mod modules.Update) error {
|
||||
epath, err := exec.LookPath("go")
|
||||
if errors.Is(err, exec.ErrDot) {
|
||||
err = nil
|
||||
}
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to find go command: %v", err))
|
||||
} // locate the go binary
|
||||
|
||||
var cmd *exec.Cmd
|
||||
var out []byte
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-droprequire=%s", mod.Module.Path))
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
|
||||
return err
|
||||
}
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version))
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
|
||||
return err
|
||||
} // run go mod edit with the new module version
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "tidy")
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to run go mod tidy: %s err: %v", out, err))
|
||||
return err
|
||||
} // run go mod tidy to pull in the new module version
|
||||
|
||||
return nil
|
||||
}
|
396
internal/source/github/github.go
Normal file
@ -0,0 +1,396 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
gitconfig "github.com/go-git/go-git/v5/config"
|
||||
"github.com/go-git/go-git/v5/plumbing"
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
httpauth "github.com/go-git/go-git/v5/plumbing/transport/http"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/configcli"
|
||||
"go.unistack.org/pkgdash/internal/modules"
|
||||
)
|
||||
|
||||
var ErrPRNotExist = errors.New("pull request does not exist")
|
||||
|
||||
type Github struct {
|
||||
logger logger.Logger
|
||||
URL string
|
||||
Username string
|
||||
Password string
|
||||
PRTitle string
|
||||
PRBody string
|
||||
Repository string
|
||||
Owner string
|
||||
pulls []*githubPull
|
||||
baseRef *plumbing.Reference
|
||||
}
|
||||
|
||||
func NewGithub(cfg configcli.Config, log logger.Logger) *Github {
|
||||
return &Github{
|
||||
logger: log,
|
||||
URL: cfg.Source.APIURL,
|
||||
Username: cfg.Source.Username,
|
||||
Password: cfg.Source.Password,
|
||||
PRTitle: cfg.PullRequestTitle,
|
||||
PRBody: cfg.PullRequestBody,
|
||||
Repository: cfg.Source.Repository,
|
||||
Owner: cfg.Source.Owner,
|
||||
}
|
||||
}
|
||||
|
||||
type githubPull struct {
|
||||
URL string `json:"url"`
|
||||
Title string `json:"title"`
|
||||
Base struct {
|
||||
Ref string `json:"ref"`
|
||||
} `json:"base"`
|
||||
Head struct {
|
||||
Ref string `json:"ref"`
|
||||
} `json:"head"`
|
||||
ID int64 `json:"id"`
|
||||
}
|
||||
|
||||
func (g *Github) Name() string {
|
||||
return "github"
|
||||
}
|
||||
|
||||
func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestOpen start, mod title: %s", path))
|
||||
|
||||
var buf []byte
|
||||
var err error
|
||||
// build the pull request title template
|
||||
tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle)
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
|
||||
}
|
||||
|
||||
wTitle := bytes.NewBuffer(nil)
|
||||
// build the pull request body template
|
||||
tplBody, err := template.New("pull_request_body").Parse(g.PRBody)
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
|
||||
}
|
||||
|
||||
wBody := bytes.NewBuffer(nil)
|
||||
|
||||
data := map[string]string{
|
||||
"Name": path,
|
||||
"VersionOld": mod.Module.Version,
|
||||
"VersionNew": mod.Version,
|
||||
}
|
||||
|
||||
if err = tplTitle.Execute(wTitle, data); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
|
||||
}
|
||||
if err = tplBody.Execute(wBody, data); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
|
||||
}
|
||||
|
||||
// open the git repository, walking up the directory tree to locate .git
|
||||
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
|
||||
}
|
||||
// fetch refs and objects from the remote
|
||||
if err = repo.FetchContext(ctx, &git.FetchOptions{
|
||||
// Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
Force: true,
|
||||
}); err != nil && err != git.NoErrAlreadyUpToDate {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to fetch repo : %v", err))
|
||||
} // refresh the repository
|
||||
|
||||
var headRef *plumbing.Reference // reference to the target branch
|
||||
|
||||
if g.baseRef == nil {
|
||||
g.baseRef, err = repo.Head()
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("Error head: %s", err))
|
||||
}
|
||||
}
|
||||
|
||||
refIter, err := repo.Branches() // list local branches
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to get branches: %v", err))
|
||||
return err
|
||||
}
|
||||
for {
|
||||
ref, err := refIter.Next()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
if ref.Name().Short() == branch { // TODO: this matching may need rework
|
||||
headRef = ref
|
||||
break
|
||||
}
|
||||
} // iterate over the branches and stop once the target branch is stored in headRef
|
||||
refIter.Close()
|
||||
|
||||
if headRef == nil {
|
||||
g.logger.Fatal(ctx, "failed to get repo branch head")
|
||||
return err
|
||||
} // the target branch was not found
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("repo head %s", headRef))
|
||||
|
||||
wtree, err := repo.Worktree() // TODO: the worktree may not be needed here
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to get worktree: %v", err))
|
||||
}
|
||||
defer g.checkout(*wtree, *g.baseRef)
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
|
||||
if err != nil && err != ErrPRNotExist {
|
||||
g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
|
||||
return err
|
||||
}
|
||||
|
||||
for _, pull := range g.pulls {
|
||||
if strings.Contains(pull.Title, path) && strings.Contains(pull.Base.Ref, branch) {
|
||||
g.logger.Info(ctx, fmt.Sprintf("PR for %s exists %s, call RequestUpdate", path, pull.URL))
|
||||
return g.RequestUpdate(ctx, branch, path, mod)
|
||||
} // if a pull request for this branch already exists, update it instead
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("update %s from %s to %s", path, mod.Module.Version, mod.Version))
|
||||
|
||||
g.logger.Info(ctx, "reset worktree")
|
||||
if err = wtree.Reset(&git.ResetOptions{Commit: headRef.Hash(), Mode: git.HardReset}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to reset repo branch: %v", err))
|
||||
} // reset to the branch head; TODO: probably removable
|
||||
|
||||
if err = wtree.PullContext(ctx, &git.PullOptions{
|
||||
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
Depth: 1,
|
||||
// RemoteURL :
|
||||
Force: true,
|
||||
RemoteName: "origin",
|
||||
}); err != nil && err != git.NoErrAlreadyUpToDate {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to pull repo: %v", err)) // подтягиваем изменения с удаленого репозитория
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("checkout ref %s", headRef))
|
||||
if err = wtree.Checkout(&git.CheckoutOptions{
|
||||
Hash: headRef.Hash(),
|
||||
Branch: plumbing.NewBranchReferenceName(fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)),
|
||||
Create: true,
|
||||
Force: true,
|
||||
}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err))
|
||||
return err
|
||||
} // create a new branch
|
||||
|
||||
epath, err := exec.LookPath("go")
|
||||
if errors.Is(err, exec.ErrDot) {
|
||||
err = nil
|
||||
}
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to find go command: %v", err))
|
||||
} // locate the go binary
|
||||
|
||||
var cmd *exec.Cmd
|
||||
var out []byte
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-droprequire=%s", mod.Module.Path))
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
|
||||
}
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version))
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
|
||||
} // run go mod edit with the new module version
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "tidy")
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod tidy: %s err: %v", out, err))
|
||||
} // run go mod tidy to pull in the new module version
|
||||
|
||||
g.logger.Info(ctx, "worktree add go.mod")
|
||||
if _, err = wtree.Add("go.mod"); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree add go.sum")
|
||||
if _, err = wtree.Add("go.sum"); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree commit")
|
||||
_, err = wtree.Commit(wTitle.String(), &git.CommitOptions{
|
||||
Parents: []plumbing.Hash{headRef.Hash()},
|
||||
Author: &object.Signature{
|
||||
Name: "gitea-actions",
|
||||
Email: "info@unistack.org",
|
||||
When: time.Now(),
|
||||
},
|
||||
}) // commit the changes
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to commit: %v", err))
|
||||
}
|
||||
|
||||
refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) // TODO: this refspec probably needs rework
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec))
|
||||
|
||||
if err = repo.PushContext(ctx, &git.PushOptions{
|
||||
RefSpecs: []gitconfig.RefSpec{refspec},
|
||||
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
Force: true,
|
||||
}); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to push repo branch: %v", err))
|
||||
} // push the changes
|
||||
|
||||
body := map[string]string{
|
||||
"base": branch,
|
||||
"body": wBody.String(),
|
||||
"head": fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version),
|
||||
"title": wTitle.String(),
|
||||
}
|
||||
g.logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body))
|
||||
|
||||
buf, err = json.Marshal(body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf))
|
||||
|
||||
req, err := http.NewRequestWithContext(
|
||||
ctx,
|
||||
http.MethodPost,
|
||||
fmt.Sprintf("https://%s/api/v1/repos/%s/%s/pulls", g.URL, g.Owner, g.Repository),
|
||||
bytes.NewReader(buf),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", "Bearer "+g.Password)
|
||||
|
||||
rsp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
} // the request above creates the pull request
|
||||
if rsp.StatusCode != http.StatusCreated {
|
||||
buf, _ = io.ReadAll(rsp.Body)
|
||||
return fmt.Errorf("unknown error: %s", buf)
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("PR create for %s-%s", path, mod.Version))
|
||||
|
||||
repo, err = git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Github) RequestClose(ctx context.Context, branch string, path string) error {
|
||||
return fmt.Errorf("implement me")
|
||||
}
|
||||
|
||||
func (g *Github) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error {
|
||||
return fmt.Errorf("implement me")
|
||||
}
|
||||
|
||||
func (g *Github) RequestList(ctx context.Context, branch string) (map[string]string, error) {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestList for %s", branch))
|
||||
var err error
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var path string
|
||||
rMap := make(map[string]string)
|
||||
|
||||
for _, pull := range g.pulls {
|
||||
if !strings.HasPrefix(pull.Title, "Bump ") || pull.Base.Ref != branch { // keep only the bot's module update pull requests
|
||||
continue
|
||||
}
|
||||
path = strings.Split(pull.Title, " ")[1] // TODO: works only with the default title template
|
||||
rMap[path] = pull.Title
|
||||
}
|
||||
return rMap, nil
|
||||
}
|
||||
|
||||
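// GetPulls pages through the configured API and returns every open pull request of the repository.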
func (g *Github) GetPulls(ctx context.Context, url, owner, repo, password string) ([]*githubPull, error) {
|
||||
var pullsAll []*githubPull
|
||||
page := 1
|
||||
|
||||
for {
|
||||
pulls := make([]*githubPull, 0, 10)
|
||||
req, err := http.NewRequestWithContext(
|
||||
ctx,
|
||||
http.MethodGet,
|
||||
fmt.Sprintf("https://%s/api/v1/repos/%s/%s/pulls?state=open&page=%v", url, owner, repo, page),
|
||||
nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} // build the request to the repository API
|
||||
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", "Bearer "+password)
|
||||
|
||||
rsp, err := http.DefaultClient.Do(req) // execute the request
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
buf, _ := io.ReadAll(rsp.Body)
|
||||
|
||||
switch rsp.StatusCode {
|
||||
case http.StatusOK:
|
||||
if err = json.Unmarshal(buf, &pulls); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err))
|
||||
return nil, err
|
||||
}
|
||||
pullsAll = append(pullsAll, pulls...)
|
||||
page++
|
||||
case http.StatusNotFound:
|
||||
g.logger.Info(ctx, fmt.Sprintf("pull-request is not exist for %s", repo))
|
||||
return nil, ErrPRNotExist
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown error: %s", buf)
|
||||
}
|
||||
|
||||
if len(pulls) == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return pullsAll, nil
|
||||
}
|
||||
|
||||
func (g *Github) checkout(w git.Worktree, ref plumbing.Reference) {
|
||||
ctx := context.Background()
|
||||
g.logger.Debug(ctx, fmt.Sprintf("Checkout: %s", ref.Name().Short()))
|
||||
|
||||
if err := w.Checkout(&git.CheckoutOptions{
|
||||
Branch: ref.Name(),
|
||||
Create: false,
|
||||
Force: true,
|
||||
Keep: false,
|
||||
}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to reset: %v", err))
|
||||
}
|
||||
}
|
522
internal/source/gitlab/gitlab.go
Normal file
@ -0,0 +1,522 @@
|
||||
package gitlab
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os/exec"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
gitconfig "github.com/go-git/go-git/v5/config"
|
||||
"github.com/go-git/go-git/v5/plumbing"
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
httpauth "github.com/go-git/go-git/v5/plumbing/transport/http"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/configcli"
|
||||
"go.unistack.org/pkgdash/internal/modules"
|
||||
)
|
||||
|
||||
var ErrPRNotExist = errors.New("pull request does not exist")
|
||||
|
||||
type Gitlab struct {
|
||||
logger logger.Logger
|
||||
URL string
|
||||
Username string
|
||||
Password string
|
||||
PRTitle string
|
||||
PRBody string
|
||||
Repository string
|
||||
RepositoryId string
|
||||
Owner string
|
||||
pulls []*gitlabPull
|
||||
baseRef *plumbing.Reference
|
||||
}
|
||||
|
||||
func NewGitlab(cfg configcli.Config, log logger.Logger) *Gitlab {
|
||||
return &Gitlab{
|
||||
logger: log,
|
||||
URL: cfg.Source.APIURL,
|
||||
Username: cfg.Source.Username,
|
||||
Password: cfg.Source.Password,
|
||||
PRTitle: cfg.PullRequestTitle,
|
||||
PRBody: cfg.PullRequestBody,
|
||||
Repository: cfg.Source.Repository,
|
||||
Owner: cfg.Source.Owner,
|
||||
}
|
||||
}
|
||||
|
||||
type gitlabPull struct {
|
||||
URL string `json:"web_url"`
|
||||
Title string `json:"title"`
|
||||
Target string `json:"target_branch"`
|
||||
Source string `json:"source_branch"`
|
||||
ID int64 `json:"id"`
|
||||
}
|
||||
|
||||
type gitlabProject struct {
|
||||
Id int64 `json:"id"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
func (g *Gitlab) Name() string {
|
||||
return "gitlab"
|
||||
}
|
||||
|
||||
func (g *Gitlab) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestOpen start, mod title: %s", path))
|
||||
|
||||
var buf []byte
|
||||
var err error
|
||||
// build the pull request title template
|
||||
tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle)
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
|
||||
}
|
||||
|
||||
wTitle := bytes.NewBuffer(nil)
|
||||
// build the pull request body template
|
||||
tplBody, err := template.New("pull_request_body").Parse(g.PRBody)
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
|
||||
}
|
||||
|
||||
wBody := bytes.NewBuffer(nil)
|
||||
|
||||
data := map[string]string{
|
||||
"Name": path,
|
||||
"VersionOld": mod.Module.Version,
|
||||
"VersionNew": mod.Version,
|
||||
}
|
||||
|
||||
if err = tplTitle.Execute(wTitle, data); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
|
||||
}
|
||||
if err = tplBody.Execute(wBody, data); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
|
||||
}
|
||||
|
||||
// open the git repository, walking up the directory tree to locate .git
|
||||
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
|
||||
}
|
||||
// fetch refs and objects from the remote
|
||||
if err = repo.FetchContext(ctx, &git.FetchOptions{
|
||||
// Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
Force: true,
|
||||
}); err != nil && err != git.NoErrAlreadyUpToDate {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to fetch repo : %v", err))
|
||||
} // refresh the repository
|
||||
|
||||
var headRef *plumbing.Reference // reference to the target branch
|
||||
|
||||
if g.baseRef == nil {
|
||||
g.baseRef, err = repo.Head()
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("Error head: %s", err))
|
||||
}
|
||||
}
|
||||
|
||||
refIter, err := repo.Branches() // list local branches
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to get branches: %v", err))
|
||||
}
|
||||
for {
|
||||
ref, err := refIter.Next()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
if ref.Name().Short() == branch { // TODO: this matching may need rework
|
||||
headRef = ref
|
||||
break
|
||||
}
|
||||
} // iterate over the branches and stop once the target branch is stored in headRef
|
||||
refIter.Close()
|
||||
|
||||
if headRef == nil {
|
||||
g.logger.Fatal(ctx, "failed to get repo branch head")
|
||||
return err
|
||||
} // the target branch was not found
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("repo head %s", headRef))
|
||||
|
||||
wtree, err := repo.Worktree() // TODO: the worktree may not be needed here
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to get worktree: %v", err))
|
||||
}
|
||||
defer g.checkout(*wtree, *g.baseRef)
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password)
|
||||
if err != nil && err != ErrPRNotExist {
|
||||
g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
|
||||
return err
|
||||
}
|
||||
|
||||
for _, pull := range g.pulls {
|
||||
if strings.Contains(pull.Title, path) {
|
||||
g.logger.Info(ctx, fmt.Sprintf("PR for %s exists %s, call RequestUpdate", path, pull.URL))
|
||||
return g.RequestUpdate(ctx, branch, path, mod)
|
||||
} // if a pull request for this module already exists, update it instead
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("update %s from %s to %s", path, mod.Module.Version, mod.Version))
|
||||
|
||||
sourceBranch := fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)
|
||||
|
||||
g.logger.Info(ctx, "reset worktree")
|
||||
if err = wtree.Reset(&git.ResetOptions{Commit: headRef.Hash(), Mode: git.HardReset}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to reset repo branch: %v", err))
|
||||
}
|
||||
|
||||
if err = wtree.PullContext(ctx, &git.PullOptions{
|
||||
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
Depth: 1,
|
||||
// RemoteURL :
|
||||
Force: true,
|
||||
RemoteName: "origin",
|
||||
}); err != nil && err != git.NoErrAlreadyUpToDate {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to pull repo: %v", err)) // подтягиваем изменения с удаленого репозитория
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("checkout ref %s", headRef))
|
||||
if err = wtree.Checkout(&git.CheckoutOptions{
|
||||
Hash: headRef.Hash(),
|
||||
Branch: plumbing.NewBranchReferenceName(sourceBranch),
|
||||
Create: true,
|
||||
Force: true,
|
||||
}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err))
|
||||
return err
|
||||
} // create a new branch
|
||||
|
||||
epath, err := exec.LookPath("go")
|
||||
if errors.Is(err, exec.ErrDot) {
|
||||
err = nil
|
||||
}
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to find go command: %v", err))
|
||||
} // locate the go binary
|
||||
|
||||
var cmd *exec.Cmd
|
||||
var out []byte
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-droprequire=%s", mod.Module.Path))
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
|
||||
}
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version))
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
|
||||
} // run go mod edit with the new module version
|
||||
|
||||
cmd = exec.CommandContext(ctx, epath, "mod", "tidy")
|
||||
if out, err = cmd.CombinedOutput(); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod tidy: %s err: %v", out, err))
|
||||
} // run go mod tidy to pull in the new module version
|
||||
|
||||
g.logger.Info(ctx, "worktree add go.mod")
|
||||
if _, err = wtree.Add("go.mod"); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree add go.sum")
|
||||
if _, err = wtree.Add("go.sum"); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, "worktree commit")
|
||||
_, err = wtree.Commit(wTitle.String(), &git.CommitOptions{
|
||||
Parents: []plumbing.Hash{headRef.Hash()},
|
||||
Author: &object.Signature{
|
||||
Name: "gitea-actions",
|
||||
Email: "info@unistack.org",
|
||||
When: time.Now(),
|
||||
},
|
||||
}) // commit the changes
|
||||
if err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to commit: %v", err))
|
||||
}
|
||||
|
||||
refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) // TODO: this refspec probably needs rework
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec))
|
||||
|
||||
if err = repo.PushContext(ctx, &git.PushOptions{
|
||||
RefSpecs: []gitconfig.RefSpec{refspec},
|
||||
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
|
||||
Force: true,
|
||||
}); err != nil {
|
||||
g.logger.Fatal(ctx, fmt.Sprintf("failed to push repo branch: %v", err))
|
||||
} // push the changes
|
||||
|
||||
body := map[string]string{
|
||||
"id": g.RepositoryId,
|
||||
"source_branch": sourceBranch,
|
||||
"target_branch": branch,
|
||||
"title": wTitle.String(),
|
||||
"description": wBody.String(),
|
||||
}
|
||||
g.logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body))
|
||||
|
||||
buf, err = json.Marshal(body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf))
|
||||
|
||||
req, err := http.NewRequestWithContext(
|
||||
ctx,
|
||||
http.MethodPost,
|
||||
fmt.Sprintf("https://%s/api/v4/projects/%s/merge_requests", g.URL, g.RepositoryId),
|
||||
bytes.NewReader(buf),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", "Bearer "+g.Password)
|
||||
|
||||
rsp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
} // the request above creates the merge request
|
||||
if rsp.StatusCode != http.StatusCreated {
|
||||
buf, _ = io.ReadAll(rsp.Body)
|
||||
return fmt.Errorf("unknown error: %s", buf)
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("PR create for %s-%s", path, mod.Version))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Gitlab) RequestClose(ctx context.Context, branch string, path string) error {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestClose start, mod title: %s", path))
|
||||
var err error
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
|
||||
return err
|
||||
}
|
||||
|
||||
prExist := false
|
||||
var b string // Name of the branch to be deleted
|
||||
for _, pull := range g.pulls {
|
||||
if strings.Contains(pull.Title, path) {
|
||||
g.logger.Info(ctx, fmt.Sprintf("PR for %s exists: %s", path, pull.URL))
|
||||
prExist = true
|
||||
b = pull.Source
|
||||
}
|
||||
}
|
||||
if !prExist {
|
||||
g.logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path))
|
||||
return ErrPRNotExist
|
||||
}
|
||||
|
||||
req, err := g.DeleteBranch(ctx, g.URL, g.RepositoryId, b, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to create request for delete the branch: %s, err: %s", branch, err))
|
||||
return err
|
||||
}
|
||||
rsp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to do request for delete the branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode))
|
||||
return err
|
||||
}
|
||||
|
||||
g.logger.Info(ctx, fmt.Sprintf("Delete branch for %s successful", path))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Gitlab) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestUpdate start, mod title: %s", path))
|
||||
var err error
|
||||
|
||||
g.RepositoryId, err = g.GetRepoID(ctx, g.URL, g.Owner, g.Repository, g.Password)
|
||||
if err != nil || g.RepositoryId == "" {
|
||||
return fmt.Errorf("project id is empty")
|
||||
}
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
|
||||
return err
|
||||
}
|
||||
|
||||
prExist := false
|
||||
for _, pull := range g.pulls {
|
||||
if strings.Contains(pull.Title, path) {
|
||||
g.logger.Info(ctx, fmt.Sprintf("don't skip %s since pr exist %s", path, pull.URL)) // todo
|
||||
tVersion := getVersions(pull.Source) // take the latest version straight from the branch name
|
||||
if modules.IsNewerVersion(tVersion, mod.Version, false) {
|
||||
reqDel, err := g.DeleteBranch(ctx, g.URL, g.RepositoryId, pull.Source, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("Error with create request for branch: %s, err: %s", branch, err))
|
||||
return err
|
||||
}
|
||||
rsp, err := http.DefaultClient.Do(reqDel)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("Error with do request for branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode))
|
||||
return err
|
||||
}
|
||||
g.logger.Info(ctx, fmt.Sprintf("Old pr %s successful delete", pull.Source))
|
||||
} else {
|
||||
g.logger.Debug(ctx, "The existing PR is relevant")
|
||||
return nil
|
||||
}
|
||||
prExist = true
|
||||
}
|
||||
}
|
||||
if !prExist {
|
||||
g.logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path))
|
||||
return ErrPRNotExist
|
||||
}
|
||||
|
||||
return g.RequestOpen(ctx, branch, path, mod) // TODO: this re-open approach needs rethinking
|
||||
}
|
||||
|
||||
func (g *Gitlab) RequestList(ctx context.Context, branch string) (map[string]string, error) {
|
||||
g.logger.Debug(ctx, fmt.Sprintf("RequestList for %s", branch))
|
||||
var err error
|
||||
|
||||
g.RepositoryId, err = g.GetRepoID(ctx, g.URL, g.Owner, g.Repository, g.Password)
|
||||
if err != nil || g.RepositoryId == "" {
|
||||
return nil, fmt.Errorf("project id is empty")
|
||||
}
|
||||
|
||||
g.pulls, err = g.GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password)
|
||||
if err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var path string
|
||||
rMap := make(map[string]string)
|
||||
|
||||
for _, pull := range g.pulls {
|
||||
if !strings.HasPrefix(pull.Title, "Bump ") { // keep only the bot's module update pull requests
|
||||
continue
|
||||
}
|
||||
path = strings.Split(pull.Title, " ")[1] // TODO: works only with the default title template
|
||||
rMap[path] = pull.Title
|
||||
}
|
||||
return rMap, nil
|
||||
}
|
||||
|
||||
func getVersions(s string) string {
|
||||
re := regexp.MustCompile("[vV][0-9]+\\.[0-9]+\\.[0-9]+")
|
||||
|
||||
version := re.FindString(s)
|
||||
|
||||
return version
|
||||
}
|
||||
|
||||
func (g *Gitlab) DeleteBranch(ctx context.Context, url, projectId, branch, password string) (*http.Request, error) {
|
||||
var buf []byte
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodDelete, fmt.Sprintf("https://%s/api/v4/projects/%s/repository/branches/%s", url, projectId, branch), bytes.NewReader(buf))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", "Bearer "+password)
|
||||
return req, err
|
||||
}
|
||||
|
||||
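// GetPulls returns the open merge requests that target the given branch.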
func (g *Gitlab) GetPulls(ctx context.Context, url, projectId, branch, password string) ([]*gitlabPull, error) {
|
||||
pulls := make([]*gitlabPull, 0, 10)
|
||||
req, err := http.NewRequestWithContext(
|
||||
ctx,
|
||||
http.MethodGet,
|
||||
fmt.Sprintf("https://%s/api/v4/projects/%s/merge_requests?state=opened&target_branch=%s", url, projectId, branch),
|
||||
nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} // request to list the project's open merge requests
|
||||
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", "Bearer "+password)
|
||||
|
||||
rsp, err := http.DefaultClient.Do(req) // execute the request
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
buf, _ := io.ReadAll(rsp.Body)
|
||||
|
||||
switch rsp.StatusCode {
|
||||
case http.StatusOK:
|
||||
if err = json.Unmarshal(buf, &pulls); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err))
|
||||
return nil, err
|
||||
}
|
||||
return pulls, nil
|
||||
case http.StatusNotFound:
|
||||
g.logger.Info(ctx, fmt.Sprintf("pull-request is not exist for %s", projectId))
|
||||
return nil, ErrPRNotExist
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown error: %s", buf)
|
||||
}
|
||||
}
|
||||
|
||||
func (g *Gitlab) GetRepoID(ctx context.Context, url, owner, repo, password string) (rId string, err error) {
|
||||
var buf []byte
|
||||
projects := make([]*gitlabProject, 0, 10)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, fmt.Sprintf("https://%s/api/v4/users/%s/projects?owned=true", url, owner), nil)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
req.Header.Add("Accept", "application/json")
|
||||
req.Header.Add("Content-Type", "application/json")
|
||||
req.Header.Add("Authorization", password)
|
||||
|
||||
rsp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
buf, _ = io.ReadAll(rsp.Body)
|
||||
|
||||
switch rsp.StatusCode {
|
||||
case http.StatusOK:
|
||||
if err = json.Unmarshal(buf, &projects); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err))
|
||||
}
|
||||
for _, p := range projects {
|
||||
if p.Name == repo {
|
||||
rId = strconv.Itoa(int(p.Id))
|
||||
}
|
||||
}
|
||||
return
|
||||
default:
|
||||
return rId, fmt.Errorf("unknown error: %s", buf)
|
||||
}
|
||||
}
|
||||
|
||||
func (g *Gitlab) checkout(w git.Worktree, ref plumbing.Reference) {
|
||||
ctx := context.Background()
|
||||
g.logger.Debug(ctx, fmt.Sprintf("Checkout: %s", ref.Name().Short()))
|
||||
|
||||
if err := w.Checkout(&git.CheckoutOptions{
|
||||
Branch: ref.Name(),
|
||||
Create: false,
|
||||
Force: true,
|
||||
Keep: false,
|
||||
}); err != nil {
|
||||
g.logger.Error(ctx, fmt.Sprintf("failed to reset: %v", err))
|
||||
}
|
||||
}
|
44
internal/source/gogs/gogs.go
Normal file
@ -0,0 +1,44 @@
|
||||
package gogs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/configcli"
|
||||
"go.unistack.org/pkgdash/internal/modules"
|
||||
)
|
||||
|
||||
type Gogs struct {
|
||||
logger logger.Logger
|
||||
Username string
|
||||
Password string
|
||||
}
|
||||
|
||||
func NewGogs(cfg configcli.Config, log logger.Logger) *Gogs {
|
||||
return &Gogs{
|
||||
logger: log,
|
||||
Username: cfg.Source.Username,
|
||||
Password: cfg.Source.Password,
|
||||
}
|
||||
}
|
||||
|
||||
func (g *Gogs) Name() string {
|
||||
return "gogs"
|
||||
}
|
||||
|
||||
func (g *Gogs) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error {
|
||||
return fmt.Errorf("implement me")
|
||||
}
|
||||
|
||||
func (g *Gogs) RequestClose(ctx context.Context, branch string, path string) error {
|
||||
return fmt.Errorf("implement me")
|
||||
}
|
||||
|
||||
func (g *Gogs) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error {
|
||||
return fmt.Errorf("implement me")
|
||||
}
|
||||
|
||||
func (g *Gogs) RequestList(ctx context.Context, branch string) (map[string]string, error) {
|
||||
return nil, fmt.Errorf("implement me")
|
||||
}
|
35
internal/source/source.go
Normal file
@ -0,0 +1,35 @@
|
||||
package source
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/configcli"
|
||||
"go.unistack.org/pkgdash/internal/modules"
|
||||
"go.unistack.org/pkgdash/internal/source/gitea"
|
||||
"go.unistack.org/pkgdash/internal/source/github"
|
||||
"go.unistack.org/pkgdash/internal/source/gitlab"
|
||||
"go.unistack.org/pkgdash/internal/source/gogs"
|
||||
)
|
||||
|
||||
type SourceControl interface {
|
||||
Name() string
|
||||
RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error
|
||||
RequestClose(ctx context.Context, branch string, path string) error
|
||||
RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error
|
||||
RequestList(ctx context.Context, branch string) (map[string]string, error)
|
||||
}
|
||||
|
||||
func NewSourceControl(cfg configcli.Config, log logger.Logger) SourceControl {
|
||||
switch cfg.Source.TypeGit {
|
||||
case "github":
|
||||
return github.NewGithub(cfg, log)
|
||||
case "gitlab":
|
||||
return gitlab.NewGitlab(cfg, log)
|
||||
case "gitea":
|
||||
return gitea.NewGitea(cfg, log)
|
||||
case "gogs":
|
||||
return gogs.NewGogs(cfg, log)
|
||||
}
|
||||
return nil
|
||||
}
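For orientation, a minimal sketch of how this factory is meant to be consumed; the cfg.Source.TypeGit field is taken from the switch above, while the helper function itself is illustrative and not part of this commit:

```go
package main

import (
	"context"
	"fmt"

	"go.unistack.org/micro/v3/logger"
	"go.unistack.org/pkgdash/internal/configcli"
	"go.unistack.org/pkgdash/internal/source"
)

// listOpenRequests picks a backend by cfg.Source.TypeGit and prints the
// bot's open pull requests targeting the given branch.
func listOpenRequests(ctx context.Context, cfg configcli.Config, log logger.Logger, branch string) error {
	sc := source.NewSourceControl(cfg, log)
	if sc == nil { // unknown cfg.Source.TypeGit value
		return fmt.Errorf("unsupported source type")
	}
	prs, err := sc.RequestList(ctx, branch)
	if err != nil {
		return err
	}
	for path, title := range prs {
		fmt.Printf("%s: %s\n", path, title)
	}
	return nil
}
```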
|
34
internal/storage/postgres_tmp/queries.go
Normal file
@ -0,0 +1,34 @@
|
||||
//go:build ignore
|
||||
|
||||
package postgres
|
||||
|
||||
const (
|
||||
queryListPackage = `
|
||||
select
|
||||
id,
|
||||
name,
|
||||
url,
|
||||
comments
|
||||
--modules,
|
||||
--issues,
|
||||
from package;
|
||||
`
|
||||
queryAddComment = `
|
||||
with insert_comm as (
|
||||
insert into comment(text) values ($1) returning id
|
||||
)
|
||||
update package set comments = array_append(comments, (select * from insert_comm)) where id=$2;
|
||||
`
|
||||
queryAddPackage = `
|
||||
insert into package(name, url, modules) values ($1, $2, $3);
|
||||
`
|
||||
queryInsMsgGetIDs = `
|
||||
insert into module(name, version, last_version) values
|
||||
%s
|
||||
returning id;
|
||||
`
|
||||
queryGetModule = `
|
||||
select id, name, version, last_version from module
|
||||
where id in %s ;
|
||||
`
|
||||
)
|
235
internal/storage/postgres_tmp/storage.go
Normal file
@ -0,0 +1,235 @@
|
||||
//go:build ignore
|
||||
|
||||
package postgres
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"embed"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/golang-migrate/migrate/v3"
|
||||
mpgx "github.com/golang-migrate/migrate/v4/database/pgx"
|
||||
"github.com/golang-migrate/migrate/v4/source/iofs"
|
||||
"github.com/lib/pq"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/config"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
const (
|
||||
pathMigration = `migrations/postgres`
|
||||
)
|
||||
|
||||
type Postgres struct {
|
||||
db *sql.DB
|
||||
fs embed.FS
|
||||
}
|
||||
|
||||
func NewStorage() func(*sql.DB, embed.FS) interface{} {
|
||||
return func(db *sql.DB, fs embed.FS) interface{} {
|
||||
return &Postgres{db: db, fs: fs}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Postgres) MigrateUp() error {
|
||||
driver, err := mpgx.WithInstance(s.db, &mpgx.Config{
|
||||
MigrationsTable: mpgx.DefaultMigrationsTable,
|
||||
DatabaseName: config.ServiceName,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
source, err := iofs.New(s.fs, pathMigration)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: pass own logger
|
||||
m, err := migrate.NewWithInstance("fs", source, config.ServiceName, driver)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = m.Up(); err != nil && !errors.Is(err, migrate.ErrNoChange) {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Postgres) MigrateDown() error {
|
||||
driver, err := mpgx.WithInstance(s.db, &mpgx.Config{
|
||||
MigrationsTable: mpgx.DefaultMigrationsTable,
|
||||
DatabaseName: config.ServiceName,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
source, err := iofs.New(s.fs, pathMigration)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: pass own logger
|
||||
m, err := migrate.NewWithInstance("fs", source, config.ServiceName, driver)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = m.Down(); err != nil && !errors.Is(err, migrate.ErrNoChange) {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Postgres) PackagesUpdate(ctx context.Context, req *pb.PackagesUpdateReq) error {
|
||||
panic("need implement")
|
||||
}
|
||||
|
||||
func (s *Postgres) PackagesList(ctx context.Context, req *pb.PackagesListReq) (models.ListPackage, error) {
|
||||
rows, err := s.db.QueryContext(ctx, queryListPackage)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err = rows.Close(); err != nil {
|
||||
return
|
||||
}
|
||||
err = rows.Err()
|
||||
}()
|
||||
|
||||
result := make([]*models.Package, 0)
|
||||
for rows.Next() {
|
||||
tmp := &models.Package{}
|
||||
if err = rows.Scan(
|
||||
&tmp.ID,
|
||||
&tmp.Name,
|
||||
&tmp.URL,
|
||||
pq.Array(&tmp.Comments),
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
result = append(result, tmp) // collect the scanned package
|
||||
}
|
||||
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (s *Postgres) CommentsCreate(ctx context.Context, req *pb.CommentsCreateReq) error {
|
||||
tx, err := s.db.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err != nil {
|
||||
if rollbackErr := tx.Rollback(); rollbackErr != nil {
|
||||
logger.Error(ctx, "AddComment: unable to rollback: %v", rollbackErr)
|
||||
}
|
||||
} else {
|
||||
err = tx.Commit()
|
||||
}
|
||||
}()
|
||||
|
||||
res, err := tx.ExecContext(ctx, queryAddComment, req.Text, req.PackageId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if aff, affErr := res.RowsAffected(); affErr != nil {
|
||||
err = affErr
|
||||
} else if aff == 0 {
|
||||
err = errors.New("rows affected is 0")
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Postgres) PackagesCreate(ctx context.Context, req *pb.PackagesCreateReq) error {
|
||||
tx, err := s.db.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err != nil {
|
||||
if rollbackErr := tx.Rollback(); rollbackErr != nil {
|
||||
logger.Error(ctx, "AddPackage: unable to rollback: %v", rollbackErr)
|
||||
}
|
||||
} else {
|
||||
err = tx.Commit()
|
||||
}
|
||||
}()
|
||||
|
||||
res, err := tx.ExecContext(ctx, queryAddPackage, req.Name, req.Url, pq.Array(req.Modules))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if aff, affErr := res.RowsAffected(); affErr != nil {
|
||||
err = affErr
|
||||
} else if aff == 0 {
|
||||
err = errors.New("rows affected is 0")
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *Postgres) InsertButchModules(ctx context.Context, req []models.Module) ([]uint64, error) {
|
||||
tx, err := s.db.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
if err != nil {
|
||||
if rollbackErr := tx.Rollback(); rollbackErr != nil {
|
||||
logger.Error(ctx, "AddPackage: unable to rollback: %v", rollbackErr)
|
||||
}
|
||||
} else {
|
||||
err = tx.Commit()
|
||||
}
|
||||
}()
|
||||
|
||||
query := generateQuery(req)
|
||||
|
||||
rows, err := tx.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
if err = rows.Close(); err != nil {
|
||||
return
|
||||
}
|
||||
err = rows.Err()
|
||||
}()
|
||||
|
||||
result := make([]uint64, 0)
|
||||
for rows.Next() {
|
||||
tmp := uint64(0)
|
||||
if err = rows.Scan(&tmp); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result = append(result, tmp)
|
||||
}
|
||||
|
||||
return result, err
|
||||
}
|
||||
|
||||
func generateQuery(rsp []models.Module) string {
|
||||
const pattern = `%c('%s', '%s', '%s')`
|
||||
build := strings.Builder{}
|
||||
comma := ' '
|
||||
for i := range rsp {
|
||||
str := fmt.Sprintf(pattern, comma, rsp[i].Name, rsp[i].Version, rsp[i].LastVersion)
|
||||
build.WriteString(str)
|
||||
comma = ','
|
||||
}
|
||||
|
||||
return fmt.Sprintf(queryInsMsgGetIDs, build.String())
|
||||
}
|
40
internal/storage/postgres_tmp/storage_test.go
Normal file
@ -0,0 +1,40 @@
|
||||
//go:build ignore
|
||||
|
||||
package postgres
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
)
|
||||
|
||||
func TestGenerate(t *testing.T) {
|
||||
m := []models.Module{
|
||||
{
|
||||
ID: 1,
|
||||
Name: "test",
|
||||
Version: "1.2.3",
|
||||
Package: 2,
|
||||
LastVersion: "23.31",
|
||||
},
|
||||
{
|
||||
ID: 1,
|
||||
Name: "321test",
|
||||
Version: "1.3",
|
||||
Package: 4,
|
||||
LastVersion: "2111.31",
|
||||
},
|
||||
{
|
||||
ID: 1,
|
||||
Name: "testabcd",
|
||||
Version: "1.2.3",
|
||||
Package: 2,
|
||||
LastVersion: "153453.31",
|
||||
},
|
||||
}
|
||||
|
||||
str := generateQuery(m)
|
||||
|
||||
fmt.Println(str)
|
||||
}
|
19
internal/storage/sqlite/queries.go
Normal file
@ -0,0 +1,19 @@
|
||||
package sqlite
|
||||
|
||||
const (
|
||||
queryPackageModulesCount = `update packages set modules = $2 where id = $1;`
|
||||
queryPackagesModulesCreate = `insert into packages_modules as pm (package, module) values ($1, $2) on conflict (package,module) do nothing;`
|
||||
queryPackagesUpdateLastCheck = `update packages set last_check = CURRENT_TIMESTAMP where id = $1;`
|
||||
queryPackagesModules = `select modules.id, modules.name, modules.version from modules left join packages_modules on modules.id = packages_modules.module left join packages on packages.id = packages_modules.package where packages_modules.package = $1;`
|
||||
queryPackagesProcess = `select id, name, url, comments, modules, issues, created, updated, last_check from packages where ROUND((JULIANDAY(CURRENT_TIMESTAMP) - JULIANDAY(last_check)) * 86400) > $1 or last_check is NULL`
|
||||
queryModulesProcess = `select id, name, version, last_check from modules where ROUND((JULIANDAY(CURRENT_TIMESTAMP) - JULIANDAY(last_check)) * 86400) > $1 or last_check is NULL`
|
||||
queryPackagesModulesCount = `update packages set modules = $2, last_check = CURRENT_TIMESTAMP where id = $1;`
|
||||
queryPackagesList = `select id, name, url, comments, modules, issues, created, updated from packages;`
|
||||
queryPackagesLookup = `select id, name, url, comments, modules, issues, created, updated from packages where id = $1;`
|
||||
queryCommentsCreate = `insert into comments (comment) values ($1) returning id;`
|
||||
queryPackagesCreate = `insert into packages as p (name, url) values ($1, $2) on conflict (url) do update set name = p.name returning *;`
|
||||
queryModulesList = `select id, name, version from modules;`
|
||||
queryModulesCreate = `insert into modules as m (name, version) values ($1, $2) on conflict (name,version) do update set last_check = CURRENT_TIMESTAMP returning *;`
|
||||
queryCommentsList = `select id, text, created, updated from comments where package = $1;`
|
||||
queryHandlersList = `select id, name, coverage from handlers where package = $1;`
|
||||
)
|
254
internal/storage/sqlite/storage.go
Normal file
@ -0,0 +1,254 @@
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
"go.unistack.org/pkgdash/internal/storage"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func init() {
|
||||
storage.RegisterStorage("sqlite", NewStorage)
|
||||
}
|
||||
|
||||
var _ storage.Storage = (*Sqlite)(nil)
|
||||
|
||||
type Sqlite struct {
|
||||
logger logger.Logger
|
||||
db *sqlx.DB
|
||||
}
|
||||
|
||||
func NewStorage(log logger.Logger, db *sqlx.DB) interface{} {
|
||||
return &Sqlite{db: db, logger: log}
|
||||
}
|
||||
|
||||
func (s *Sqlite) PackageModulesCreate(ctx context.Context, pkg *models.Package, modules []*models.Module) error {
|
||||
tx, err := s.db.BeginTxx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, mod := range modules {
|
||||
err = tx.GetContext(ctx, mod, queryModulesCreate, mod.Name, mod.Version)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
_, err = tx.ExecContext(ctx, queryPackagesModulesCreate, pkg.ID, mod.ID)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
_, err = tx.ExecContext(ctx, queryPackageModulesCount, pkg.ID, len(modules))
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
|
||||
if err = tx.Commit(); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) PackageDelete(ctx context.Context, req *pb.PackageDeleteReq) error {
|
||||
return fmt.Errorf("need implement")
|
||||
}
|
||||
|
||||
func (s *Sqlite) PackageUpdate(ctx context.Context, req *pb.PackageUpdateReq) (*models.Package, error) {
|
||||
return nil, fmt.Errorf("need implement")
|
||||
}
|
||||
|
||||
func (s *Sqlite) PackageLookup(ctx context.Context, req *pb.PackageLookupReq) (*models.Package, error) {
|
||||
pkg := &models.Package{}
|
||||
|
||||
err := s.db.GetContext(ctx, pkg, queryPackagesLookup, req.Id)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return pkg, err
|
||||
}
|
||||
|
||||
func (s *Sqlite) PackageList(ctx context.Context, req *pb.PackageListReq) ([]*models.Package, error) {
|
||||
var packages []*models.Package
|
||||
|
||||
err := s.db.SelectContext(ctx, &packages, queryPackagesList)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return packages, nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) PackageModules(ctx context.Context, req *pb.PackageModulesReq) ([]*models.Module, error) {
|
||||
var modules []*models.Module
|
||||
|
||||
err := s.db.SelectContext(ctx, &modules, queryPackagesModules, req.Package)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return modules, nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) CommentDelete(ctx context.Context, req *pb.CommentDeleteReq) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) CommentCreate(ctx context.Context, req *pb.CommentCreateReq) (*models.Comment, error) {
|
||||
tx, err := s.db.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err != nil {
|
||||
if rollbackErr := tx.Rollback(); rollbackErr != nil {
|
||||
s.logger.Error(ctx, "AddComment: unable to rollback: %v", rollbackErr)
|
||||
}
|
||||
} else {
|
||||
err = tx.Commit()
|
||||
}
|
||||
}()
|
||||
|
||||
if _, err = tx.ExecContext(ctx, queryCommentsCreate, req.Comment, req.PackageId); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) PackagesProcess(ctx context.Context, td time.Duration) ([]*models.Package, error) {
|
||||
var packages []*models.Package
|
||||
err := s.db.SelectContext(ctx, &packages, queryPackagesProcess, td.Seconds())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return packages, nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) PackagesUpdateLastCheck(ctx context.Context, packages []*models.Package) error {
|
||||
tx, err := s.db.BeginTxx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, pkg := range packages {
|
||||
if _, err = tx.ExecContext(ctx, queryPackagesUpdateLastCheck, pkg.ID); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err = tx.Commit(); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) ModulesProcess(ctx context.Context, td time.Duration) ([]*models.Module, error) {
|
||||
var modules []*models.Module
|
||||
err := s.db.SelectContext(ctx, &modules, queryModulesProcess, td.Seconds())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return modules, nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) PackageCreate(ctx context.Context, req *pb.PackageCreateReq) (*models.Package, error) {
|
||||
pkg := &models.Package{}
|
||||
err := s.db.GetContext(ctx, pkg, queryPackagesCreate, req.Name, req.Url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return pkg, nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) ModuleCreate(ctx context.Context, modules []*models.Module) error {
|
||||
tx, err := s.db.BeginTxx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, mod := range modules {
|
||||
err = tx.GetContext(ctx, mod, queryModulesCreate, mod.Name, mod.Version)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err = tx.Commit(); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) ModuleList(ctx context.Context, req *pb.ModuleListReq) ([]*models.Module, error) {
|
||||
var modules []*models.Module
|
||||
|
||||
err := s.db.SelectContext(ctx, &modules, queryModulesList)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return modules, nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) CommentList(ctx context.Context, req *pb.CommentListReq) ([]*models.Comment, error) {
|
||||
var comments []*models.Comment
|
||||
|
||||
err := s.db.SelectContext(ctx, &comments, queryCommentsList, req.Package)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return comments, nil
|
||||
}
|
||||
|
||||
func (s *Sqlite) HandlerList(ctx context.Context, req *pb.HandlerListReq) ([]*models.Handler, error) {
|
||||
var handlers []*models.Handler
|
||||
|
||||
err := s.db.SelectContext(ctx, &handlers, queryHandlersList, req.Package)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return handlers, nil
|
||||
}
|
50
internal/storage/storage.go
Normal file
@ -0,0 +1,50 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func RegisterStorage(name string, fn func(logger.Logger, *sqlx.DB) interface{}) {
|
||||
storages[name] = fn
|
||||
}
|
||||
|
||||
var storages = map[string]func(logger.Logger, *sqlx.DB) interface{}{}
|
||||
|
||||
type Storage interface {
|
||||
PackageModulesCreate(ctx context.Context, pkg *models.Package, modules []*models.Module) error
|
||||
PackagesUpdateLastCheck(ctx context.Context, packages []*models.Package) error
|
||||
PackageModules(ctx context.Context, req *pb.PackageModulesReq) ([]*models.Module, error)
|
||||
ModulesProcess(ctx context.Context, td time.Duration) ([]*models.Module, error)
|
||||
PackagesProcess(ctx context.Context, td time.Duration) ([]*models.Package, error)
|
||||
PackageCreate(ctx context.Context, req *pb.PackageCreateReq) (*models.Package, error)
|
||||
HandlerList(ctx context.Context, req *pb.HandlerListReq) ([]*models.Handler, error)
|
||||
PackageList(ctx context.Context, req *pb.PackageListReq) ([]*models.Package, error)
|
||||
PackageLookup(ctx context.Context, req *pb.PackageLookupReq) (*models.Package, error)
|
||||
PackageUpdate(ctx context.Context, req *pb.PackageUpdateReq) (*models.Package, error)
|
||||
PackageDelete(ctx context.Context, req *pb.PackageDeleteReq) error
|
||||
CommentCreate(ctx context.Context, req *pb.CommentCreateReq) (*models.Comment, error)
|
||||
CommentDelete(ctx context.Context, req *pb.CommentDeleteReq) error
|
||||
CommentList(ctx context.Context, req *pb.CommentListReq) ([]*models.Comment, error)
|
||||
ModuleList(ctx context.Context, req *pb.ModuleListReq) ([]*models.Module, error)
|
||||
ModuleCreate(ctx context.Context, modules []*models.Module) error
|
||||
}
|
||||
|
||||
func NewStorage(name string, log logger.Logger, db *sqlx.DB) (Storage, error) {
|
||||
fn, ok := storages[name]
|
||||
if !ok {
|
||||
return nil, errors.New("incorrect name store")
|
||||
}
|
||||
store := fn(log, db)
|
||||
database, ok := store.(Storage)
|
||||
if !ok {
|
||||
return nil, errors.New("dont implements interface Storage")
|
||||
}
|
||||
return database, nil
|
||||
}
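A minimal sketch of how the registry above is intended to be used: the blank import of the sqlite package runs its init(), which registers the "sqlite" constructor. The go-sqlite3 driver and the use of logger.DefaultLogger are assumptions, not part of this commit:

```go
package main

import (
	"fmt"

	"github.com/jmoiron/sqlx"
	_ "github.com/mattn/go-sqlite3" // assumed sqlite driver for database/sql
	"go.unistack.org/micro/v3/logger"
	"go.unistack.org/pkgdash/internal/storage"
	_ "go.unistack.org/pkgdash/internal/storage/sqlite" // init() registers the "sqlite" storage
)

func main() {
	db, err := sqlx.Open("sqlite3", "./database.db")
	if err != nil {
		panic(err)
	}
	store, err := storage.NewStorage("sqlite", logger.DefaultLogger, db)
	if err != nil {
		panic(err)
	}
	fmt.Printf("storage ready: %T\n", store)
}
```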
|
69
internal/storage/storage_test.go
Normal file
@ -0,0 +1,69 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"go.unistack.org/pkgdash/internal/storage/sqlite"
|
||||
pb "go.unistack.org/pkgdash/proto"
|
||||
)
|
||||
|
||||
func TestGetModule(t *testing.T) {
|
||||
conn, err := sql.Open("sqlite3", "/Users/devstigneev_local/GolandProjects/unistack/pkgdash/identifier.sqlite")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer conn.Close()
|
||||
if err = conn.Ping(); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
st := sqlite.NewStorage()
|
||||
store := st(conn, fs)
|
||||
|
||||
s, ok := store.(Storage)
|
||||
if !ok {
|
||||
t.Fatal("dont implements interface Storage")
|
||||
}
|
||||
|
||||
req := &pb.GetModuleReq{
|
||||
Id: []uint64{1, 2, 3},
|
||||
}
|
||||
|
||||
module, err := s.GetModule(context.Background(), req)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
fmt.Println(module)
|
||||
}
|
||||
|
||||
func TestGetComment(t *testing.T) {
|
||||
conn, err := sql.Open("sqlite3", "/Users/devstigneev_local/GolandProjects/unistack/pkgdash/identifier.sqlite")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer conn.Close()
|
||||
if err = conn.Ping(); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
st := sqlite.NewStorage()
|
||||
store := st(conn, fs)
|
||||
|
||||
s, ok := store.(Storage)
|
||||
if !ok {
|
||||
t.Fatal("dont implements interface Storage")
|
||||
}
|
||||
|
||||
req := &pb.GetCommentsReq{
|
||||
Id: []uint64{1, 2, 3, 15},
|
||||
}
|
||||
comments, err := s.GetComment(context.Background(), req)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
fmt.Println(comments.Decode())
|
||||
}
|
226
internal/worker/worker.go
Normal file
@ -0,0 +1,226 @@
|
||||
package worker
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
"github.com/go-git/go-git/v5/plumbing/filemode"
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
"github.com/go-git/go-git/v5/storage/memory"
|
||||
"github.com/pkg/errors"
|
||||
"go.unistack.org/micro/v3/logger"
|
||||
"go.unistack.org/pkgdash/internal/models"
|
||||
"go.unistack.org/pkgdash/internal/modules"
|
||||
"go.unistack.org/pkgdash/internal/storage"
|
||||
"golang.org/x/mod/modfile"
|
||||
"golang.org/x/mod/module"
|
||||
)
|
||||
|
||||
func Run(ctx context.Context, log logger.Logger, store storage.Storage, td time.Duration) {
|
||||
modTicker := time.NewTicker(5 * time.Second)
|
||||
defer modTicker.Stop()
|
||||
pkgTicker := time.NewTicker(5 * time.Second)
|
||||
defer pkgTicker.Stop()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-pkgTicker.C:
|
||||
packages, err := store.PackagesProcess(ctx, td)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
continue
|
||||
}
|
||||
log.Fatal(ctx, "failed to get packages to process: %v", err)
|
||||
}
|
||||
wg.Add(len(packages))
|
||||
for _, pkg := range packages {
|
||||
go func(p *models.Package) {
|
||||
if err := parseModFile(ctx, log, store, p); err != nil {
|
||||
log.Error(ctx, "failed to process package %s: %v", p.Name, err)
|
||||
}
|
||||
p.LastCheck.Time = time.Now()
|
||||
wg.Done()
|
||||
}(pkg)
|
||||
}
|
||||
wg.Wait()
|
||||
if err = store.PackagesUpdateLastCheck(ctx, packages); err != nil {
|
||||
log.Error(ctx, "update packages last_check %#+v, err: %v", packages, err)
|
||||
}
|
||||
case <-modTicker.C:
|
||||
modules, err := store.ModulesProcess(ctx, td)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
continue
|
||||
}
|
||||
log.Fatal(ctx, "failed to get modules to process: %v", err)
|
||||
}
|
||||
if err := processModules(ctx, log, store, modules); err != nil {
|
||||
log.Error(ctx, "failed to process modules: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func parseModFile(ctx context.Context, log logger.Logger, store storage.Storage, pkg *models.Package) error {
|
||||
log.Info(ctx, "process package %v", pkg)
|
||||
|
||||
u, err := url.Parse(pkg.URL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var rev string
|
||||
if idx := strings.Index(u.Path, "@"); idx > 0 {
|
||||
rev = u.Path[idx+1:]
|
||||
}
|
||||
|
||||
cloneOpts := &git.CloneOptions{
|
||||
URL: pkg.URL,
|
||||
Progress: os.Stdout,
|
||||
}
|
||||
|
||||
if len(rev) == 0 {
|
||||
cloneOpts.SingleBranch = true
|
||||
cloneOpts.Depth = 1
|
||||
}
|
||||
|
||||
if err = cloneOpts.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
repo, err := git.CloneContext(ctx, memory.NewStorage(), nil, cloneOpts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ref, err := repo.Head()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get head: %v", err)
|
||||
}
|
||||
|
||||
commit, err := repo.CommitObject(ref.Hash())
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get commit: %v", err)
|
||||
}
|
||||
|
||||
tree, err := commit.Tree()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
unique := make(map[string]*models.Module)
|
||||
var mvs []module.Version
|
||||
err = tree.Files().ForEach(func(file *object.File) error {
|
||||
if file == nil {
|
||||
err = errors.New("file pointer is nil")
|
||||
log.Error(ctx, "file tree error", err)
|
||||
return err
|
||||
}
|
||||
|
||||
switch file.Mode {
|
||||
case filemode.Regular:
|
||||
if strings.HasSuffix(file.Name, "go.mod") {
|
||||
if mvs, err = parseFile(file); err != nil {
|
||||
return err
|
||||
}
|
||||
for i := range mvs {
|
||||
unique[mvs[i].Path] = &models.Module{
|
||||
Name: mvs[i].Path,
|
||||
Version: mvs[i].Version,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
if err != nil {
return err
}
|
||||
|
||||
modules := make([]*models.Module, 0, len(unique))
|
||||
for _, v := range unique {
|
||||
modules = append(modules, v)
|
||||
}
|
||||
|
||||
sort.Slice(modules, func(i, j int) bool {
|
||||
return modules[i].Name < modules[j].Name
|
||||
})
|
||||
|
||||
if err = store.PackageModulesCreate(ctx, pkg, modules); err != nil {
|
||||
log.Error(ctx, "failed to set create modules: %v", err)
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func processModules(ctx context.Context, log logger.Logger, store storage.Storage, mods []*models.Module) error {
|
||||
mvs := make(map[string]*models.Module, len(mods))
|
||||
|
||||
for _, mod := range mods {
|
||||
mvs[mod.Name] = mod
|
||||
}
|
||||
|
||||
mvsu := make([]module.Version, 0, len(mvs))
|
||||
for _, mv := range mvs {
|
||||
mvsu = append(mvsu, module.Version{Path: mv.Name, Version: mv.Version})
|
||||
}
|
||||
|
||||
modules.Updates(modules.UpdateOptions{
|
||||
Pre: false,
|
||||
Major: false,
|
||||
Cached: false,
|
||||
Modules: mvsu,
|
||||
OnUpdate: func(u modules.Update) {
|
||||
if u.Err != nil {
|
||||
log.Error(ctx, "%s: failed: %v", u.Module.Path, u.Err)
|
||||
} else {
|
||||
mvs[u.Module.Path].Version = u.Version
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
if err := store.ModuleCreate(ctx, mods); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseFile(file *object.File) ([]module.Version, error) {
|
||||
r, err := file.Reader()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(r)
|
||||
r.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
modfile, err := modfile.ParseLax("go.mod", data, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
mods := make([]module.Version, 0, len(modfile.Require))
|
||||
for _, req := range modfile.Require {
|
||||
mods = append(mods, req.Mod)
|
||||
}
|
||||
|
||||
sort.Slice(mods, func(i, j int) bool {
|
||||
return mods[i].Path < mods[j].Path
|
||||
})
|
||||
|
||||
return mods, nil
|
||||
}
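A short sketch of how the loop above might be launched from the daemon; the one-hour re-check interval is an illustrative value, not taken from this commit:

```go
package main

import (
	"context"
	"time"

	"go.unistack.org/micro/v3/logger"
	"go.unistack.org/pkgdash/internal/storage"
	"go.unistack.org/pkgdash/internal/worker"
)

// startWorker runs the background re-check loop until ctx is cancelled.
func startWorker(ctx context.Context, log logger.Logger, store storage.Storage) {
	go worker.Run(ctx, log, store, time.Hour)
}
```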
|
8
local.yaml
Normal file
@ -0,0 +1,8 @@
|
||||
server:
|
||||
addr: ":9091"
|
||||
logger_level: "debug"
|
||||
meter:
|
||||
addr: ":8081"
|
||||
path: "/metrics"
|
||||
database:
|
||||
dsn: "sqlite://./database.db?migrate=up"
|
519
proto/apidocs.swagger.yaml
Normal file
@ -0,0 +1,519 @@
|
||||
# Generated with protoc-gen-go-micro
|
||||
|
||||
openapi: 3.0.3
|
||||
info:
|
||||
title: Pkgdash API
|
||||
version: 0.0.1
|
||||
paths:
|
||||
/v1/comments/{id}:
|
||||
get:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: CommentLookup
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
- name: package
|
||||
in: query
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CommentLookupRsp'
|
||||
/v1/modules:
|
||||
get:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: ModuleList
|
||||
parameters:
|
||||
- name: package
|
||||
in: query
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ModuleListRsp'
|
||||
/v1/packages:
|
||||
get:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: PackageList
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PackageListRsp'
|
||||
post:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: PackageCreate
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PackageCreateReq'
|
||||
required: true
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PackageCreateRsp'
|
||||
/v1/packages/{id}:
|
||||
get:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: PackageLookup
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PackageLookupRsp'
|
||||
put:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: PackageUpdate
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PackageUpdateReq'
|
||||
required: true
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PackageUpdateRsp'
|
||||
delete:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: PackageDelete
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PackageDeleteRsp'
|
||||
/v1/packages/{package_id}/comments/{id}:
|
||||
delete:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: CommentDelete
|
||||
parameters:
|
||||
- name: package_id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CommentDeleteRsp'
|
||||
/v1/packages/{package}/comments:
|
||||
get:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: CommentList
|
||||
parameters:
|
||||
- name: package
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CommentListRsp'
|
||||
post:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: CommentCreate
|
||||
parameters:
|
||||
- name: package
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CommentCreateReq'
|
||||
required: true
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CommentCreateRsp'
|
||||
/v1/packages/{package}/handlers:
|
||||
get:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: HandlerList
|
||||
parameters:
|
||||
- name: package
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/HandlerListRsp'
|
||||
/v1/packages/{package}/modules:
|
||||
get:
|
||||
tags:
|
||||
- Pkgdash
|
||||
operationId: PackageModules
|
||||
parameters:
|
||||
- name: package
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: integer
|
||||
format: uint64
|
||||
responses:
|
||||
default:
|
||||
description: Default
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorRsp'
|
||||
"200":
|
||||
description: OK
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PackageModulesRsp'
|
||||
components:
|
||||
schemas:
|
||||
Comment:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: integer
|
||||
format: uint64
|
||||
package:
|
||||
type: integer
|
||||
format: uint64
|
||||
comment:
|
||||
type: string
|
||||
created:
|
||||
type: string
|
||||
format: RFC3339
|
||||
updated:
|
||||
type: string
|
||||
format: RFC3339
|
||||
CommentCreateReq:
|
||||
type: object
|
||||
properties:
|
||||
package_id:
|
||||
type: integer
|
||||
format: uint64
|
||||
comment:
|
||||
type: string
|
||||
CommentCreateRsp:
|
||||
type: object
|
||||
properties:
|
||||
comment:
|
||||
$ref: '#/components/schemas/Comment'
|
||||
CommentDeleteRsp:
|
||||
type: object
|
||||
properties: {}
|
||||
CommentListRsp:
|
||||
type: object
|
||||
properties:
|
||||
comments:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/Comment'
|
||||
CommentLookupRsp:
|
||||
type: object
|
||||
properties:
|
||||
comment:
|
||||
$ref: '#/components/schemas/Comment'
|
||||
ErrorRsp:
|
||||
type: object
|
||||
properties:
|
||||
code:
|
||||
type: string
|
||||
title:
|
||||
type: string
|
||||
uuid:
|
||||
type: string
|
||||
details:
|
||||
type: string
|
||||
Handler:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: integer
|
||||
format: uint64
|
||||
package:
|
||||
type: integer
|
||||
format: uint64
|
||||
name:
|
||||
type: string
|
||||
coverage:
|
||||
type: number
|
||||
format: double
|
||||
HandlerListRsp:
|
||||
type: object
|
||||
properties:
|
||||
handlers:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/Handler'
|
||||
Module:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: integer
|
||||
format: uint64
|
||||
name:
|
||||
type: string
|
||||
version:
|
||||
type: string
|
||||
last_check:
|
||||
type: string
|
||||
format: RFC3339
|
||||
ModuleListRsp:
|
||||
type: object
|
||||
properties:
|
||||
modules:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/Module'
|
||||
Package:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: integer
|
||||
format: uint64
|
||||
name:
|
||||
type: string
|
||||
url:
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
modules:
|
||||
type: integer
|
||||
format: uint64
|
||||
issues:
|
||||
type: integer
|
||||
format: uint64
|
||||
comments:
|
||||
type: integer
|
||||
format: uint64
|
||||
handlers:
|
||||
type: integer
|
||||
format: uint64
|
||||
created:
|
||||
type: string
|
||||
format: RFC3339
|
||||
updated:
|
||||
type: string
|
||||
format: RFC3339
|
||||
last_check:
|
||||
type: string
|
||||
format: RFC3339
|
||||
type:
|
||||
type: string
|
||||
coverage:
|
||||
type: number
|
||||
format: double
|
||||
PackageCreateReq:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
url:
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
PackageCreateRsp:
|
||||
type: object
|
||||
properties:
|
||||
package:
|
||||
$ref: '#/components/schemas/Package'
|
||||
PackageDeleteRsp:
|
||||
type: object
|
||||
properties: {}
|
||||
PackageListRsp:
|
||||
type: object
|
||||
properties:
|
||||
packages:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/Package'
|
||||
PackageLookupRsp:
|
||||
type: object
|
||||
properties:
|
||||
package:
|
||||
$ref: '#/components/schemas/Package'
|
||||
PackageModulesRsp:
|
||||
type: object
|
||||
properties:
|
||||
modules:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/Module'
|
||||
PackageUpdateReq:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: integer
|
||||
format: uint64
|
||||
name:
|
||||
type: string
|
||||
url:
|
||||
type: string
|
||||
modules:
|
||||
type: array
|
||||
items:
|
||||
type: integer
|
||||
format: uint64
|
||||
issues:
|
||||
type: array
|
||||
items:
|
||||
type: integer
|
||||
format: uint64
|
||||
coverprofile:
|
||||
type: string
|
||||
format: bytes
|
||||
PackageUpdateRsp:
|
||||
type: object
|
||||
properties:
|
||||
package:
|
||||
$ref: '#/components/schemas/Package'
|
||||
tags:
|
||||
- name: Pkgdash
|
17
proto/micro_errors.pb.go
Normal file
@ -0,0 +1,17 @@
|
||||
// Code generated by protoc-gen-go-micro. DO NOT EDIT.
|
||||
// protoc-gen-go-micro version: v3.10.4
|
||||
|
||||
package pkgdashpb
|
||||
|
||||
import (
|
||||
protojson "google.golang.org/protobuf/encoding/protojson"
|
||||
)
|
||||
|
||||
var (
|
||||
marshaler = protojson.MarshalOptions{}
|
||||
)
|
||||
|
||||
func (m *ErrorRsp) Error() string {
|
||||
buf, _ := marshaler.Marshal(m)
|
||||
return string(buf)
|
||||
}
|
2524
proto/pkgdash.pb.go
Normal file
File diff suppressed because it is too large
3838
proto/pkgdash.pb.validate.go
Normal file
File diff suppressed because it is too large
318
proto/pkgdash.proto
Normal file
@ -0,0 +1,318 @@
|
||||
syntax = "proto3";
|
||||
|
||||
package pkgdash;
|
||||
|
||||
import "api/annotations.proto";
|
||||
import "google/protobuf/timestamp.proto";
|
||||
import "openapiv3/annotations.proto";
|
||||
import "validate/validate.proto";
|
||||
|
||||
option go_package = "go.unistack.org/pkgdash/proto;pkgdashpb";
|
||||
|
||||
service Pkgdash {
|
||||
rpc PackageLookup(PackageLookupReq) returns (PackageLookupRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "PackageLookup";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {get: "/v1/packages/{id}"};
|
||||
}
|
||||
rpc PackageCreate(PackageCreateReq) returns (PackageCreateRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "PackageCreate";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {
|
||||
post: "/v1/packages";
|
||||
body: "*";
|
||||
};
|
||||
}
|
||||
rpc PackageDelete(PackageDeleteReq) returns (PackageDeleteRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "PackageDelete";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {delete: "/v1/packages/{id}"};
|
||||
}
|
||||
rpc PackageList(PackageListReq) returns (PackageListRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "PackageList";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {get: "/v1/packages"};
|
||||
}
|
||||
rpc HandlerList(HandlerListReq) returns (HandlerListRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "HandlerList";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {get: "/v1/packages/{package}/handlers"};
|
||||
}
|
||||
rpc PackageModules(PackageModulesReq) returns (PackageModulesRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "PackageModules";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {get: "/v1/packages/{package}/modules"};
|
||||
}
|
||||
rpc PackageUpdate(PackageUpdateReq) returns (PackageUpdateRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "PackageUpdate";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {
|
||||
put: "/v1/packages/{id}";
|
||||
body: "*";
|
||||
};
|
||||
}
|
||||
rpc CommentCreate(CommentCreateReq) returns (CommentCreateRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "CommentCreate";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {
|
||||
post: "/v1/packages/{package}/comments";
|
||||
body: "*";
|
||||
};
|
||||
}
|
||||
rpc CommentLookup(CommentLookupReq) returns (CommentLookupRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "CommentLookup";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {
|
||||
get: "/v1/comments/{id}";
|
||||
additional_bindings {get: "/v1/comments/{package}/comments/{id}"};
|
||||
};
|
||||
}
|
||||
rpc CommentList(CommentListReq) returns (CommentListRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "CommentList";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {get: "/v1/packages/{package}/comments"};
|
||||
}
|
||||
rpc CommentDelete(CommentDeleteReq) returns (CommentDeleteRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "CommentDelete";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {
|
||||
delete: "/v1/packages/{package_id}/comments/{id}";
|
||||
additional_bindings {delete: "/v1/comments/{id}"};
|
||||
};
|
||||
}
|
||||
rpc ModuleList(ModuleListReq) returns (ModuleListRsp) {
|
||||
option (micro.openapiv3.openapiv3_operation) = {
|
||||
operation_id: "ModuleList";
|
||||
responses: {
|
||||
default: {
|
||||
reference: {_ref: ".pkgdash.ErrorRsp"};
|
||||
};
|
||||
};
|
||||
};
|
||||
option (micro.api.http) = {get: "/v1/modules"};
|
||||
}
|
||||
}
|
||||
|
||||
message HandlerListReq {
|
||||
uint64 package = 1;
|
||||
}
|
||||
|
||||
message HandlerListRsp {
|
||||
repeated Handler handlers = 1;
|
||||
}
|
||||
|
||||
message PackageModulesReq {
|
||||
uint64 package = 1 [json_name = "package"];
|
||||
}
|
||||
|
||||
message PackageModulesRsp {
|
||||
repeated Module modules = 1 [json_name = "modules"];
|
||||
}
|
||||
|
||||
message PackageLookupReq {
|
||||
uint64 id = 1 [json_name = "id"];
|
||||
}
|
||||
|
||||
message PackageLookupRsp {
|
||||
Package package = 1 [json_name = "package"];
|
||||
}
|
||||
|
||||
message ErrorRsp {
|
||||
string code = 1 [json_name = "code"];
|
||||
string title = 2 [json_name = "title"];
|
||||
string uuid = 3 [json_name = "uuid"];
|
||||
string details = 4 [json_name = "details"];
|
||||
}
|
||||
|
||||
message Package {
|
||||
uint64 id = 1 [(validate.rules).uint64.gt = 0];
|
||||
string name = 2 [(validate.rules).string.min_len = 1];
|
||||
string url = 3 [(validate.rules).string.min_len = 1];
|
||||
string description = 4 [(validate.rules).string.min_len = 1];
|
||||
uint64 modules = 5;
|
||||
uint64 issues = 6;
|
||||
uint64 comments = 7;
|
||||
uint64 handlers = 8;
|
||||
google.protobuf.Timestamp created = 9;
|
||||
google.protobuf.Timestamp updated = 10;
|
||||
google.protobuf.Timestamp last_check = 11;
|
||||
string type = 12;
|
||||
double coverage = 13;
|
||||
}
|
||||
|
||||
message Handler {
|
||||
uint64 id = 1 [(validate.rules).uint64.gt = 0];
|
||||
uint64 package = 2 [(validate.rules).uint64.gt = 0];
|
||||
string name = 3 [(validate.rules).string.min_len = 1];
|
||||
double coverage = 4;
|
||||
}
|
||||
|
||||
message Module {
|
||||
uint64 id = 1 [(validate.rules).uint64.gt = 0];
|
||||
string name = 2 [(validate.rules).string.min_len = 1];
|
||||
string version = 3 [(validate.rules).string.min_len = 1];
|
||||
google.protobuf.Timestamp last_check = 8;
|
||||
}
|
||||
|
||||
message Issue {
|
||||
uint64 id = 1 [(validate.rules).uint64.gt = 0];
|
||||
uint64 status = 2 [(validate.rules).uint64.gt = 0];
|
||||
string desc = 3 [(validate.rules).string.min_len = 1];
|
||||
uint64 package = 4 [(validate.rules).uint64.gt = 0];
|
||||
repeated uint64 modules = 5;
|
||||
google.protobuf.Timestamp created = 6;
|
||||
google.protobuf.Timestamp updated = 7;
|
||||
}
|
||||
|
||||
message Comment {
|
||||
uint64 id = 1 [(validate.rules).uint64.gt = 0];
|
||||
uint64 package = 2 [(validate.rules).uint64.gt = 0];
|
||||
string comment = 3;
|
||||
google.protobuf.Timestamp created = 4;
|
||||
google.protobuf.Timestamp updated = 5;
|
||||
}
|
||||
|
||||
message CommentDeleteReq {
|
||||
uint64 id = 1 [json_name = "id"];
|
||||
uint64 package_id = 2 [json_name = "package_id"];
|
||||
}
|
||||
|
||||
message CommentDeleteRsp {}
|
||||
|
||||
message PackageDeleteReq {
|
||||
uint64 id = 1 [json_name = "id"];
|
||||
}
|
||||
|
||||
message PackageDeleteRsp {}
|
||||
|
||||
message PackageListReq {}
|
||||
|
||||
message PackageListRsp {
|
||||
repeated Package packages = 1;
|
||||
}
|
||||
|
||||
message PackageUpdateReq {
|
||||
uint64 id = 1 [(validate.rules).uint64.gt = 0];
|
||||
string name = 2 [(validate.rules).string.min_len = 1];
|
||||
string url = 3 [(validate.rules).string.min_len = 1];
|
||||
repeated uint64 modules = 4;
|
||||
repeated uint64 issues = 5;
|
||||
bytes coverprofile = 6;
|
||||
}
|
||||
|
||||
message PackageUpdateRsp {
|
||||
Package package = 1 [json_name = "package"];
|
||||
}
|
||||
|
||||
message CommentCreateReq {
|
||||
uint64 package_id = 1 [
|
||||
json_name = "package_id",
|
||||
(validate.rules).uint64.gt = 0
|
||||
];
|
||||
string comment = 2;
|
||||
}
|
||||
|
||||
message CommentCreateRsp {
|
||||
Comment comment = 1 [json_name = "comment"];
|
||||
}
|
||||
|
||||
message PackageCreateReq {
|
||||
string name = 1 [(validate.rules).string.min_len = 1];
|
||||
string url = 2 [(validate.rules).string.min_len = 1];
|
||||
string description = 3;
|
||||
}
|
||||
|
||||
message PackageCreateRsp {
|
||||
Package package = 1 [json_name = "package"];
|
||||
}
|
||||
|
||||
message ModuleListReq {
|
||||
uint64 package = 1 [json_name = "package"];
|
||||
}
|
||||
|
||||
message ModuleListRsp {
|
||||
repeated Module modules = 1 [json_name = "modules"];
|
||||
}
|
||||
|
||||
message CommentListReq {
|
||||
uint64 package = 1 [json_name = "package"];
|
||||
}
|
||||
|
||||
message CommentListRsp {
|
||||
repeated Comment comments = 1 [json_name = "comments"];
|
||||
}
|
||||
|
||||
message CommentLookupReq {
|
||||
uint64 id = 1 [json_name = "id"];
|
||||
uint64 package = 2 [json_name = "package"];
|
||||
}
|
||||
|
||||
message CommentLookupRsp {
|
||||
Comment comment = 1 [json_name = "comment"];
|
||||
}
|
46
proto/pkgdash_micro.pb.go
Normal file
@ -0,0 +1,46 @@
|
||||
// Code generated by protoc-gen-go-micro. DO NOT EDIT.
|
||||
// versions:
|
||||
// - protoc-gen-go-micro v3.10.4
|
||||
// - protoc v5.28.3
|
||||
// source: pkgdash.proto
|
||||
|
||||
package pkgdashpb
|
||||
|
||||
import (
|
||||
context "context"
|
||||
client "go.unistack.org/micro/v3/client"
|
||||
)
|
||||
|
||||
var (
|
||||
PkgdashName = "Pkgdash"
|
||||
)
|
||||
|
||||
type PkgdashClient interface {
|
||||
PackageLookup(ctx context.Context, req *PackageLookupReq, opts ...client.CallOption) (*PackageLookupRsp, error)
|
||||
PackageCreate(ctx context.Context, req *PackageCreateReq, opts ...client.CallOption) (*PackageCreateRsp, error)
|
||||
PackageDelete(ctx context.Context, req *PackageDeleteReq, opts ...client.CallOption) (*PackageDeleteRsp, error)
|
||||
PackageList(ctx context.Context, req *PackageListReq, opts ...client.CallOption) (*PackageListRsp, error)
|
||||
HandlerList(ctx context.Context, req *HandlerListReq, opts ...client.CallOption) (*HandlerListRsp, error)
|
||||
PackageModules(ctx context.Context, req *PackageModulesReq, opts ...client.CallOption) (*PackageModulesRsp, error)
|
||||
PackageUpdate(ctx context.Context, req *PackageUpdateReq, opts ...client.CallOption) (*PackageUpdateRsp, error)
|
||||
CommentCreate(ctx context.Context, req *CommentCreateReq, opts ...client.CallOption) (*CommentCreateRsp, error)
|
||||
CommentLookup(ctx context.Context, req *CommentLookupReq, opts ...client.CallOption) (*CommentLookupRsp, error)
|
||||
CommentList(ctx context.Context, req *CommentListReq, opts ...client.CallOption) (*CommentListRsp, error)
|
||||
CommentDelete(ctx context.Context, req *CommentDeleteReq, opts ...client.CallOption) (*CommentDeleteRsp, error)
|
||||
ModuleList(ctx context.Context, req *ModuleListReq, opts ...client.CallOption) (*ModuleListRsp, error)
|
||||
}
|
||||
|
||||
type PkgdashServer interface {
|
||||
PackageLookup(ctx context.Context, req *PackageLookupReq, rsp *PackageLookupRsp) error
|
||||
PackageCreate(ctx context.Context, req *PackageCreateReq, rsp *PackageCreateRsp) error
|
||||
PackageDelete(ctx context.Context, req *PackageDeleteReq, rsp *PackageDeleteRsp) error
|
||||
PackageList(ctx context.Context, req *PackageListReq, rsp *PackageListRsp) error
|
||||
HandlerList(ctx context.Context, req *HandlerListReq, rsp *HandlerListRsp) error
|
||||
PackageModules(ctx context.Context, req *PackageModulesReq, rsp *PackageModulesRsp) error
|
||||
PackageUpdate(ctx context.Context, req *PackageUpdateReq, rsp *PackageUpdateRsp) error
|
||||
CommentCreate(ctx context.Context, req *CommentCreateReq, rsp *CommentCreateRsp) error
|
||||
CommentLookup(ctx context.Context, req *CommentLookupReq, rsp *CommentLookupRsp) error
|
||||
CommentList(ctx context.Context, req *CommentListReq, rsp *CommentListRsp) error
|
||||
CommentDelete(ctx context.Context, req *CommentDeleteReq, rsp *CommentDeleteRsp) error
|
||||
ModuleList(ctx context.Context, req *ModuleListReq, rsp *ModuleListRsp) error
|
||||
}
|
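The generated `PkgdashClient` interface above only declares the RPC surface; the sketch below is a hypothetical usage example (not part of the diff), assuming an already-initialized `client.Client` from `go.unistack.org/micro/v3/client`. Only `NewPkgdashClient` (defined in the HTTP-transport file that follows), `PackageListReq` and `PackageListRsp` come from the generated code; the helper name and the printing are illustrative.

```go
package pkgdashpb

import (
	"context"
	"fmt"

	client "go.unistack.org/micro/v3/client"
)

// listPackages is a hypothetical helper: c is assumed to be a configured
// client.Client (for the HTTP bindings below that would be the
// micro-client-http implementation); it calls the generated PackageList RPC.
func listPackages(ctx context.Context, c client.Client) error {
	pd := NewPkgdashClient("pkgdash", c)
	rsp, err := pd.PackageList(ctx, &PackageListReq{})
	if err != nil {
		return err
	}
	fmt.Printf("got %d packages\n", len(rsp.Packages))
	return nil
}
```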
421
proto/pkgdash_micro_http.pb.go
Normal file
@ -0,0 +1,421 @@
|
||||
// Code generated by protoc-gen-go-micro. DO NOT EDIT.
|
||||
// protoc-gen-go-micro version: v3.10.4
|
||||
// source: pkgdash.proto
|
||||
|
||||
package pkgdashpb
|
||||
|
||||
import (
|
||||
context "context"
|
||||
v31 "go.unistack.org/micro-client-http/v3"
|
||||
v3 "go.unistack.org/micro-server-http/v3"
|
||||
client "go.unistack.org/micro/v3/client"
|
||||
server "go.unistack.org/micro/v3/server"
|
||||
http "net/http"
|
||||
)
|
||||
|
||||
var (
|
||||
PkgdashServerEndpoints = []v3.EndpointMetadata{
|
||||
{
|
||||
Name: "Pkgdash.PackageLookup",
|
||||
Path: "/v1/packages/{id}",
|
||||
Method: "GET",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.PackageCreate",
|
||||
Path: "/v1/packages",
|
||||
Method: "POST",
|
||||
Body: "*",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.PackageDelete",
|
||||
Path: "/v1/packages/{id}",
|
||||
Method: "DELETE",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.PackageList",
|
||||
Path: "/v1/packages",
|
||||
Method: "GET",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.HandlerList",
|
||||
Path: "/v1/packages/{package}/handlers",
|
||||
Method: "GET",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.PackageModules",
|
||||
Path: "/v1/packages/{package}/modules",
|
||||
Method: "GET",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.PackageUpdate",
|
||||
Path: "/v1/packages/{id}",
|
||||
Method: "PUT",
|
||||
Body: "*",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.CommentCreate",
|
||||
Path: "/v1/packages/{package}/comments",
|
||||
Method: "POST",
|
||||
Body: "*",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.CommentLookup",
|
||||
Path: "/v1/comments/{id}",
|
||||
Method: "GET",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.CommentLookup",
|
||||
Path: "/v1/comments/{package}/comments/{id}",
|
||||
Method: "GET",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.CommentList",
|
||||
Path: "/v1/packages/{package}/comments",
|
||||
Method: "GET",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.CommentDelete",
|
||||
Path: "/v1/packages/{package_id}/comments/{id}",
|
||||
Method: "DELETE",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.CommentDelete",
|
||||
Path: "/v1/comments/{id}",
|
||||
Method: "DELETE",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
{
|
||||
Name: "Pkgdash.ModuleList",
|
||||
Path: "/v1/modules",
|
||||
Method: "GET",
|
||||
Body: "",
|
||||
Stream: false,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
type pkgdashClient struct {
|
||||
c client.Client
|
||||
name string
|
||||
}
|
||||
|
||||
func NewPkgdashClient(name string, c client.Client) PkgdashClient {
|
||||
return &pkgdashClient{c: c, name: name}
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) PackageLookup(ctx context.Context, req *PackageLookupReq, opts ...client.CallOption) (*PackageLookupRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodGet),
|
||||
v31.Path("/v1/packages/{id}"),
|
||||
)
|
||||
rsp := &PackageLookupRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageLookup", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) PackageCreate(ctx context.Context, req *PackageCreateReq, opts ...client.CallOption) (*PackageCreateRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodPost),
|
||||
v31.Path("/v1/packages"),
|
||||
v31.Body("*"),
|
||||
)
|
||||
rsp := &PackageCreateRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageCreate", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) PackageDelete(ctx context.Context, req *PackageDeleteReq, opts ...client.CallOption) (*PackageDeleteRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodDelete),
|
||||
v31.Path("/v1/packages/{id}"),
|
||||
)
|
||||
rsp := &PackageDeleteRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageDelete", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) PackageList(ctx context.Context, req *PackageListReq, opts ...client.CallOption) (*PackageListRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodGet),
|
||||
v31.Path("/v1/packages"),
|
||||
)
|
||||
rsp := &PackageListRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageList", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) HandlerList(ctx context.Context, req *HandlerListReq, opts ...client.CallOption) (*HandlerListRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodGet),
|
||||
v31.Path("/v1/packages/{package}/handlers"),
|
||||
)
|
||||
rsp := &HandlerListRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.HandlerList", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) PackageModules(ctx context.Context, req *PackageModulesReq, opts ...client.CallOption) (*PackageModulesRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodGet),
|
||||
v31.Path("/v1/packages/{package}/modules"),
|
||||
)
|
||||
rsp := &PackageModulesRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageModules", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) PackageUpdate(ctx context.Context, req *PackageUpdateReq, opts ...client.CallOption) (*PackageUpdateRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodPut),
|
||||
v31.Path("/v1/packages/{id}"),
|
||||
v31.Body("*"),
|
||||
)
|
||||
rsp := &PackageUpdateRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageUpdate", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) CommentCreate(ctx context.Context, req *CommentCreateReq, opts ...client.CallOption) (*CommentCreateRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodPost),
|
||||
v31.Path("/v1/packages/{package}/comments"),
|
||||
v31.Body("*"),
|
||||
)
|
||||
rsp := &CommentCreateRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.CommentCreate", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) CommentLookup(ctx context.Context, req *CommentLookupReq, opts ...client.CallOption) (*CommentLookupRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodGet),
|
||||
v31.Path("/v1/comments/{id}"),
|
||||
)
|
||||
rsp := &CommentLookupRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.CommentLookup", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) CommentList(ctx context.Context, req *CommentListReq, opts ...client.CallOption) (*CommentListRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodGet),
|
||||
v31.Path("/v1/packages/{package}/comments"),
|
||||
)
|
||||
rsp := &CommentListRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.CommentList", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) CommentDelete(ctx context.Context, req *CommentDeleteReq, opts ...client.CallOption) (*CommentDeleteRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodDelete),
|
||||
v31.Path("/v1/packages/{package_id}/comments/{id}"),
|
||||
)
|
||||
rsp := &CommentDeleteRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.CommentDelete", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
func (c *pkgdashClient) ModuleList(ctx context.Context, req *ModuleListReq, opts ...client.CallOption) (*ModuleListRsp, error) {
|
||||
errmap := make(map[string]interface{}, 1)
|
||||
errmap["default"] = &ErrorRsp{}
|
||||
opts = append(opts,
|
||||
v31.ErrorMap(errmap),
|
||||
)
|
||||
opts = append(opts,
|
||||
v31.Method(http.MethodGet),
|
||||
v31.Path("/v1/modules"),
|
||||
)
|
||||
rsp := &ModuleListRsp{}
|
||||
err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.ModuleList", req), rsp, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rsp, nil
|
||||
}
|
||||
|
||||
type pkgdashServer struct {
|
||||
PkgdashServer
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) PackageLookup(ctx context.Context, req *PackageLookupReq, rsp *PackageLookupRsp) error {
|
||||
return h.PkgdashServer.PackageLookup(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) PackageCreate(ctx context.Context, req *PackageCreateReq, rsp *PackageCreateRsp) error {
|
||||
return h.PkgdashServer.PackageCreate(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) PackageDelete(ctx context.Context, req *PackageDeleteReq, rsp *PackageDeleteRsp) error {
|
||||
return h.PkgdashServer.PackageDelete(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) PackageList(ctx context.Context, req *PackageListReq, rsp *PackageListRsp) error {
|
||||
return h.PkgdashServer.PackageList(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) HandlerList(ctx context.Context, req *HandlerListReq, rsp *HandlerListRsp) error {
|
||||
return h.PkgdashServer.HandlerList(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) PackageModules(ctx context.Context, req *PackageModulesReq, rsp *PackageModulesRsp) error {
|
||||
return h.PkgdashServer.PackageModules(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) PackageUpdate(ctx context.Context, req *PackageUpdateReq, rsp *PackageUpdateRsp) error {
|
||||
return h.PkgdashServer.PackageUpdate(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) CommentCreate(ctx context.Context, req *CommentCreateReq, rsp *CommentCreateRsp) error {
|
||||
return h.PkgdashServer.CommentCreate(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) CommentLookup(ctx context.Context, req *CommentLookupReq, rsp *CommentLookupRsp) error {
|
||||
return h.PkgdashServer.CommentLookup(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) CommentList(ctx context.Context, req *CommentListReq, rsp *CommentListRsp) error {
|
||||
return h.PkgdashServer.CommentList(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) CommentDelete(ctx context.Context, req *CommentDeleteReq, rsp *CommentDeleteRsp) error {
|
||||
return h.PkgdashServer.CommentDelete(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func (h *pkgdashServer) ModuleList(ctx context.Context, req *ModuleListReq, rsp *ModuleListRsp) error {
|
||||
return h.PkgdashServer.ModuleList(ctx, req, rsp)
|
||||
}
|
||||
|
||||
func RegisterPkgdashServer(s server.Server, sh PkgdashServer, opts ...server.HandlerOption) error {
|
||||
type pkgdash interface {
|
||||
PackageLookup(ctx context.Context, req *PackageLookupReq, rsp *PackageLookupRsp) error
|
||||
PackageCreate(ctx context.Context, req *PackageCreateReq, rsp *PackageCreateRsp) error
|
||||
PackageDelete(ctx context.Context, req *PackageDeleteReq, rsp *PackageDeleteRsp) error
|
||||
PackageList(ctx context.Context, req *PackageListReq, rsp *PackageListRsp) error
|
||||
HandlerList(ctx context.Context, req *HandlerListReq, rsp *HandlerListRsp) error
|
||||
PackageModules(ctx context.Context, req *PackageModulesReq, rsp *PackageModulesRsp) error
|
||||
PackageUpdate(ctx context.Context, req *PackageUpdateReq, rsp *PackageUpdateRsp) error
|
||||
CommentCreate(ctx context.Context, req *CommentCreateReq, rsp *CommentCreateRsp) error
|
||||
CommentLookup(ctx context.Context, req *CommentLookupReq, rsp *CommentLookupRsp) error
|
||||
CommentList(ctx context.Context, req *CommentListReq, rsp *CommentListRsp) error
|
||||
CommentDelete(ctx context.Context, req *CommentDeleteReq, rsp *CommentDeleteRsp) error
|
||||
ModuleList(ctx context.Context, req *ModuleListReq, rsp *ModuleListRsp) error
|
||||
}
|
||||
type Pkgdash struct {
|
||||
pkgdash
|
||||
}
|
||||
h := &pkgdashServer{sh}
|
||||
var nopts []server.HandlerOption
|
||||
nopts = append(nopts, v3.HandlerEndpoints(PkgdashServerEndpoints))
|
||||
return s.Handle(s.NewHandler(&Pkgdash{h}, append(nopts, opts...)...))
|
||||
}
|
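On the server side, `RegisterPkgdashServer` above attaches the `PkgdashServerEndpoints` metadata and registers a `PkgdashServer` implementation on a `server.Server`. The sketch below is a hypothetical wiring example (not part of the diff): the `handler` type, its embedded interface, and `registerHandler` are illustrative, and a real service must implement every method of `PkgdashServer`.

```go
package pkgdashpb

import (
	"context"

	server "go.unistack.org/micro/v3/server"
)

// handler is a hypothetical, partial PkgdashServer implementation: only
// ModuleList is filled in; embedding the interface keeps the sketch compiling,
// a real service would implement all twelve methods.
type handler struct {
	PkgdashServer
}

func (h *handler) ModuleList(ctx context.Context, req *ModuleListReq, rsp *ModuleListRsp) error {
	// a real implementation would load the modules of req.Package from storage
	rsp.Modules = []*Module{}
	return nil
}

// registerHandler wires the implementation into an already-configured
// server.Server; RegisterPkgdashServer adds PkgdashServerEndpoints
// (the paths and methods listed above) as handler metadata.
func registerHandler(srv server.Server) error {
	return RegisterPkgdashServer(srv, &handler{})
}
```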
16
ui/.browserslistrc
Normal file
@ -0,0 +1,16 @@
|
||||
# This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
|
||||
# For additional information regarding the format and rule options, please see:
|
||||
# https://github.com/browserslist/browserslist#queries
|
||||
|
||||
# For the full list of supported browsers by the Angular framework, please see:
|
||||
# https://angular.io/guide/browser-support
|
||||
|
||||
# You can see what browsers were selected by your queries by running:
|
||||
# npx browserslist
|
||||
|
||||
last 1 Chrome version
|
||||
last 1 Firefox version
|
||||
last 2 Edge major versions
|
||||
last 2 Safari major versions
|
||||
last 2 iOS major versions
|
||||
Firefox ESR
|
3
ui/.dockerignore
Normal file
@ -0,0 +1,3 @@
|
||||
node_modules
|
||||
*/node_modules
|
||||
**/node_modules
|
16
ui/.editorconfig
Normal file
@ -0,0 +1,16 @@
|
||||
# Editor configuration, see https://editorconfig.org
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*.ts]
|
||||
quote_type = single
|
||||
|
||||
[*.md]
|
||||
max_line_length = off
|
||||
trim_trailing_whitespace = false
|
42
ui/.gitignore
vendored
Normal file
@ -0,0 +1,42 @@
|
||||
# See http://help.github.com/ignore-files/ for more about ignoring files.
|
||||
|
||||
# Compiled output
|
||||
/dist
|
||||
/tmp
|
||||
/out-tsc
|
||||
/bazel-out
|
||||
|
||||
# Node
|
||||
/node_modules
|
||||
npm-debug.log
|
||||
yarn-error.log
|
||||
|
||||
# IDEs and editors
|
||||
.idea/
|
||||
.project
|
||||
.classpath
|
||||
.c9/
|
||||
*.launch
|
||||
.settings/
|
||||
*.sublime-workspace
|
||||
|
||||
# Visual Studio Code
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
.history/*
|
||||
|
||||
# Miscellaneous
|
||||
/.angular/cache
|
||||
.sass-cache/
|
||||
/connect.lock
|
||||
/coverage
|
||||
/libpeerconnection.log
|
||||
testem.log
|
||||
/typings
|
||||
|
||||
# System files
|
||||
.DS_Store
|
||||
Thumbs.db
|
27
ui/README.md
Normal file
@ -0,0 +1,27 @@
|
||||
# Ui
|
||||
|
||||
This project was generated with [Angular CLI](https://github.com/angular/angular-cli) version 13.1.4.
|
||||
|
||||
## Development server
|
||||
|
||||
Run `ng serve` for a dev server. Navigate to `http://localhost:4200/`. The app will automatically reload if you change any of the source files.
|
||||
|
||||
## Code scaffolding
|
||||
|
||||
Run `ng generate component component-name` to generate a new component. You can also use `ng generate directive|pipe|service|class|guard|interface|enum|module`.
|
||||
|
||||
## Build
|
||||
|
||||
Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory.
|
||||
|
||||
## Running unit tests
|
||||
|
||||
Run `ng test` to execute the unit tests via [Karma](https://karma-runner.github.io).
|
||||
|
||||
## Running end-to-end tests
|
||||
|
||||
Run `ng e2e` to execute the end-to-end tests via a platform of your choice. To use this command, you need to first add a package that implements end-to-end testing capabilities.
|
||||
|
||||
## Further help
|
||||
|
||||
To get more help on the Angular CLI use `ng help` or go check out the [Angular CLI Overview and Command Reference](https://angular.io/cli) page.
|
115
ui/angular.json
Normal file
@ -0,0 +1,115 @@
|
||||
{
|
||||
"$schema": "./node_modules/@angular/cli/lib/config/schema.json",
|
||||
"version": 1,
|
||||
"newProjectRoot": "projects",
|
||||
"projects": {
|
||||
"ui": {
|
||||
"projectType": "application",
|
||||
"schematics": {
|
||||
"@schematics/angular:component": {
|
||||
"style": "scss"
|
||||
},
|
||||
"@schematics/angular:application": {
|
||||
"strict": true
|
||||
}
|
||||
},
|
||||
"root": "",
|
||||
"sourceRoot": "src",
|
||||
"prefix": "app",
|
||||
"architect": {
|
||||
"build": {
|
||||
"builder": "@angular-devkit/build-angular:browser",
|
||||
"options": {
|
||||
"outputPath": "dist/ui",
|
||||
"index": "src/index.html",
|
||||
"main": "src/main.ts",
|
||||
"polyfills": "src/polyfills.ts",
|
||||
"tsConfig": "tsconfig.app.json",
|
||||
"inlineStyleLanguage": "scss",
|
||||
"assets": [
|
||||
"src/favicon.ico",
|
||||
"src/assets"
|
||||
],
|
||||
"styles": [
|
||||
"./node_modules/@angular/material/prebuilt-themes/indigo-pink.css",
|
||||
"src/styles.scss"
|
||||
],
|
||||
"scripts": []
|
||||
},
|
||||
"configurations": {
|
||||
"production": {
|
||||
"budgets": [
|
||||
{
|
||||
"type": "initial",
|
||||
"maximumWarning": "500kb",
|
||||
"maximumError": "1mb"
|
||||
},
|
||||
{
|
||||
"type": "anyComponentStyle",
|
||||
"maximumWarning": "2kb",
|
||||
"maximumError": "4kb"
|
||||
}
|
||||
],
|
||||
"fileReplacements": [
|
||||
{
|
||||
"replace": "src/environments/environment.ts",
|
||||
"with": "src/environments/environment.prod.ts"
|
||||
}
|
||||
],
|
||||
"outputHashing": "all"
|
||||
},
|
||||
"development": {
|
||||
"buildOptimizer": false,
|
||||
"optimization": false,
|
||||
"vendorChunk": true,
|
||||
"extractLicenses": false,
|
||||
"sourceMap": true,
|
||||
"namedChunks": true
|
||||
}
|
||||
},
|
||||
"defaultConfiguration": "production"
|
||||
},
|
||||
"serve": {
|
||||
"builder": "@angular-devkit/build-angular:dev-server",
|
||||
"configurations": {
|
||||
"production": {
|
||||
"browserTarget": "ui:build:production"
|
||||
},
|
||||
"development": {
|
||||
"browserTarget": "ui:build:development"
|
||||
}
|
||||
},
|
||||
"defaultConfiguration": "development"
|
||||
},
|
||||
"extract-i18n": {
|
||||
"builder": "@angular-devkit/build-angular:extract-i18n",
|
||||
"options": {
|
||||
"browserTarget": "ui:build"
|
||||
}
|
||||
},
|
||||
"test": {
|
||||
"builder": "@angular-devkit/build-angular:karma",
|
||||
"options": {
|
||||
"main": "src/test.ts",
|
||||
"polyfills": "src/polyfills.ts",
|
||||
"tsConfig": "tsconfig.spec.json",
|
||||
"karmaConfig": "karma.conf.js",
|
||||
"inlineStyleLanguage": "scss",
|
||||
"assets": [
|
||||
"src/favicon.ico",
|
||||
"src/assets"
|
||||
],
|
||||
"styles": [
|
||||
"./node_modules/@angular/material/prebuilt-themes/indigo-pink.css",
|
||||
"src/styles.scss"
|
||||
],
|
||||
"scripts": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"cli": {
|
||||
"analytics": false
|
||||
}
|
||||
}
|
44
ui/karma.conf.js
Normal file
@ -0,0 +1,44 @@
|
||||
// Karma configuration file, see link for more information
|
||||
// https://karma-runner.github.io/1.0/config/configuration-file.html
|
||||
|
||||
module.exports = function (config) {
|
||||
config.set({
|
||||
basePath: '',
|
||||
frameworks: ['jasmine', '@angular-devkit/build-angular'],
|
||||
plugins: [
|
||||
require('karma-jasmine'),
|
||||
require('karma-chrome-launcher'),
|
||||
require('karma-jasmine-html-reporter'),
|
||||
require('karma-coverage'),
|
||||
require('@angular-devkit/build-angular/plugins/karma')
|
||||
],
|
||||
client: {
|
||||
jasmine: {
|
||||
// you can add configuration options for Jasmine here
|
||||
// the possible options are listed at https://jasmine.github.io/api/edge/Configuration.html
|
||||
// for example, you can disable the random execution with `random: false`
|
||||
// or set a specific seed with `seed: 4321`
|
||||
},
|
||||
clearContext: false // leave Jasmine Spec Runner output visible in browser
|
||||
},
|
||||
jasmineHtmlReporter: {
|
||||
suppressAll: true // removes the duplicated traces
|
||||
},
|
||||
coverageReporter: {
|
||||
dir: require('path').join(__dirname, './coverage/ui'),
|
||||
subdir: '.',
|
||||
reporters: [
|
||||
{ type: 'html' },
|
||||
{ type: 'text-summary' }
|
||||
]
|
||||
},
|
||||
reporters: ['progress', 'kjhtml'],
|
||||
port: 9876,
|
||||
colors: true,
|
||||
logLevel: config.LOG_INFO,
|
||||
autoWatch: true,
|
||||
browsers: ['Chrome'],
|
||||
singleRun: false,
|
||||
restartOnFileChange: true
|
||||
});
|
||||
};
|
12910
ui/package-lock.json
generated
Normal file
File diff suppressed because it is too large
43
ui/package.json
Normal file
@ -0,0 +1,43 @@
|
||||
{
|
||||
"name": "ui",
|
||||
"version": "0.0.1",
|
||||
"scripts": {
|
||||
"ng": "ng",
|
||||
"start": "ng serve",
|
||||
"build": "ng build",
|
||||
"devbuild": "ng build --configuration development",
|
||||
"watch": "ng build --watch --configuration development",
|
||||
"test": "ng test"
|
||||
},
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@angular/animations": "^14.3.0",
|
||||
"@angular/cdk": "^13.0.0",
|
||||
"@angular/common": "^14.3.0",
|
||||
"@angular/compiler": "^14.3.0",
|
||||
"@angular/core": "^14.3.0",
|
||||
"@angular/forms": "^14.3.0",
|
||||
"@angular/material": "^13.0.0",
|
||||
"@angular/platform-browser": "^14.3.0",
|
||||
"@angular/platform-browser-dynamic": "^14.3.0",
|
||||
"@angular/router": "^14.3.0",
|
||||
"ng-openapi-gen": "^0.25.1",
|
||||
"rxjs": "~7.4.0",
|
||||
"tslib": "^2.3.0",
|
||||
"zone.js": "~0.11.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@angular-devkit/build-angular": "^14.2.12",
|
||||
"@angular/cli": "^14.2.12",
|
||||
"@angular/compiler-cli": "^14.3.0",
|
||||
"@types/jasmine": "~3.10.0",
|
||||
"@types/node": "^12.11.1",
|
||||
"jasmine-core": "~3.10.0",
|
||||
"karma": "~6.3.0",
|
||||
"karma-chrome-launcher": "~3.1.0",
|
||||
"karma-coverage": "~2.1.0",
|
||||
"karma-jasmine": "~4.0.0",
|
||||
"karma-jasmine-html-reporter": "~1.7.0",
|
||||
"typescript": "~4.8.4"
|
||||
}
|
||||
}
|
20
ui/src/app/api/api-configuration.ts
Normal file
@ -0,0 +1,20 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
import { Injectable } from '@angular/core';
|
||||
|
||||
/**
|
||||
* Global configuration
|
||||
*/
|
||||
@Injectable({
|
||||
providedIn: 'root',
|
||||
})
|
||||
export class ApiConfiguration {
|
||||
rootUrl: string = '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for `ApiModule.forRoot()`
|
||||
*/
|
||||
export interface ApiConfigurationParams {
|
||||
rootUrl?: string;
|
||||
}
|
46
ui/src/app/api/api.module.ts
Normal file
@ -0,0 +1,46 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
import { NgModule, ModuleWithProviders, SkipSelf, Optional } from '@angular/core';
|
||||
import { HttpClient } from '@angular/common/http';
|
||||
import { ApiConfiguration, ApiConfigurationParams } from './api-configuration';
|
||||
|
||||
import { PkgdashService } from './services/pkgdash.service';
|
||||
|
||||
/**
|
||||
* Module that provides all services and configuration.
|
||||
*/
|
||||
@NgModule({
|
||||
imports: [],
|
||||
exports: [],
|
||||
declarations: [],
|
||||
providers: [
|
||||
PkgdashService,
|
||||
ApiConfiguration
|
||||
],
|
||||
})
|
||||
export class ApiModule {
|
||||
static forRoot(params: ApiConfigurationParams): ModuleWithProviders<ApiModule> {
|
||||
return {
|
||||
ngModule: ApiModule,
|
||||
providers: [
|
||||
{
|
||||
provide: ApiConfiguration,
|
||||
useValue: params
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
constructor(
|
||||
@Optional() @SkipSelf() parentModule: ApiModule,
|
||||
@Optional() http: HttpClient
|
||||
) {
|
||||
if (parentModule) {
|
||||
throw new Error('ApiModule is already loaded. Import in your base AppModule only.');
|
||||
}
|
||||
if (!http) {
|
||||
throw new Error('You need to import the HttpClientModule in your AppModule! \n' +
|
||||
'See also https://github.com/angular/angular/issues/20575');
|
||||
}
|
||||
}
|
||||
}
|
34
ui/src/app/api/base-service.ts
Normal file
@ -0,0 +1,34 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
import { Injectable } from '@angular/core';
|
||||
import { HttpClient } from '@angular/common/http';
|
||||
import { ApiConfiguration } from './api-configuration';
|
||||
|
||||
/**
|
||||
* Base class for services
|
||||
*/
|
||||
@Injectable()
|
||||
export class BaseService {
|
||||
constructor(
|
||||
protected config: ApiConfiguration,
|
||||
protected http: HttpClient
|
||||
) {
|
||||
}
|
||||
|
||||
private _rootUrl: string = '';
|
||||
|
||||
/**
|
||||
* Returns the root url for all operations in this service. If not set directly in this
|
||||
* service, it will fall back to `ApiConfiguration.rootUrl`.
|
||||
*/
|
||||
get rootUrl(): string {
|
||||
return this._rootUrl || this.config.rootUrl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the root URL for API operations in this service.
|
||||
*/
|
||||
set rootUrl(rootUrl: string) {
|
||||
this._rootUrl = rootUrl;
|
||||
}
|
||||
}
|
22
ui/src/app/api/models.ts
Normal file
@ -0,0 +1,22 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
export { Comment } from './models/comment';
|
||||
export { CommentCreateReq } from './models/comment-create-req';
|
||||
export { CommentCreateRsp } from './models/comment-create-rsp';
|
||||
export { CommentDeleteRsp } from './models/comment-delete-rsp';
|
||||
export { CommentListRsp } from './models/comment-list-rsp';
|
||||
export { CommentLookupRsp } from './models/comment-lookup-rsp';
|
||||
export { ErrorRsp } from './models/error-rsp';
|
||||
export { Handler } from './models/handler';
|
||||
export { HandlerListRsp } from './models/handler-list-rsp';
|
||||
export { Module } from './models/module';
|
||||
export { ModuleListRsp } from './models/module-list-rsp';
|
||||
export { Package } from './models/package';
|
||||
export { PackageCreateReq } from './models/package-create-req';
|
||||
export { PackageCreateRsp } from './models/package-create-rsp';
|
||||
export { PackageDeleteRsp } from './models/package-delete-rsp';
|
||||
export { PackageListRsp } from './models/package-list-rsp';
|
||||
export { PackageLookupRsp } from './models/package-lookup-rsp';
|
||||
export { PackageModulesRsp } from './models/package-modules-rsp';
|
||||
export { PackageUpdateReq } from './models/package-update-req';
|
||||
export { PackageUpdateRsp } from './models/package-update-rsp';
|
6
ui/src/app/api/models/comment-create-req.ts
Normal file
@ -0,0 +1,6 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
export interface CommentCreateReq {
|
||||
comment?: string;
|
||||
package_id?: number;
|
||||
}
|
6
ui/src/app/api/models/comment-create-rsp.ts
Normal file
@ -0,0 +1,6 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
import { Comment } from './comment';
|
||||
export interface CommentCreateRsp {
|
||||
comment?: Comment;
|
||||
}
|
4
ui/src/app/api/models/comment-delete-rsp.ts
Normal file
@ -0,0 +1,4 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
export interface CommentDeleteRsp {
|
||||
}
|
6
ui/src/app/api/models/comment-list-rsp.ts
Normal file
@ -0,0 +1,6 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
import { Comment } from './comment';
|
||||
export interface CommentListRsp {
|
||||
comments?: Array<Comment>;
|
||||
}
|
6
ui/src/app/api/models/comment-lookup-rsp.ts
Normal file
@ -0,0 +1,6 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
import { Comment } from './comment';
|
||||
export interface CommentLookupRsp {
|
||||
comment?: Comment;
|
||||
}
|
9
ui/src/app/api/models/comment.ts
Normal file
@ -0,0 +1,9 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
export interface Comment {
|
||||
comment?: string;
|
||||
created?: string;
|
||||
id?: number;
|
||||
package?: number;
|
||||
updated?: string;
|
||||
}
|
8
ui/src/app/api/models/error-rsp.ts
Normal file
@ -0,0 +1,8 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
export interface ErrorRsp {
|
||||
code?: string;
|
||||
details?: string;
|
||||
title?: string;
|
||||
uuid?: string;
|
||||
}
|
6
ui/src/app/api/models/handler-list-rsp.ts
Normal file
@ -0,0 +1,6 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
import { Handler } from './handler';
|
||||
export interface HandlerListRsp {
|
||||
handlers?: Array<Handler>;
|
||||
}
|
8
ui/src/app/api/models/handler.ts
Normal file
@ -0,0 +1,8 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
export interface Handler {
|
||||
coverage?: number;
|
||||
id?: number;
|
||||
name?: string;
|
||||
package?: number;
|
||||
}
|
6
ui/src/app/api/models/module-list-rsp.ts
Normal file
@ -0,0 +1,6 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
import { Module } from './module';
|
||||
export interface ModuleListRsp {
|
||||
modules?: Array<Module>;
|
||||
}
|
8
ui/src/app/api/models/module.ts
Normal file
@ -0,0 +1,8 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
export interface Module {
|
||||
id?: number;
|
||||
last_check?: string;
|
||||
name?: string;
|
||||
version?: string;
|
||||
}
|
7
ui/src/app/api/models/package-create-req.ts
Normal file
@ -0,0 +1,7 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
export interface PackageCreateReq {
|
||||
description?: string;
|
||||
name?: string;
|
||||
url?: string;
|
||||
}
|
6
ui/src/app/api/models/package-create-rsp.ts
Normal file
@ -0,0 +1,6 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
import { Package } from './package';
|
||||
export interface PackageCreateRsp {
|
||||
package?: Package;
|
||||
}
|
4
ui/src/app/api/models/package-delete-rsp.ts
Normal file
@ -0,0 +1,4 @@
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
export interface PackageDeleteRsp {
|
||||
}
|
Some files were not shown because too many files have changed in this diff.