Signed-off-by: Vasiliy Tolstov <v.tolstov@unistack.org>
Василий Толстов 2024-12-07 02:35:30 +03:00
parent 8729d0b88e
commit c5f3fa325e
42 changed files with 1316 additions and 2009 deletions


@@ -1,15 +1,15 @@
 .PHONY: build
 build:
-	GOWORK=off CGO_ENABLED=0 go build -o bin/pkgdash -mod=readonly git.unistack.org/unistack-org/pkgdash/cmd/pkgdash
-	GOWORK=off CGO_ENABLED=0 go build -o bin/pkgdashcli -mod=readonly git.unistack.org/unistack-org/pkgdash/cmd/pkgdashcli
+	GOWORK=off CGO_ENABLED=0 go build -o bin/pkgdash -mod=readonly go.unistack.org/pkgdash/cmd/pkgdash
+	GOWORK=off CGO_ENABLED=0 go build -o bin/pkgdashcli -mod=readonly go.unistack.org/pkgdash/cmd/pkgdashcli
 .PHONY: buildcli
 buildcli:
-	CGO_ENABLED=0 go build -o bin/app -mod=readonly git.unistack.org/unistack-org/pkgdash/cmd/pkgdashcli
+	CGO_ENABLED=0 go build -o bin/app -mod=readonly go.unistack.org/pkgdash/cmd/pkgdashcli
 .PHONY: cli
 cli:
-	go install git.unistack.org/unistack-org/pkgdash/cmd/pkgdashcli
+	go install go.unistack.org/pkgdash/cmd/pkgdashcli
 .PHONY: test
 test:


@ -4996,243 +4996,243 @@ __webpack_require__.r(__webpack_exports__);
/* harmony export */ "__spreadArrays": () => (/* binding */ __spreadArrays), /* harmony export */ "__spreadArrays": () => (/* binding */ __spreadArrays),
/* harmony export */ "__values": () => (/* binding */ __values) /* harmony export */ "__values": () => (/* binding */ __values)
/* harmony export */ }); /* harmony export */ });
/*! ***************************************************************************** /*! *****************************************************************************
Copyright (c) Microsoft Corporation. Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted. purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE. PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */ ***************************************************************************** */
/* global Reflect, Promise */ /* global Reflect, Promise */
var extendStatics = function(d, b) { var extendStatics = function(d, b) {
extendStatics = Object.setPrototypeOf || extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b); return extendStatics(d, b);
}; };
function __extends(d, b) { function __extends(d, b) {
if (typeof b !== "function" && b !== null) if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b); extendStatics(d, b);
function __() { this.constructor = d; } function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
} }
var __assign = function() { var __assign = function() {
__assign = Object.assign || function __assign(t) { __assign = Object.assign || function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) { for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i]; s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
} }
return t; return t;
} }
return __assign.apply(this, arguments); return __assign.apply(this, arguments);
} }
function __rest(s, e) { function __rest(s, e) {
var t = {}; var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p]; t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function") if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]]; t[p[i]] = s[p[i]];
} }
return t; return t;
} }
function __decorate(decorators, target, key, desc) { function __decorate(decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r; return c > 3 && r && Object.defineProperty(target, key, r), r;
} }
function __param(paramIndex, decorator) { function __param(paramIndex, decorator) {
return function (target, key) { decorator(target, key, paramIndex); } return function (target, key) { decorator(target, key, paramIndex); }
} }
function __metadata(metadataKey, metadataValue) { function __metadata(metadataKey, metadataValue) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
} }
function __awaiter(thisArg, _arguments, P, generator) { function __awaiter(thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) { return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next()); step((generator = generator.apply(thisArg, _arguments || [])).next());
}); });
} }
function __generator(thisArg, body) { function __generator(thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; } function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) { function step(op) {
if (f) throw new TypeError("Generator is already executing."); if (f) throw new TypeError("Generator is already executing.");
while (_) try { while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value]; if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) { switch (op[0]) {
case 0: case 1: t = op; break; case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false }; case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue; case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue; case 7: op = _.ops.pop(); _.trys.pop(); continue;
default: default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop(); if (t[2]) _.ops.pop();
_.trys.pop(); continue; _.trys.pop(); continue;
} }
op = body.call(thisArg, _); op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
} }
} }
var __createBinding = Object.create ? (function(o, m, k, k2) { var __createBinding = Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k; if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) { }) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k; if (k2 === undefined) k2 = k;
o[k2] = m[k]; o[k2] = m[k];
}); });
function __exportStar(m, o) { function __exportStar(m, o) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
} }
function __values(o) { function __values(o) {
var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
if (m) return m.call(o); if (m) return m.call(o);
if (o && typeof o.length === "number") return { if (o && typeof o.length === "number") return {
next: function () { next: function () {
if (o && i >= o.length) o = void 0; if (o && i >= o.length) o = void 0;
return { value: o && o[i++], done: !o }; return { value: o && o[i++], done: !o };
} }
}; };
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
} }
function __read(o, n) { function __read(o, n) {
var m = typeof Symbol === "function" && o[Symbol.iterator]; var m = typeof Symbol === "function" && o[Symbol.iterator];
if (!m) return o; if (!m) return o;
var i = m.call(o), r, ar = [], e; var i = m.call(o), r, ar = [], e;
try { try {
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
} }
catch (error) { e = { error: error }; } catch (error) { e = { error: error }; }
finally { finally {
try { try {
if (r && !r.done && (m = i["return"])) m.call(i); if (r && !r.done && (m = i["return"])) m.call(i);
} }
finally { if (e) throw e.error; } finally { if (e) throw e.error; }
} }
return ar; return ar;
} }
/** @deprecated */ /** @deprecated */
function __spread() { function __spread() {
for (var ar = [], i = 0; i < arguments.length; i++) for (var ar = [], i = 0; i < arguments.length; i++)
ar = ar.concat(__read(arguments[i])); ar = ar.concat(__read(arguments[i]));
return ar; return ar;
} }
/** @deprecated */ /** @deprecated */
function __spreadArrays() { function __spreadArrays() {
for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
for (var r = Array(s), k = 0, i = 0; i < il; i++) for (var r = Array(s), k = 0, i = 0; i < il; i++)
for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
r[k] = a[j]; r[k] = a[j];
return r; return r;
} }
function __spreadArray(to, from) { function __spreadArray(to, from) {
for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) for (var i = 0, il = from.length, j = to.length; i < il; i++, j++)
to[j] = from[i]; to[j] = from[i];
return to; return to;
} }
function __await(v) { function __await(v) {
return this instanceof __await ? (this.v = v, this) : new __await(v); return this instanceof __await ? (this.v = v, this) : new __await(v);
} }
function __asyncGenerator(thisArg, _arguments, generator) { function __asyncGenerator(thisArg, _arguments, generator) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var g = generator.apply(thisArg, _arguments || []), i, q = []; var g = generator.apply(thisArg, _arguments || []), i, q = [];
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
function fulfill(value) { resume("next", value); } function fulfill(value) { resume("next", value); }
function reject(value) { resume("throw", value); } function reject(value) { resume("throw", value); }
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
} }
function __asyncDelegator(o) { function __asyncDelegator(o) {
var i, p; var i, p;
return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; }
} }
function __asyncValues(o) { function __asyncValues(o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i; var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
} }
function __makeTemplateObject(cooked, raw) { function __makeTemplateObject(cooked, raw) {
if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
return cooked; return cooked;
}; };
var __setModuleDefault = Object.create ? (function(o, v) { var __setModuleDefault = Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v }); Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) { }) : function(o, v) {
o["default"] = v; o["default"] = v;
}; };
function __importStar(mod) { function __importStar(mod) {
if (mod && mod.__esModule) return mod; if (mod && mod.__esModule) return mod;
var result = {}; var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod); __setModuleDefault(result, mod);
return result; return result;
} }
function __importDefault(mod) { function __importDefault(mod) {
return (mod && mod.__esModule) ? mod : { default: mod }; return (mod && mod.__esModule) ? mod : { default: mod };
} }
function __classPrivateFieldGet(receiver, privateMap) { function __classPrivateFieldGet(receiver, privateMap) {
if (!privateMap.has(receiver)) { if (!privateMap.has(receiver)) {
throw new TypeError("attempted to get private field on non-instance"); throw new TypeError("attempted to get private field on non-instance");
} }
return privateMap.get(receiver); return privateMap.get(receiver);
} }
function __classPrivateFieldSet(receiver, privateMap, value) { function __classPrivateFieldSet(receiver, privateMap, value) {
if (!privateMap.has(receiver)) { if (!privateMap.has(receiver)) {
throw new TypeError("attempted to set private field on non-instance"); throw new TypeError("attempted to set private field on non-instance");
} }
privateMap.set(receiver, value); privateMap.set(receiver, value);
return value; return value;
} }
/***/ }), /***/ }),
@ -51956,7 +51956,7 @@ function ɵɵresolveBody(element) {
*/ */
const INTERPOLATION_DELIMITER = `<60>`; const INTERPOLATION_DELIMITER = `<EFBFBD>`;
/** /**
* Unwrap a value which might be behind a closure (for forward declaration reasons). * Unwrap a value which might be behind a closure (for forward declaration reasons).
*/ */
@ -52069,7 +52069,7 @@ function getExpressionChangedErrorDetails(lView, bindingIndex, oldValue, newValu
} // metadata is not available for this expression, check if this expression is a part of the } // metadata is not available for this expression, check if this expression is a part of the
// property interpolation by going from the current binding index left and look for a string that // property interpolation by going from the current binding index left and look for a string that
// contains INTERPOLATION_DELIMITER, the layout in tView.data for this case will look like this: // contains INTERPOLATION_DELIMITER, the layout in tView.data for this case will look like this:
// [..., 'id<69>Prefix <20> and <20> suffix', null, null, null, ...] // [..., 'id<EFBFBD>Prefix <20> and <20> suffix', null, null, null, ...]
if (metadata === null) { if (metadata === null) {
@ -56810,7 +56810,7 @@ function executeViewQueryFn(flags, viewQueryFn, component) {
* `LView`). Metadata are represented as `INTERPOLATION_DELIMITER`-delimited string with the * `LView`). Metadata are represented as `INTERPOLATION_DELIMITER`-delimited string with the
* following format: * following format:
* - `propertyName` for bound properties; * - `propertyName` for bound properties;
* - `propertyName<6D>prefix<69>interpolation_static_part1<74>..interpolation_static_partN<74>suffix` for * - `propertyName<EFBFBD>prefix<EFBFBD>interpolation_static_part1<EFBFBD>..interpolation_static_partN<74>suffix` for
* interpolated properties. * interpolated properties.
* *
* @param tData `TData` where meta-data will be saved; * @param tData `TData` where meta-data will be saved;
@ -64853,13 +64853,13 @@ class OpCodeParser {
*/ */
const BINDING_REGEXP = /<2F>(\d+):?\d*<2A>/gi; const BINDING_REGEXP = /<EFBFBD>(\d+):?\d*<2A>/gi;
const ICU_REGEXP = /({\s*<2A>\d+:?\d*<2A>\s*,\s*\S{6}\s*,[\s\S]*})/gi; const ICU_REGEXP = /({\s*<EFBFBD>\d+:?\d*<2A>\s*,\s*\S{6}\s*,[\s\S]*})/gi;
const NESTED_ICU = /<2F>(\d+)<29>/; const NESTED_ICU = /<EFBFBD>(\d+)<29>/;
const ICU_BLOCK_REGEXP = /^\s*(<28>\d+:?\d*<2A>)\s*,\s*(select|plural)\s*,/; const ICU_BLOCK_REGEXP = /^\s*(<EFBFBD>\d+:?\d*<2A>)\s*,\s*(select|plural)\s*,/;
const MARKER = `<60>`; const MARKER = `<EFBFBD>`;
const SUBTEMPLATE_REGEXP = /<2F>\/?\*(\d+:\d+)<29>/gi; const SUBTEMPLATE_REGEXP = /<EFBFBD>\/?\*(\d+:\d+)<29>/gi;
const PH_REGEXP = /<2F>(\/?[#*]\d+):?\d*<2A>/gi; const PH_REGEXP = /<EFBFBD>(\/?[#*]\d+):?\d*<2A>/gi;
/** /**
* Angular Dart introduced &ngsp; as a placeholder for non-removable space, see: * Angular Dart introduced &ngsp; as a placeholder for non-removable space, see:
* https://github.com/dart-lang/angular/blob/0bb611387d29d65b5af7f9d2515ab571fd3fbee4/_tests/test/compiler/preserve_whitespace_test.dart#L25-L32 * https://github.com/dart-lang/angular/blob/0bb611387d29d65b5af7f9d2515ab571fd3fbee4/_tests/test/compiler/preserve_whitespace_test.dart#L25-L32
@ -64942,7 +64942,7 @@ function i18nStartFirstCreatePass(tView, parentTNodeIndex, lView, index, message
} }
} else { } else {
// Odd indexes are placeholders (elements and sub-templates) // Odd indexes are placeholders (elements and sub-templates)
// At this point value is something like: '/#1:2' (originally coming from '<27>/#1:2<>') // At this point value is something like: '/#1:2' (originally coming from '<EFBFBD>/#1:2<>')
const isClosing = value.charCodeAt(0) === 47 const isClosing = value.charCodeAt(0) === 47
/* CharCode.SLASH */ /* CharCode.SLASH */
; ;
@ -65298,7 +65298,7 @@ function icuStart(tView, lView, updateOpCodes, parentIdx, icuExpression, anchorI
// It is an nested ICU expression // It is an nested ICU expression
const icuIndex = nestedIcus.push(value) - 1; // Replace nested ICU expression by a comment node const icuIndex = nestedIcus.push(value) - 1; // Replace nested ICU expression by a comment node
valueArr[j] = `<!--<2D>${icuIndex}<7D>-->`; valueArr[j] = `<!--<EFBFBD>${icuIndex}<EFBFBD>-->`;
} }
} }
@ -65597,11 +65597,11 @@ function addCreateAttribute(create, newIndex, attr) {
const ROOT_TEMPLATE_ID = 0; const ROOT_TEMPLATE_ID = 0;
const PP_MULTI_VALUE_PLACEHOLDERS_REGEXP = /\[(<28>.+?<3F>?)\]/; const PP_MULTI_VALUE_PLACEHOLDERS_REGEXP = /\[(<EFBFBD>.+?<3F>?)\]/;
const PP_PLACEHOLDERS_REGEXP = /\[(<28>.+?<3F>?)\]|(<28>\/?\*\d+:\d+<2B>)/g; const PP_PLACEHOLDERS_REGEXP = /\[(<EFBFBD>.+?<3F>?)\]|(<28>\/?\*\d+:\d+<2B>)/g;
const PP_ICU_VARS_REGEXP = /({\s*)(VAR_(PLURAL|SELECT)(_\d+)?)(\s*,)/g; const PP_ICU_VARS_REGEXP = /({\s*)(VAR_(PLURAL|SELECT)(_\d+)?)(\s*,)/g;
const PP_ICU_PLACEHOLDERS_REGEXP = /{([A-Z0-9_]+)}/g; const PP_ICU_PLACEHOLDERS_REGEXP = /{([A-Z0-9_]+)}/g;
const PP_ICUS_REGEXP = /<2F>I18N_EXP_(ICU(_\d+)?)<29>/g; const PP_ICUS_REGEXP = /<EFBFBD>I18N_EXP_(ICU(_\d+)?)<29>/g;
const PP_CLOSE_TEMPLATE_REGEXP = /\/\*/; const PP_CLOSE_TEMPLATE_REGEXP = /\/\*/;
const PP_TEMPLATE_ID_REGEXP = /\d+\:(\d+)/; const PP_TEMPLATE_ID_REGEXP = /\d+\:(\d+)/;
/** /**
@ -65611,10 +65611,10 @@ const PP_TEMPLATE_ID_REGEXP = /\d+\:(\d+)/;
* format (that might contain some markers that we need to replace) to the final * format (that might contain some markers that we need to replace) to the final
* form, consumable by i18nStart instruction. Post processing steps include: * form, consumable by i18nStart instruction. Post processing steps include:
* *
* 1. Resolve all multi-value cases (like [<5B>*1:1<><31>#2:1<>|<7C>#4:1<>|<7C>5<EFBFBD>]) * 1. Resolve all multi-value cases (like [<EFBFBD>*1:1<EFBFBD><EFBFBD>#2:1<EFBFBD>|<EFBFBD>#4:1<EFBFBD>|<EFBFBD>5<EFBFBD>])
* 2. Replace all ICU vars (like "VAR_PLURAL") * 2. Replace all ICU vars (like "VAR_PLURAL")
* 3. Replace all placeholders used inside ICUs in a form of {PLACEHOLDER} * 3. Replace all placeholders used inside ICUs in a form of {PLACEHOLDER}
* 4. Replace all ICU references with corresponding values (like <20>ICU_EXP_ICU_1<5F>) * 4. Replace all ICU references with corresponding values (like <EFBFBD>ICU_EXP_ICU_1<EFBFBD>)
* in case multiple ICUs have the same placeholder name * in case multiple ICUs have the same placeholder name
* *
* @param message Raw translation string for post processing * @param message Raw translation string for post processing
@ -65627,13 +65627,13 @@ const PP_TEMPLATE_ID_REGEXP = /\d+\:(\d+)/;
function i18nPostprocess(message, replacements = {}) { function i18nPostprocess(message, replacements = {}) {
/** /**
* Step 1: resolve all multi-value placeholders like [<5B>#5<>|<7C>*1:1<><31>#2:1<>|<7C>#4:1<>] * Step 1: resolve all multi-value placeholders like [<EFBFBD>#5<EFBFBD>|<EFBFBD>*1:1<EFBFBD><EFBFBD>#2:1<EFBFBD>|<EFBFBD>#4:1<EFBFBD>]
* *
* Note: due to the way we process nested templates (BFS), multi-value placeholders are typically * Note: due to the way we process nested templates (BFS), multi-value placeholders are typically
* grouped by templates, for example: [<5B>#5<>|<7C>#6<>|<7C>#1:1<>|<7C>#3:2<>] where <20>#5<> and <20>#6<> belong to root * grouped by templates, for example: [<EFBFBD>#5<EFBFBD>|<EFBFBD>#6<EFBFBD>|<EFBFBD>#1:1<EFBFBD>|<EFBFBD>#3:2<EFBFBD>] where <EFBFBD>#5<EFBFBD> and <EFBFBD>#6<EFBFBD> belong to root
* template, <20>#1:1<> belong to nested template with index 1 and <20>#1:2<> - nested template with index * template, <EFBFBD>#1:1<EFBFBD> belong to nested template with index 1 and <EFBFBD>#1:2<EFBFBD> - nested template with index
* 3. However in real templates the order might be different: i.e. <20>#1:1<> and/or <20>#3:2<> may go in * 3. However in real templates the order might be different: i.e. <EFBFBD>#1:1<EFBFBD> and/or <EFBFBD>#3:2<EFBFBD> may go in
* front of <20>#6<>. The post processing step restores the right order by keeping track of the * front of <EFBFBD>#6<EFBFBD>. The post processing step restores the right order by keeping track of the
* template id stack and looks for placeholders that belong to the currently active template. * template id stack and looks for placeholders that belong to the currently active template.
*/ */
let result = message; let result = message;
@ -65704,7 +65704,7 @@ function i18nPostprocess(message, replacements = {}) {
return replacements.hasOwnProperty(key) ? replacements[key] : match; return replacements.hasOwnProperty(key) ? replacements[key] : match;
}); });
/** /**
* Step 4: replace all ICU references with corresponding values (like <20>ICU_EXP_ICU_1<5F>) in case * Step 4: replace all ICU references with corresponding values (like <EFBFBD>ICU_EXP_ICU_1<EFBFBD>) in case
* multiple ICUs have the same placeholder name * multiple ICUs have the same placeholder name
*/ */
@ -65739,14 +65739,14 @@ function i18nPostprocess(message, replacements = {}) {
* contain placeholders which associate inner elements and sub-templates within the translation. * contain placeholders which associate inner elements and sub-templates within the translation.
* *
* The translation `message` placeholders are: * The translation `message` placeholders are:
* - `<60>{index}(:{block})<29>`: *Binding Placeholder*: Marks a location where an expression will be * - `<EFBFBD>{index}(:{block})<29>`: *Binding Placeholder*: Marks a location where an expression will be
* interpolated into. The placeholder `index` points to the expression binding index. An optional * interpolated into. The placeholder `index` points to the expression binding index. An optional
* `block` that matches the sub-template in which it was declared. * `block` that matches the sub-template in which it was declared.
* - `<60>#{index}(:{block})<29>`/`<60>/#{index}(:{block})<29>`: *Element Placeholder*: Marks the beginning * - `<EFBFBD>#{index}(:{block})<29>`/`<EFBFBD>/#{index}(:{block})<29>`: *Element Placeholder*: Marks the beginning
* and end of DOM element that were embedded in the original translation block. The placeholder * and end of DOM element that were embedded in the original translation block. The placeholder
* `index` points to the element index in the template instructions set. An optional `block` that * `index` points to the element index in the template instructions set. An optional `block` that
* matches the sub-template in which it was declared. * matches the sub-template in which it was declared.
* - `<60>*{index}:{block}<7D>`/`<60>/*{index}:{block}<7D>`: *Sub-template Placeholder*: Sub-templates must be * - `<EFBFBD>*{index}:{block}<7D>`/`<EFBFBD>/*{index}:{block}<7D>`: *Sub-template Placeholder*: Sub-templates must be
* split up and translated separately in each angular template function. The `index` points to the * split up and translated separately in each angular template function. The `index` points to the
* `template` instruction index. A `block` that matches the sub-template in which it was declared. * `template` instruction index. A `block` that matches the sub-template in which it was declared.
* *
@ -65801,14 +65801,14 @@ function ɵɵi18nEnd() {
* contain placeholders which associate inner elements and sub-templates within the translation. * contain placeholders which associate inner elements and sub-templates within the translation.
* *
* The translation `message` placeholders are: * The translation `message` placeholders are:
* - `<60>{index}(:{block})<29>`: *Binding Placeholder*: Marks a location where an expression will be * - `<EFBFBD>{index}(:{block})<29>`: *Binding Placeholder*: Marks a location where an expression will be
* interpolated into. The placeholder `index` points to the expression binding index. An optional * interpolated into. The placeholder `index` points to the expression binding index. An optional
* `block` that matches the sub-template in which it was declared. * `block` that matches the sub-template in which it was declared.
* - `<60>#{index}(:{block})<29>`/`<60>/#{index}(:{block})<29>`: *Element Placeholder*: Marks the beginning * - `<EFBFBD>#{index}(:{block})<29>`/`<EFBFBD>/#{index}(:{block})<29>`: *Element Placeholder*: Marks the beginning
* and end of DOM element that were embedded in the original translation block. The placeholder * and end of DOM element that were embedded in the original translation block. The placeholder
* `index` points to the element index in the template instructions set. An optional `block` that * `index` points to the element index in the template instructions set. An optional `block` that
* matches the sub-template in which it was declared. * matches the sub-template in which it was declared.
* - `<60>*{index}:{block}<7D>`/`<60>/*{index}:{block}<7D>`: *Sub-template Placeholder*: Sub-templates must be * - `<EFBFBD>*{index}:{block}<7D>`/`<EFBFBD>/*{index}:{block}<7D>`: *Sub-template Placeholder*: Sub-templates must be
* split up and translated separately in each angular template function. The `index` points to the * split up and translated separately in each angular template function. The `index` points to the
* `template` instruction index. A `block` that matches the sub-template in which it was declared. * `template` instruction index. A `block` that matches the sub-template in which it was declared.
* *
@ -65877,10 +65877,10 @@ function ɵɵi18nApply(index) {
* format (that might contain some markers that we need to replace) to the final * format (that might contain some markers that we need to replace) to the final
* form, consumable by i18nStart instruction. Post processing steps include: * form, consumable by i18nStart instruction. Post processing steps include:
* *
* 1. Resolve all multi-value cases (like [<5B>*1:1<><31>#2:1<>|<7C>#4:1<>|<7C>5<EFBFBD>]) * 1. Resolve all multi-value cases (like [<EFBFBD>*1:1<EFBFBD><EFBFBD>#2:1<EFBFBD>|<EFBFBD>#4:1<EFBFBD>|<EFBFBD>5<EFBFBD>])
* 2. Replace all ICU vars (like "VAR_PLURAL") * 2. Replace all ICU vars (like "VAR_PLURAL")
* 3. Replace all placeholders used inside ICUs in a form of {PLACEHOLDER} * 3. Replace all placeholders used inside ICUs in a form of {PLACEHOLDER}
* 4. Replace all ICU references with corresponding values (like <20>ICU_EXP_ICU_1<5F>) * 4. Replace all ICU references with corresponding values (like <EFBFBD>ICU_EXP_ICU_1<EFBFBD>)
* in case multiple ICUs have the same placeholder name * in case multiple ICUs have the same placeholder name
* *
* @param message Raw translation string for post processing * @param message Raw translation string for post processing

File diff suppressed because one or more lines are too long


@@ -7,33 +7,33 @@ import (
 	"io/fs"
 	"net/http"
 	"time"
 
-	slog "go.unistack.org/micro/v4/logger/slog"
-	appconfig "git.unistack.org/unistack-org/pkgdash/internal/config"
-	"git.unistack.org/unistack-org/pkgdash/internal/database"
-	"git.unistack.org/unistack-org/pkgdash/internal/handler"
-	"git.unistack.org/unistack-org/pkgdash/internal/storage"
-	_ "git.unistack.org/unistack-org/pkgdash/internal/storage/sqlite"
-	"git.unistack.org/unistack-org/pkgdash/internal/worker"
-	pb "git.unistack.org/unistack-org/pkgdash/proto"
-	jsoncodec "go.unistack.org/micro-codec-json/v4"
-	jsonpbcodec "go.unistack.org/micro-codec-jsonpb/v4"
-	yamlcodec "go.unistack.org/micro-codec-yaml/v4"
-	envconfig "go.unistack.org/micro-config-env/v4"
-	fileconfig "go.unistack.org/micro-config-file/v4"
-	vaultconfig "go.unistack.org/micro-config-vault/v4"
-	victoriameter "go.unistack.org/micro-meter-victoriametrics/v4"
-	httpsrv "go.unistack.org/micro-server-http/v4"
-	healthhandler "go.unistack.org/micro-server-http/v4/handler/health"
-	meterhandler "go.unistack.org/micro-server-http/v4/handler/meter"
-	spahandler "go.unistack.org/micro-server-http/v4/handler/spa"
-	swaggerui "go.unistack.org/micro-server-http/v4/handler/swagger-ui"
-	"go.unistack.org/micro/v4"
-	"go.unistack.org/micro/v4/config"
-	"go.unistack.org/micro/v4/logger"
-	"go.unistack.org/micro/v4/meter"
-	"go.unistack.org/micro/v4/options"
-	"go.unistack.org/micro/v4/server"
-	rutil "go.unistack.org/micro/v4/util/reflect"
+	jsoncodec "go.unistack.org/micro-codec-json/v3"
+	jsonpbcodec "go.unistack.org/micro-codec-jsonpb/v3"
+	yamlcodec "go.unistack.org/micro-codec-yaml/v3"
+	envconfig "go.unistack.org/micro-config-env/v3"
+	fileconfig "go.unistack.org/micro-config-file/v3"
+	vaultconfig "go.unistack.org/micro-config-vault/v3"
+	victoriameter "go.unistack.org/micro-meter-victoriametrics/v3"
+	httpsrv "go.unistack.org/micro-server-http/v3"
+	healthhandler "go.unistack.org/micro-server-http/v3/handler/health"
+	meterhandler "go.unistack.org/micro-server-http/v3/handler/meter"
+	spahandler "go.unistack.org/micro-server-http/v3/handler/spa"
+	swaggerui "go.unistack.org/micro-server-http/v3/handler/swagger-ui"
+	"go.unistack.org/micro/v3"
+	"go.unistack.org/micro/v3/config"
+	"go.unistack.org/micro/v3/logger"
+	slog "go.unistack.org/micro/v3/logger/slog"
+	"go.unistack.org/micro/v3/meter"
+	"go.unistack.org/micro/v3/server"
+	rutil "go.unistack.org/micro/v3/util/reflect"
+	appconfig "go.unistack.org/pkgdash/internal/config"
+	"go.unistack.org/pkgdash/internal/database"
+	"go.unistack.org/pkgdash/internal/handler"
+	"go.unistack.org/pkgdash/internal/storage"
+	_ "go.unistack.org/pkgdash/internal/storage/sqlite"
+	"go.unistack.org/pkgdash/internal/worker"
+	pb "go.unistack.org/pkgdash/proto"
 )
const appName = "pkgdash" const appName = "pkgdash"
@ -53,16 +53,16 @@ func main() {
ctx, cancel := context.WithCancel(context.Background()) ctx, cancel := context.WithCancel(context.Background())
defer cancel() defer cancel()
logger.DefaultLogger = slog.NewLogger(logger.WithLevel(logger.DebugLevel)) log := slog.NewLogger(logger.WithLevel(logger.DebugLevel))
if err := logger.DefaultLogger.Init(); err != nil { if err := log.Init(); err != nil {
logger.Fatal(ctx, "failed to init logger") log.Fatal(ctx, "failed to init logger")
} }
cfg := appconfig.NewConfig(appName, AppVersion) // create new empty config cfg := appconfig.NewConfig(appName, AppVersion) // create new empty config
vc := vaultconfig.NewConfig( vc := vaultconfig.NewConfig(
config.AllowFail(true), // that may be not exists config.AllowFail(true), // that may be not exists
config.Struct(cfg), // load from vault config.Struct(cfg), // load from vault
options.Codec(jsoncodec.NewCodec()), // vault config in json config.Codec(jsoncodec.NewCodec()), // vault config in json
config.BeforeLoad(func(ctx context.Context, c config.Config) error { config.BeforeLoad(func(ctx context.Context, c config.Config) error {
return c.Init( return c.Init(
vaultconfig.HTTPClient(&http.Client{ vaultconfig.HTTPClient(&http.Client{
@ -84,10 +84,10 @@ func main() {
config.Struct(cfg), // pass config struct config.Struct(cfg), // pass config struct
), ),
fileconfig.NewConfig( // load from file fileconfig.NewConfig( // load from file
config.AllowFail(true), // that may be not exists config.AllowFail(true), // that may be not exists
config.Struct(cfg), // pass config struct config.Struct(cfg), // pass config struct
options.Codec(yamlcodec.NewCodec()), // file config in json config.Codec(yamlcodec.NewCodec()), // file config in json
fileconfig.Path("./local.yaml"), // nearby file fileconfig.Path("./local.yaml"), // nearby file
), ),
envconfig.NewConfig( // load from environment envconfig.NewConfig( // load from environment
config.Struct(cfg), // pass config struct config.Struct(cfg), // pass config struct
@ -95,11 +95,11 @@ func main() {
vc, vc,
}, config.LoadOverride(true), }, config.LoadOverride(true),
); err != nil { ); err != nil {
logger.Fatal(ctx, "failed to load config: %v", err) log.Fatal(ctx, "failed to load config: %v", err)
} }
if err := config.Validate(ctx, cfg); err != nil { if err := config.Validate(ctx, cfg); err != nil {
logger.Fatal(ctx, "failed to validate config: %v", err) log.Fatal(ctx, "failed to validate config: %v", err)
} }
swaggerui.Config["url"] = "../service.swagger.yaml" swaggerui.Config["url"] = "../service.swagger.yaml"
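The hunks above layer several configuration sources over one struct (vault, then file, then environment), let later sources override earlier ones, and finally run config.Validate on the merged result. A minimal sketch of that layering, using only calls that appear in this diff; the AppConfig struct, the ./local.yaml path and the per-source Init/Load loop are illustrative assumptions (the vault source with its BeforeLoad hook is omitted here to keep the sketch standalone):

package main

import (
	"context"

	yamlcodec "go.unistack.org/micro-codec-yaml/v3"
	envconfig "go.unistack.org/micro-config-env/v3"
	fileconfig "go.unistack.org/micro-config-file/v3"
	"go.unistack.org/micro/v3/config"
)

// AppConfig stands in for the struct returned by appconfig.NewConfig.
type AppConfig struct {
	Server struct {
		Name        string
		Addr        string
		LoggerLevel string
	}
}

// loadConfig fills cfg from a local YAML file and then from the environment,
// letting the later source override the earlier one, and validates the result.
func loadConfig(ctx context.Context, cfg *AppConfig) error {
	sources := []config.Config{
		fileconfig.NewConfig(
			config.AllowFail(true),             // the file may not exist
			config.Struct(cfg),                 // load into this struct
			config.Codec(yamlcodec.NewCodec()), // file config is YAML
			fileconfig.Path("./local.yaml"),
		),
		envconfig.NewConfig(
			config.Struct(cfg), // environment overrides the file
		),
	}
	for _, c := range sources {
		if err := c.Init(); err != nil {
			return err
		}
		if err := c.Load(ctx, config.LoadOverride(true)); err != nil {
			return err
		}
	}
	return config.Validate(ctx, cfg)
}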
@ -108,7 +108,7 @@ func main() {
meter.Path(cfg.Meter.Path), meter.Path(cfg.Meter.Path),
meter.WriteFDMetrics(true), meter.WriteFDMetrics(true),
meter.WriteProcessMetrics(true), meter.WriteProcessMetrics(true),
options.Address(cfg.Meter.Addr), meter.Address(cfg.Meter.Addr),
) )
svc := micro.NewService() svc := micro.NewService()
@ -118,89 +118,86 @@ func main() {
micro.Name(cfg.Server.Name), micro.Name(cfg.Server.Name),
micro.Version(cfg.Server.Version), micro.Version(cfg.Server.Version),
); err != nil { ); err != nil {
logger.Fatal(ctx, "failed to init service: %v", err) log.Fatal(ctx, "failed to init service: %v", err)
} }
assetsUI, err := fs.Sub(assets, "assets/ui") assetsUI, err := fs.Sub(assets, "assets/ui")
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to get assets: %v", err) log.Fatal(ctx, "failed to get assets: %v", err)
} }
if err := svc.Server("http").Init( if err := svc.Server("http").Init(
options.Address(cfg.Server.Addr), server.Address(cfg.Server.Addr),
options.Name(cfg.Server.Name), server.Name(cfg.Server.Name),
server.Version(cfg.Server.Version), server.Version(cfg.Server.Version),
options.Codecs("application/json", jsonpbcodec.NewCodec()), server.Codec("application/json", jsonpbcodec.NewCodec()),
options.Address(cfg.Server.Addr),
options.Context(ctx),
httpsrv.PathHandler(http.MethodGet, "/ui/*", spahandler.Handler("/ui/", assetsUI)), httpsrv.PathHandler(http.MethodGet, "/ui/*", spahandler.Handler("/ui/", assetsUI)),
httpsrv.PathHandler(http.MethodHead, "/ui/*", spahandler.Handler("/ui/", assetsUI)), httpsrv.PathHandler(http.MethodHead, "/ui/*", spahandler.Handler("/ui/", assetsUI)),
httpsrv.PathHandler(http.MethodGet, "/swagger-ui/*", swaggerui.Handler("/swagger-ui")), httpsrv.PathHandler(http.MethodGet, "/swagger-ui/*", swaggerui.Handler("/swagger-ui")),
); err != nil { ); err != nil {
logger.Fatal(ctx, "failed to init service: %v", err) log.Fatal(ctx, "failed to init service: %v", err)
} }
if err := database.ParseDSN(cfg.Database); err != nil { if err := database.ParseDSN(cfg.Database); err != nil {
logger.Fatal(ctx, "failed to init database: %v", err) log.Fatal(ctx, "failed to init database: %v", err)
} }
db, err := database.Connect(ctx, cfg.Database, logger.DefaultLogger) db, err := database.Connect(ctx, cfg.Database, log)
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to connect database: %v", err) log.Fatal(ctx, "failed to connect database: %v", err)
} }
store, err := storage.NewStorage(cfg.Database.Type, db) store, err := storage.NewStorage(cfg.Database.Type, log, db)
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to init storage: %v", err) log.Fatal(ctx, "failed to init storage: %v", err)
} }
h, err := handler.NewHandler(store) h, err := handler.NewHandler(log, store)
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to create handler: %v", err) log.Fatal(ctx, "failed to create handler: %v", err)
} }
log := logger.NewLogger( if err := svc.Init(
logger.WithLevel(logger.ParseLevel(cfg.Server.LoggerLevel)), micro.Logger(
logger.WithCallerSkipCount(3), log.Clone(logger.WithLevel(logger.ParseLevel(cfg.Server.LoggerLevel))),
) ),
if err := svc.Init(micro.Logger(log)); err != nil { ); err != nil {
logger.Fatal(ctx, "failed to init service: %v", err) log.Fatal(ctx, "failed to init service", err)
} }
if err := pb.RegisterPkgdashServiceServer(svc.Server("http"), h); err != nil { if err := pb.RegisterPkgdashServer(svc.Server("http"), h); err != nil {
logger.Fatal(ctx, "failed to register handler: %v", err) log.Fatal(ctx, "failed to register handler", err)
} }
intsvc := httpsrv.NewServer( intsvc := httpsrv.NewServer(
options.Codecs("application/json", jsoncodec.NewCodec()), server.Codec("application/json", jsoncodec.NewCodec()),
options.Address(cfg.Meter.Addr), server.Address(cfg.Meter.Addr),
options.Context(ctx),
) )
if err := intsvc.Init(); err != nil { if err := intsvc.Init(); err != nil {
logger.Fatal(ctx, "failed to init http srv: %v", err) log.Fatal(ctx, "failed to init http srv: %v", err)
} }
if err := healthhandler.RegisterHealthServiceServer(intsvc, healthhandler.NewHandler()); err != nil { if err := healthhandler.RegisterHealthServiceServer(intsvc, healthhandler.NewHandler()); err != nil {
logger.Fatal(ctx, "failed to set http handler: %v", err) log.Fatal(ctx, "failed to set http handler: %v", err)
} }
if err := meterhandler.RegisterMeterServiceServer(intsvc, meterhandler.NewHandler()); err != nil { if err := meterhandler.RegisterMeterServiceServer(intsvc, meterhandler.NewHandler()); err != nil {
logger.Fatal(ctx, "failed to set http handler: %v", err) log.Fatal(ctx, "failed to set http handler: %v", err)
} }
if err := intsvc.Start(); err != nil { if err := intsvc.Start(); err != nil {
logger.Fatal(ctx, "failed to run http srv: %v", err) log.Fatal(ctx, "failed to run http srv: %v", err)
} }
cw, err := vc.Watch(ctx, config.WatchCoalesce(true), config.WatchInterval(1*time.Second, 5*time.Second)) cw, err := vc.Watch(ctx, config.WatchCoalesce(true), config.WatchInterval(1*time.Second, 5*time.Second))
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to watch config: %v", err) log.Fatal(ctx, "failed to watch config: %v", err)
} }
defer func() { defer func() {
if err := cw.Stop(); err != nil { if err := cw.Stop(); err != nil {
logger.Error(ctx, err.Error()) log.Error(ctx, err.Error())
} }
}() }()
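The same hunk also starts a second, internal HTTP server that serves only health and metrics handlers on the meter address. A condensed sketch of that block, assuming it is called from main() with the already initialised logger and cfg.Meter.Addr (the function name startInternalServer is illustrative, not from this commit):

package main

import (
	"context"

	jsoncodec "go.unistack.org/micro-codec-json/v3"
	httpsrv "go.unistack.org/micro-server-http/v3"
	healthhandler "go.unistack.org/micro-server-http/v3/handler/health"
	meterhandler "go.unistack.org/micro-server-http/v3/handler/meter"
	"go.unistack.org/micro/v3/logger"
	"go.unistack.org/micro/v3/server"
)

// startInternalServer exposes health and metrics endpoints on a separate
// listener so the public API server stays untouched.
func startInternalServer(ctx context.Context, log logger.Logger, addr string) {
	intsvc := httpsrv.NewServer(
		server.Codec("application/json", jsoncodec.NewCodec()),
		server.Address(addr), // typically cfg.Meter.Addr
	)
	if err := intsvc.Init(); err != nil {
		log.Fatal(ctx, "failed to init http srv: %v", err)
	}
	if err := healthhandler.RegisterHealthServiceServer(intsvc, healthhandler.NewHandler()); err != nil {
		log.Fatal(ctx, "failed to set http handler: %v", err)
	}
	if err := meterhandler.RegisterMeterServiceServer(intsvc, meterhandler.NewHandler()); err != nil {
		log.Fatal(ctx, "failed to set http handler: %v", err)
	}
	if err := intsvc.Start(); err != nil {
		log.Fatal(ctx, "failed to run http srv: %v", err)
	}
}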
@ -208,11 +205,11 @@ func main() {
for { for {
changes, err := cw.Next() changes, err := cw.Next()
if err != nil { if err != nil {
logger.Error(ctx, "failed to get config update: %v", err) log.Error(ctx, "failed to get config update: %v", err)
} }
for k, v := range changes { for k, v := range changes {
if err = rutil.SetFieldByPath(cfg, v, k); err != nil { if err = rutil.SetFieldByPath(cfg, v, k); err != nil {
logger.Error(ctx, "failed to set config update: %v", err) log.Error(ctx, "failed to set config update: %v", err)
break break
} }
} }
@ -221,8 +218,8 @@ func main() {
switch k { switch k {
case "Server.LoggerLevel": case "Server.LoggerLevel":
if lvl, ok := changes[k].(string); ok { if lvl, ok := changes[k].(string); ok {
logger.Info(ctx, "logger level changed to %s", lvl) log.Info(ctx, "logger level changed to %s", lvl)
logger.DefaultLogger.Level(logger.ParseLevel(lvl)) log.Level(logger.ParseLevel(lvl))
} }
} }
} }
@ -231,10 +228,10 @@ func main() {
}() }()
go func() { go func() {
worker.Run(ctx, store, time.Duration(cfg.App.CheckInterval)) worker.Run(ctx, log, store, time.Duration(cfg.App.CheckInterval))
}() }()
if err = svc.Run(); err != nil { if err = svc.Run(); err != nil {
logger.Fatal(ctx, "failed to run svc: %v", err) log.Fatal(ctx, "failed to run svc: %v", err)
} }
} }
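After the migration, the watcher goroutine above is the only place that reacts to configuration changes at runtime: every change is written back into the config struct by field path, and a new Server.LoggerLevel is applied to the running logger. A compact sketch of that loop; the configWatcher interface is declared locally only to avoid guessing the exact watcher type returned by vc.Watch, and the cfg parameter is left untyped for the same reason.

package main

import (
	"context"

	"go.unistack.org/micro/v3/logger"
	rutil "go.unistack.org/micro/v3/util/reflect"
)

// configWatcher matches the watcher methods main() uses:
// Next in the loop, Stop in the deferred cleanup.
type configWatcher interface {
	Next() (map[string]interface{}, error)
	Stop() error
}

// watchConfig writes every change back into cfg by field path and applies a
// changed Server.LoggerLevel to the running logger immediately.
func watchConfig(ctx context.Context, log logger.Logger, cw configWatcher, cfg interface{}) {
	for {
		changes, err := cw.Next()
		if err != nil {
			log.Error(ctx, "failed to get config update: %v", err)
			continue
		}
		for k, v := range changes {
			if err = rutil.SetFieldByPath(cfg, v, k); err != nil {
				log.Error(ctx, "failed to set config update: %v", err)
				break
			}
			if k == "Server.LoggerLevel" {
				if lvl, ok := v.(string); ok {
					log.Info(ctx, "logger level changed to %s", lvl)
					log.Level(logger.ParseLevel(lvl))
				}
			}
		}
	}
}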


@@ -17,23 +17,22 @@ import (
 	"text/template"
 	"time"
 
-	"git.unistack.org/unistack-org/pkgdash/internal/configcli"
-	"git.unistack.org/unistack-org/pkgdash/internal/modules"
-	"git.unistack.org/unistack-org/pkgdash/internal/source"
 	"github.com/go-git/go-git/v5"
 	gitconfig "github.com/go-git/go-git/v5/config"
 	"github.com/go-git/go-git/v5/plumbing"
 	"github.com/go-git/go-git/v5/plumbing/object"
 	httpauth "github.com/go-git/go-git/v5/plumbing/transport/http"
 	"github.com/jdx/go-netrc"
-	yamlcodec "go.unistack.org/micro-codec-yaml/v4"
-	envconfig "go.unistack.org/micro-config-env/v4"
-	fileconfig "go.unistack.org/micro-config-file/v4"
-	microflag "go.unistack.org/micro-config-flag/v4"
-	"go.unistack.org/micro/v4/config"
-	"go.unistack.org/micro/v4/logger"
-	"go.unistack.org/micro/v4/logger/slog"
-	"go.unistack.org/micro/v4/options"
+	yamlcodec "go.unistack.org/micro-codec-yaml/v3"
+	envconfig "go.unistack.org/micro-config-env/v3"
+	fileconfig "go.unistack.org/micro-config-file/v3"
+	microflag "go.unistack.org/micro-config-flag/v3"
+	"go.unistack.org/micro/v3/config"
+	"go.unistack.org/micro/v3/logger"
+	"go.unistack.org/micro/v3/logger/slog"
+	"go.unistack.org/pkgdash/internal/configcli"
+	"go.unistack.org/pkgdash/internal/modules"
+	"go.unistack.org/pkgdash/internal/source"
 	"golang.org/x/mod/modfile"
 	"golang.org/x/mod/semver"
 )
@ -98,10 +97,10 @@ func main() {
ctx, cancel := context.WithCancel(context.Background()) ctx, cancel := context.WithCancel(context.Background())
defer cancel() defer cancel()
logger.DefaultLogger = slog.NewLogger() log := slog.NewLogger()
if err = logger.DefaultLogger.Init(logger.WithCallerSkipCount(3), logger.WithLevel(logger.DebugLevel)); err != nil { if err = log.Init(logger.WithLevel(logger.DebugLevel)); err != nil {
logger.Error(ctx, fmt.Sprintf("logger init error: %v", err)) log.Error(ctx, fmt.Sprintf("logger init error: %v", err))
} }
cfg := configcli.NewConfig() cfg := configcli.NewConfig()
@ -117,7 +116,7 @@ func main() {
}, },
config.LoadOverride(true), config.LoadOverride(true),
); err != nil { ); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to load config: %v", err)) log.Fatal(ctx, fmt.Sprintf("failed to load config: %v", err))
} }
for _, configDir := range configDirs { for _, configDir := range configDirs {
@ -130,15 +129,15 @@ func main() {
c := fileconfig.NewConfig( c := fileconfig.NewConfig(
config.AllowFail(false), config.AllowFail(false),
config.Struct(cfg), config.Struct(cfg),
options.Codec(yamlcodec.NewCodec()), config.Codec(yamlcodec.NewCodec()),
fileconfig.Path(path), fileconfig.Path(path),
) )
err = c.Init() err = c.Init()
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("failed to init config: %v", err)) log.Error(ctx, fmt.Sprintf("failed to init config: %v", err))
} }
if err = c.Load(ctx, config.LoadOverride(true)); err != nil { if err = c.Load(ctx, config.LoadOverride(true)); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to load config: %v", err)) log.Error(ctx, fmt.Sprintf("failed to load config: %v", err))
} }
} }
} }
@ -155,11 +154,11 @@ func main() {
c := microflag.NewConfig(config.Struct(cliCfg), microflag.FlagErrorHandling(flag.ContinueOnError)) c := microflag.NewConfig(config.Struct(cliCfg), microflag.FlagErrorHandling(flag.ContinueOnError))
if err = c.Init(); err != nil { if err = c.Init(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("init cli cfg failed: %v", err)) log.Fatal(ctx, fmt.Sprintf("init cli cfg failed: %v", err))
} }
if err = c.Load(ctx); err != nil { if err = c.Load(ctx); err != nil {
logger.Fatal(ctx, fmt.Sprintf("load cli cfg failed: %v", err)) log.Fatal(ctx, fmt.Sprintf("load cli cfg failed: %v", err))
} }
if cliCfg.Path == "" && cliCfg.Command == "" { if cliCfg.Path == "" && cliCfg.Command == "" {
@ -178,11 +177,11 @@ func main() {
} }
buf, err := os.ReadFile(name) buf, err := os.ReadFile(name)
if err != nil { if err != nil {
panic(err) log.Fatal(ctx, "failed to read file", err)
} }
mfile, err := modfile.Parse(name, buf, nil) mfile, err := modfile.Parse(name, buf, nil)
if err != nil { if err != nil {
panic(err) log.Fatal(ctx, "failed to parse file", err)
} }
mvs := make(map[string]modules.Update) mvs := make(map[string]modules.Update)
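The hunk above is where pkgdashcli reads and parses go.mod before collecting candidate updates. The standalone example below shows the same modfile.Parse call on its own; restricting the output to direct requirements is an illustrative assumption, not behaviour taken from this commit.

package main

import (
	"fmt"
	"os"

	"golang.org/x/mod/modfile"
)

// listDirectRequires parses a go.mod file the same way pkgdashcli does
// (modfile.Parse with a nil version fixer) and prints the direct dependencies.
func listDirectRequires(name string) error {
	buf, err := os.ReadFile(name)
	if err != nil {
		return fmt.Errorf("failed to read file: %w", err)
	}
	mfile, err := modfile.Parse(name, buf, nil)
	if err != nil {
		return fmt.Errorf("failed to parse file: %w", err)
	}
	for _, req := range mfile.Require {
		if req.Indirect {
			continue // skip transitive requirements
		}
		fmt.Printf("%s %s\n", req.Mod.Path, req.Mod.Version)
	}
	return nil
}

func main() {
	if err := listDirectRequires("go.mod"); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}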
@ -195,7 +194,7 @@ func main() {
OnUpdate: func(u modules.Update) { OnUpdate: func(u modules.Update) {
var modpath string // new mod path with major var modpath string // new mod path with major
if u.Err != nil { if u.Err != nil {
logger.Error(ctx, fmt.Sprintf("%s: failed: %v", u.Module.Path, u.Err)) log.Error(ctx, fmt.Sprintf("%s: failed: %v", u.Module.Path, u.Err))
return return
} }
modpath = u.Module.Path modpath = u.Module.Path
@@ -219,18 +218,34 @@ func main() {
 	modules.Updates(updateOptions)
 
-	if err = getRepoMgmt(ctx, cfg); err != nil { // Filling in empty config fields.
-		logger.Error(ctx, err.Error())
+	if err = getRepoMgmt(ctx, log, cfg); err != nil { // Filling in empty config fields.
+		log.Error(ctx, err.Error())
 	}
 
-	gitSource := source.NewSourceControl(*cfg)
-
-	Execute(ctx, gitSource, mvs, *cliCfg, *cfg)
-
-	logger.Info(ctx, "Pkgdash successfully updated dependencies")
+	if len(cfg.Branches) == 0 {
+		branchName, err := getCurrentBranch(ctx)
+		if err != nil {
+			log.Fatal(ctx, "failed to get current branch", err)
+		}
+		cfg.Branches = append(cfg.Branches, branchName)
+	}
+
+	if cfg.Source.Repository == "" {
+		reposiotry, err := getCurrentRepository(ctx)
+		if err != nil {
+			log.Fatal(ctx, "failed to get current repository", err)
+		}
+		cfg.Source.Repository = reposiotry
+	}
+
+	gitSource := source.NewSourceControl(*cfg, log)
+
+	Execute(ctx, log, gitSource, mvs, *cliCfg, *cfg)
+
+	log.Info(ctx, "Pkgdash successfully updated dependencies")
 }
 
-func Execute(ctx context.Context, gitSource source.SourceControl, mvs map[string]modules.Update, cliCfg configcli.Cli, cfg configcli.Config) {
+func Execute(ctx context.Context, log logger.Logger, gitSource source.SourceControl, mvs map[string]modules.Update, cliCfg configcli.Cli, cfg configcli.Config) {
 	var mod modules.Update
 	var ok bool
 	var path string
@ -244,81 +259,131 @@ func Execute(ctx context.Context, gitSource source.SourceControl, mvs map[string
if cliCfg.Path != "" { // update one dep if cliCfg.Path != "" { // update one dep
path = cliCfg.Path path = cliCfg.Path
if mod, ok = mvs[path]; !ok { if mod, ok = mvs[path]; !ok {
logger.Fatal(ctx, fmt.Sprintf("For %s update not exist", path)) log.Fatal(ctx, fmt.Sprintf("For %s update not exist", path))
} }
logger.Debug(ctx, fmt.Sprintf("Start update %s from %s to %s", path, mod.Module.Version, mod.Version)) log.Debug(ctx, fmt.Sprintf("Start update %s from %s to %s", path, mod.Module.Version, mod.Version))
for _, branch := range cfg.Branches { for _, branch := range cfg.Branches {
if err := gitSource.RequestOpen(ctx, branch, path, mod); err != nil { if err := gitSource.RequestOpen(ctx, branch, path, mod); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to create pr: %v", err)) log.Fatal(ctx, fmt.Sprintf("failed to create pr: %v", err))
} }
} }
logger.Debug(ctx, fmt.Sprintf("Update successful for %s", path)) log.Debug(ctx, fmt.Sprintf("Update successful for %s", path))
return return
} }
for _, branch := range cfg.Branches { // update all dep for _, branch := range cfg.Branches { // update all dep
for path, mod = range mvs { for path, mod = range mvs {
logger.Debug(ctx, fmt.Sprintf("Start update %s from %s to %s", path, mod.Module.Version, mod.Version)) log.Debug(ctx, fmt.Sprintf("Start update %s from %s to %s", path, mod.Module.Version, mod.Version))
err := gitSource.RequestOpen(ctx, branch, path, mod) err := gitSource.RequestOpen(ctx, branch, path, mod)
if err != nil { if err != nil {
if strings.Contains(err.Error(), "already exists") { if strings.Contains(err.Error(), "already exists") {
logger.Debug(ctx, fmt.Sprintf("skip %s, branch already exists", path)) log.Debug(ctx, fmt.Sprintf("skip %s, branch already exists", path))
continue continue
} }
logger.Fatal(ctx, fmt.Sprintf("failed to create pr: %v", err)) log.Fatal(ctx, fmt.Sprintf("failed to create pr: %v", err))
} }
logger.Debug(ctx, fmt.Sprintf("Update successful for %s", path)) log.Debug(ctx, fmt.Sprintf("Update successful for %s", path))
} }
} }
    case "close":
        if cliCfg.Path != "" { // close one dep
            path = cliCfg.Path
-            logger.Debug(ctx, fmt.Sprintf("Start close for %s", path))
+            log.Debug(ctx, fmt.Sprintf("Start close for %s", path))
            for _, branch := range cfg.Branches {
                if err := gitSource.RequestClose(ctx, branch, path); err != nil {
-                    logger.Fatal(ctx, fmt.Sprintf("failed to close pr: %v", err))
+                    log.Fatal(ctx, fmt.Sprintf("failed to close pr: %v", err))
                }
            }
-            logger.Debug(ctx, fmt.Sprintf("Close successful for %s", path))
+            log.Debug(ctx, fmt.Sprintf("Close successful for %s", path))
            return
        }
        for _, branch := range cfg.Branches {
-            logger.Info(ctx, fmt.Sprintf("Start getting pr for %s", branch))
+            log.Info(ctx, fmt.Sprintf("Start getting pr for %s", branch))
            rMap, err := gitSource.RequestList(ctx, branch)
            if err != nil {
-                logger.Fatal(ctx, fmt.Sprintf("Error with getting pr list for branch: %s", branch))
+                log.Fatal(ctx, fmt.Sprintf("Error with getting pr list for branch: %s", branch))
            }
-            logger.Info(ctx, fmt.Sprintf("for %s:\n%s", branch, rMap))
-            logger.Info(ctx, fmt.Sprintf("Start close pr for base branch %s", branch))
-            for path, _ = range rMap {
+            log.Info(ctx, fmt.Sprintf("for %s:\n%s", branch, rMap))
+            log.Info(ctx, fmt.Sprintf("Start close pr for base branch %s", branch))
+            for path = range rMap {
-                logger.Debug(ctx, fmt.Sprintf("Start close for %s", path))
+                log.Debug(ctx, fmt.Sprintf("Start close for %s", path))
                if err = gitSource.RequestClose(ctx, branch, path); err != nil {
-                    logger.Fatal(ctx, fmt.Sprintf("failed to close pr: %v", err))
+                    log.Fatal(ctx, fmt.Sprintf("failed to close pr: %v", err))
                }
-                logger.Debug(ctx, fmt.Sprintf("Close successful for %s", path))
+                log.Debug(ctx, fmt.Sprintf("Close successful for %s", path))
            }
        }
    case "list":
        for _, branch := range cfg.Branches {
            rMap, err := gitSource.RequestList(ctx, branch)
            if err != nil {
-                logger.Fatal(ctx, fmt.Sprintf("RequestList: error %s", err))
+                log.Fatal(ctx, fmt.Sprintf("RequestList: error %s", err))
            }
            prList[branch] = rMap
        }
        js, err := json.Marshal(prList)
        if err != nil {
-            logger.Error(ctx, fmt.Sprintf("error: %s", err))
+            log.Error(ctx, fmt.Sprintf("error: %s", err))
        }
-        fmt.Println(fmt.Sprintf("for %s:\n%s", cfg.Source.Repository, js))
+        fmt.Printf("for %s:\n%s\n", cfg.Source.Repository, js)
    default:
        fmt.Print(initMsg)
    }
}
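The dispatcher above only ever calls three methods on gitSource. Roughly, the contract it relies on looks like the sketch below; this is inferred from the calls in this hunk rather than copied from the repository, and the RequestList value type in particular is a guess.

package source

import (
    "context"

    "go.unistack.org/pkgdash/internal/modules"
)

// SourceControl is a sketch of the interface Execute appears to depend on;
// the real definition in internal/source may differ.
type SourceControl interface {
    // RequestOpen opens (or refreshes) a pull request bumping path to mod.Version on the given base branch.
    RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error
    // RequestClose closes the pull request previously opened for path on the base branch.
    RequestClose(ctx context.Context, branch string, path string) error
    // RequestList returns the open pull requests for the base branch, keyed by module path
    // (the value type here is an assumption).
    RequestList(ctx context.Context, branch string) (map[string]string, error)
}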
func getCurrentRepository(ctx context.Context) (string, error) {
wd, err := os.Getwd()
if err != nil {
return "", err
}
p := filepath.Clean(wd)
repo, err := git.PlainOpen(p)
if err != nil {
return "", err
}
cfg, err := repo.Config()
if err != nil {
return "", err
}
for k, v := range cfg.Remotes {
if k != "origin" {
continue
}
return v.URLs[0], nil
}
return "", fmt.Errorf("failed to get remotes")
}
func getCurrentBranch(ctx context.Context) (string, error) {
wd, err := os.Getwd()
if err != nil {
return "", err
}
p := filepath.Clean(wd)
repo, err := git.PlainOpen(p)
if err != nil {
return "", err
}
ref, err := repo.Head()
if err != nil {
return "", err
}
return ref.Name().Short(), nil
}
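A small, hypothetical call site for the two helpers added above, showing how the detected remote URL and base branch could feed the CLI defaults; the function name and error wrapping are illustrative only.

// detectGitDefaults is a hypothetical wrapper around the helpers above;
// it is not part of this commit.
func detectGitDefaults(ctx context.Context) (repoURL, branch string, err error) {
    if repoURL, err = getCurrentRepository(ctx); err != nil {
        return "", "", fmt.Errorf("detect repository: %w", err)
    }
    if branch, err = getCurrentBranch(ctx); err != nil {
        return "", "", fmt.Errorf("detect branch: %w", err)
    }
    return repoURL, branch, nil
}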
-func getRepoMgmt(ctx context.Context, cfg *configcli.Config) error {
+func getRepoMgmt(ctx context.Context, log logger.Logger, cfg *configcli.Config) error {
    wd, err := os.Getwd()
    if err != nil {
        return err
@@ -327,27 +392,38 @@ func getRepoMgmt(ctx context.Context, cfg *configcli.Config) error {
    p := filepath.Clean(wd)
    for _, configDir := range configDirs {
        _, err := os.Stat(filepath.Join(p, configDir))
-        if name, ok := repoMgmt[configDir]; ok && cfg.Source.TypeGit == "" && err == nil {
+        if err != nil {
+            continue
+        }
+        log.Info(ctx, fmt.Sprintf("check config dir %s", configDir))
+        if name, ok := repoMgmt[configDir]; ok && cfg.Source.TypeGit == "" {
            cfg.Source.TypeGit = name
        }
-        if api, ok := repoAPI[configDir]; ok && cfg.Source.APIURL == "" && err == nil {
+        if api, ok := repoAPI[configDir]; ok && cfg.Source.APIURL == "" {
            cfg.Source.APIURL = api
        }
    }
    if p == "/" && cfg.Source.TypeGit == "" && cfg.Source.APIURL == "" {
        return fmt.Errorf("unknown")
    }
-    p = filepath.Clean(filepath.Join(p, ".."))
+    // p = filepath.Clean(filepath.Join(p, ".."))
    usr, err := user.Current()
    if err != nil {
-        logger.Fatal(ctx, fmt.Sprintf("pkgdash/main can t get info about user: %s", err))
+        log.Fatal(ctx, fmt.Sprintf("pkgdash/main cant get info about user: %s", err))
    }
-    n, err := netrc.Parse(filepath.Join(usr.HomeDir, ".netrc"))
-    if err != nil {
-        logger.Error(ctx, "pkgdash/main can t parse .netrc: %s", err)
-    }
+    log.Info(ctx, fmt.Sprintf("try to configure scm source %v", cfg.Source))
+    netrcfile := filepath.Join(usr.HomeDir, ".netrc")
+    log.Info(ctx, "try to parse netrc file "+netrcfile)
+    n, err := netrc.Parse(netrcfile)
+    if err != nil {
+        log.Error(ctx, "pkgdash/main cant parse .netrc: %s", err)
+    }
+    log.Info(ctx, "try to configure scm for "+cfg.Source.APIURL)
    if cfg.Source.Owner == "" {
        cfg.Source.Owner = n.Machine(cfg.Source.APIURL).Get("login")
    }
@@ -360,7 +436,7 @@ func getRepoMgmt(ctx context.Context, cfg *configcli.Config) error {
    return nil
}
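getRepoMgmt falls back to ~/.netrc for the owner login and token. A self-contained sketch of that lookup with jdx/go-netrc (the same Parse/Machine/Get calls used above); the host name in the sample entry is a placeholder, and the nil check is defensive rather than taken from the commit.

package main

import (
    "fmt"
    "os/user"
    "path/filepath"

    "github.com/jdx/go-netrc"
)

// Example ~/.netrc entry (host is illustrative):
//
//    machine git.example.org
//      login mybot
//      password my-api-token
//
// The login becomes cfg.Source.Owner and the password the API token,
// mirroring the lookups in getRepoMgmt above.
func readNetrcCreds(apiHost string) (login, password string, err error) {
    usr, err := user.Current()
    if err != nil {
        return "", "", err
    }
    n, err := netrc.Parse(filepath.Join(usr.HomeDir, ".netrc"))
    if err != nil {
        return "", "", err
    }
    m := n.Machine(apiHost)
    if m == nil {
        return "", "", fmt.Errorf("no netrc entry for %s", apiHost)
    }
    return m.Get("login"), m.Get("password"), nil
}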
-func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string, mods map[string]modules.Update) error {
+func giteaPullRequest(ctx context.Context, log logger.Logger, cfg *configcli.Config, branch string, mods map[string]modules.Update) error {
envAPIURL := os.Getenv("GITHUB_API_URL") envAPIURL := os.Getenv("GITHUB_API_URL")
envREPOSITORY := os.Getenv("GITHUB_REPOSITORY") envREPOSITORY := os.Getenv("GITHUB_REPOSITORY")
envTOKEN := os.Getenv("GITHUB_TOKEN") envTOKEN := os.Getenv("GITHUB_TOKEN")
@ -370,34 +446,34 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
tplTitle, err := template.New("pull_request_title").Parse(cfg.PullRequestTitle) tplTitle, err := template.New("pull_request_title").Parse(cfg.PullRequestTitle)
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to parse template: %v", err) log.Fatal(ctx, "failed to parse template: %v", err)
} }
wTitle := bytes.NewBuffer(nil) wTitle := bytes.NewBuffer(nil)
tplBody, err := template.New("pull_request_body").Parse(cfg.PullRequestBody) tplBody, err := template.New("pull_request_body").Parse(cfg.PullRequestBody)
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to parse template: %v", err) log.Fatal(ctx, "failed to parse template: %v", err)
} }
wBody := bytes.NewBuffer(nil) wBody := bytes.NewBuffer(nil)
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true}) repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to open repo: %v", err) log.Fatal(ctx, "failed to open repo: %v", err)
} }
if err = repo.FetchContext(ctx, &git.FetchOptions{ if err = repo.FetchContext(ctx, &git.FetchOptions{
Auth: &httpauth.BasicAuth{Username: envTOKEN, Password: envTOKEN}, Auth: &httpauth.BasicAuth{Username: envTOKEN, Password: envTOKEN},
Force: true, Force: true,
}); err != nil && err != git.NoErrAlreadyUpToDate { }); err != nil && err != git.NoErrAlreadyUpToDate {
logger.Fatal(ctx, "failed to fetch repo: %v", err) log.Fatal(ctx, "failed to fetch repo: %v", err)
} }
var headRef *plumbing.Reference var headRef *plumbing.Reference
refIter, err := repo.Branches() refIter, err := repo.Branches()
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to get branches: %v", err) log.Fatal(ctx, "failed to get branches: %v", err)
} }
for { for {
ref, err := refIter.Next() ref, err := refIter.Next()
@ -412,14 +488,14 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
refIter.Close() refIter.Close()
if headRef == nil { if headRef == nil {
logger.Fatal(ctx, "failed to get repo branch head") log.Fatal(ctx, "failed to get repo branch head")
} }
logger.Info(ctx, "repo head %s", headRef) log.Info(ctx, "repo head %s", headRef)
wtree, err := repo.Worktree() wtree, err := repo.Worktree()
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to get worktree: %v", err) log.Fatal(ctx, "failed to get worktree: %v", err)
} }
type giteaPull struct { type giteaPull struct {
@ -450,13 +526,13 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
} }
if err = json.Unmarshal(buf, &pulls); err != nil { if err = json.Unmarshal(buf, &pulls); err != nil {
logger.Fatal(ctx, "failed to decode response %s err: %v", buf, err) log.Fatal(ctx, "failed to decode response %s err: %v", buf, err)
} }
for path := range mods { for path := range mods {
for _, pull := range pulls { for _, pull := range pulls {
if strings.Contains(pull.Title, path) && pull.Base.Ref == branch { if strings.Contains(pull.Title, path) && pull.Base.Ref == branch {
logger.Info(ctx, "skip %s as pr already exists %s", path, pull.URL) log.Info(ctx, "skip %s as pr already exists %s", path, pull.URL)
delete(mods, path) delete(mods, path)
} }
} }
@ -466,11 +542,11 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
wTitle.Reset() wTitle.Reset()
wBody.Reset() wBody.Reset()
logger.Info(ctx, "update %s from %s to %s", path, mod.Module.Version, mod.Version) log.Info(ctx, "update %s from %s to %s", path, mod.Module.Version, mod.Version)
logger.Info(ctx, "reset worktree") log.Info(ctx, "reset worktree")
if err = wtree.Reset(&git.ResetOptions{Mode: git.HardReset}); err != nil { if err = wtree.Reset(&git.ResetOptions{Mode: git.HardReset}); err != nil {
logger.Fatal(ctx, "failed to reset repo branch: %v", err) log.Fatal(ctx, "failed to reset repo branch: %v", err)
} }
if err = wtree.PullContext(ctx, &git.PullOptions{ if err = wtree.PullContext(ctx, &git.PullOptions{
@ -480,17 +556,17 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
Force: true, Force: true,
RemoteName: "origin", RemoteName: "origin",
}); err != nil && err != git.NoErrAlreadyUpToDate { }); err != nil && err != git.NoErrAlreadyUpToDate {
logger.Fatal(ctx, "failed to pull repo: %v", err) log.Fatal(ctx, "failed to pull repo: %v", err)
} }
logger.Info(ctx, "checkout ref %s", headRef) log.Info(ctx, "checkout ref %s", headRef)
if err = wtree.Checkout(&git.CheckoutOptions{ if err = wtree.Checkout(&git.CheckoutOptions{
Hash: headRef.Hash(), Hash: headRef.Hash(),
Branch: plumbing.NewBranchReferenceName(fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)), Branch: plumbing.NewBranchReferenceName(fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)),
Create: true, Create: true,
Force: true, Force: true,
}); err != nil { }); err != nil {
logger.Fatal(ctx, "failed to checkout tree: %v", err) log.Fatal(ctx, "failed to checkout tree: %v", err)
} }
epath, err := exec.LookPath("go") epath, err := exec.LookPath("go")
@ -498,7 +574,7 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
err = nil err = nil
} }
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to find go command: %v", err) log.Fatal(ctx, "failed to find go command: %v", err)
} }
var cmd *exec.Cmd var cmd *exec.Cmd
@ -506,25 +582,25 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version)) cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version))
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, "failed to run go mod edit: %s err: %v", out, err) log.Fatal(ctx, "failed to run go mod edit: %s err: %v", out, err)
} }
cmd = exec.CommandContext(ctx, epath, "mod", "tidy") cmd = exec.CommandContext(ctx, epath, "mod", "tidy")
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, "failed to run go mod tidy: %s err: %v", out, err) log.Fatal(ctx, "failed to run go mod tidy: %s err: %v", out, err)
} }
logger.Info(ctx, "worktree add go.mod") log.Info(ctx, "worktree add go.mod")
if _, err = wtree.Add("go.mod"); err != nil { if _, err = wtree.Add("go.mod"); err != nil {
logger.Fatal(ctx, "failed to add file: %v", err) log.Fatal(ctx, "failed to add file: %v", err)
} }
logger.Info(ctx, "worktree add go.sum") log.Info(ctx, "worktree add go.sum")
if _, err = wtree.Add("go.sum"); err != nil { if _, err = wtree.Add("go.sum"); err != nil {
logger.Fatal(ctx, "failed to add file: %v", err) log.Fatal(ctx, "failed to add file: %v", err)
} }
logger.Info(ctx, "worktree commit") log.Info(ctx, "worktree commit")
_, err = wtree.Commit(wTitle.String(), &git.CommitOptions{ _, err = wtree.Commit(wTitle.String(), &git.CommitOptions{
Parents: []plumbing.Hash{headRef.Hash()}, Parents: []plumbing.Hash{headRef.Hash()},
Author: &object.Signature{ Author: &object.Signature{
@ -534,27 +610,27 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
}, },
}) })
if err != nil { if err != nil {
logger.Fatal(ctx, "failed to commit: %v", err) log.Fatal(ctx, "failed to commit: %v", err)
} }
// newref := plumbing.NewHashReference(plumbing.ReferenceName(fmt.Sprintf("refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version)), headRef.Hash()) // newref := plumbing.NewHashReference(plumbing.ReferenceName(fmt.Sprintf("refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version)), headRef.Hash())
/* /*
if err = repo.Storer.SetReference(newref); err != nil { if err = repo.Storer.SetReference(newref); err != nil {
logger.Fatal(ctx, "failed to create repo branch: %v", err) log.Fatal(ctx, "failed to create repo branch: %v", err)
} }
*/ */
refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version))
logger.Info(ctx, "try to push refspec %s", refspec) log.Info(ctx, "try to push refspec %s", refspec)
if err = repo.PushContext(ctx, &git.PushOptions{ if err = repo.PushContext(ctx, &git.PushOptions{
RefSpecs: []gitconfig.RefSpec{refspec}, RefSpecs: []gitconfig.RefSpec{refspec},
Auth: &httpauth.BasicAuth{Username: envTOKEN, Password: envTOKEN}, Auth: &httpauth.BasicAuth{Username: envTOKEN, Password: envTOKEN},
Force: true, Force: true,
}); err != nil { }); err != nil {
logger.Fatal(ctx, "failed to push repo branch: %v", err) log.Fatal(ctx, "failed to push repo branch: %v", err)
} }
data := map[string]string{ data := map[string]string{
@ -564,10 +640,10 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
} }
if err = tplTitle.Execute(wTitle, data); err != nil { if err = tplTitle.Execute(wTitle, data); err != nil {
logger.Fatal(ctx, "failed to execute template: %v", err) log.Fatal(ctx, "failed to execute template: %v", err)
} }
if err = tplBody.Execute(wBody, data); err != nil { if err = tplBody.Execute(wBody, data); err != nil {
logger.Fatal(ctx, "failed to execute template: %v", err) log.Fatal(ctx, "failed to execute template: %v", err)
} }
body := map[string]string{ body := map[string]string{
@ -576,14 +652,14 @@ func giteaPullRequest(ctx context.Context, cfg *configcli.Config, branch string,
"head": fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version), "head": fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version),
"title": wTitle.String(), "title": wTitle.String(),
} }
logger.Info(ctx, "raw body: %#+v", body) log.Info(ctx, "raw body: %#+v", body)
buf, err = json.Marshal(body) buf, err = json.Marshal(body)
if err != nil { if err != nil {
return err return err
} }
logger.Info(ctx, "marshal body: %s", buf) log.Info(ctx, "marshal body: %s", buf)
req, err := http.NewRequestWithContext(ctx, http.MethodPost, envAPIURL+"/repos/"+envREPOSITORY+"/pulls?token="+envTOKEN, bytes.NewReader(buf)) req, err := http.NewRequestWithContext(ctx, http.MethodPost, envAPIURL+"/repos/"+envREPOSITORY+"/pulls?token="+envTOKEN, bytes.NewReader(buf))
if err != nil { if err != nil {

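The hunk above is cut off while the pull-request call is being built. For orientation, a compact sketch of the same Gitea interaction, assuming the file's existing imports (bytes, encoding/json, net/http, fmt, context); the body keys follow the map shown above, and the 201 success check is an assumption about Gitea's response.

// createGiteaPull is a sketch mirroring the request built above.
// envAPIURL, envREPOSITORY and envTOKEN come from the GITHUB_* environment
// variables read at the top of giteaPullRequest.
func createGiteaPull(ctx context.Context, envAPIURL, envREPOSITORY, envTOKEN string, body map[string]string) error {
    buf, err := json.Marshal(body) // keys such as "base", "head", "title" as shown above
    if err != nil {
        return err
    }
    req, err := http.NewRequestWithContext(ctx, http.MethodPost,
        envAPIURL+"/repos/"+envREPOSITORY+"/pulls?token="+envTOKEN, bytes.NewReader(buf))
    if err != nil {
        return err
    }
    req.Header.Set("Content-Type", "application/json")
    rsp, err := http.DefaultClient.Do(req)
    if err != nil {
        return err
    }
    defer rsp.Body.Close()
    if rsp.StatusCode != http.StatusCreated { // assumed success status
        return fmt.Errorf("unexpected status %s", rsp.Status)
    }
    return nil
}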

@@ -7,6 +7,6 @@ package main
import (
    _ "github.com/envoyproxy/protoc-gen-validate"
-    _ "go.unistack.org/micro-proto/v4"
-    _ "go.unistack.org/protoc-gen-go-micro/v4"
+    _ "go.unistack.org/micro-proto/v3"
+    _ "go.unistack.org/protoc-gen-go-micro/v3"
)


@@ -2,7 +2,7 @@
PROTO_ARGS=" \
  --proto_path=$(go list -f '{{ .Dir }}' -m github.com/envoyproxy/protoc-gen-validate) \
-  --proto_path=$(go list -f '{{ .Dir }}' -m go.unistack.org/micro-proto/v4) \
+  --proto_path=$(go list -f '{{ .Dir }}' -m go.unistack.org/micro-proto/v3) \
  --go_out=paths=source_relative:./proto \
  --go-micro_out=paths=source_relative,components=micro|http,standalone=false:./proto \
  --validate_out=paths=source_relative,lang=go:./proto \
@@ -12,4 +12,4 @@ PROTO_ARGS=" \
find ./proto -type f -name "*.pb.go" -delete
protoc -I./proto $PROTO_ARGS ./proto/*.proto || find ./proto -type f -name "*.pb.go" -delete
-./ui/node_modules/.bin/ng-openapi-gen -i ./proto/apidocs.swagger.yaml -o ./ui/src/app/api --removeStaleFiles true --ignoreUnusedModels false
+#./ui/node_modules/.bin/ng-openapi-gen -i ./proto/apidocs.swagger.yaml -o ./ui/src/app/api --removeStaleFiles true --ignoreUnusedModels false

go.mod

@@ -1,54 +1,71 @@
-module git.unistack.org/unistack-org/pkgdash
+module go.unistack.org/pkgdash

-go 1.20
+go 1.22.7
+
+toolchain go1.23.3

require (
github.com/envoyproxy/protoc-gen-validate v1.0.4 git.unistack.org/unistack-org/pkgdash v0.0.0-20240421141944-8729d0b88e60
github.com/envoyproxy/protoc-gen-validate v1.1.0
github.com/go-git/go-git/v5 v5.12.0 github.com/go-git/go-git/v5 v5.12.0
github.com/golang-migrate/migrate/v4 v4.17.0 github.com/golang-migrate/migrate/v4 v4.17.0
github.com/google/uuid v1.6.0 github.com/google/uuid v1.6.0
github.com/jackc/pgx/v4 v4.18.3 github.com/jackc/pgx/v5 v5.3.1
github.com/jdx/go-netrc v1.0.0 github.com/jdx/go-netrc v1.0.0
github.com/jmoiron/sqlx v1.3.5 github.com/jmoiron/sqlx v1.3.5
github.com/pkg/errors v0.9.1 github.com/pkg/errors v0.9.1
github.com/stretchr/testify v1.9.0 github.com/stretchr/testify v1.9.0
go.unistack.org/micro-client-http/v4 v4.0.3 go.unistack.org/micro-client-http/v3 v3.9.14
go.unistack.org/micro-codec-json/v4 v4.0.0 go.unistack.org/micro-codec-json/v3 v3.10.1
go.unistack.org/micro-codec-jsonpb/v4 v4.0.1 go.unistack.org/micro-codec-jsonpb/v3 v3.10.3
go.unistack.org/micro-codec-yaml/v4 v4.0.0 go.unistack.org/micro-codec-yaml/v3 v3.10.2
go.unistack.org/micro-config-env/v4 v4.0.3 go.unistack.org/micro-config-env/v3 v3.8.7
go.unistack.org/micro-config-flag/v4 v4.0.4 go.unistack.org/micro-config-file/v3 v3.8.10
go.unistack.org/micro-config-vault/v4 v4.0.4 go.unistack.org/micro-config-flag/v3 v3.8.11
go.unistack.org/micro-meter-victoriametrics/v4 v4.0.1 go.unistack.org/micro-config-vault/v3 v3.8.9
go.unistack.org/micro-proto/v4 v4.1.0 go.unistack.org/micro-meter-victoriametrics/v3 v3.8.9
go.unistack.org/micro-server-http/v4 v4.0.14 go.unistack.org/micro-proto/v3 v3.4.1
go.unistack.org/micro/v4 v4.0.19 go.unistack.org/micro-server-http/v3 v3.11.37
go.unistack.org/protoc-gen-go-micro/v4 v4.0.13 go.unistack.org/micro/v3 v3.11.1
golang.org/x/mod v0.16.0 go.unistack.org/protoc-gen-go-micro/v3 v3.10.10
golang.org/x/sync v0.6.0 golang.org/x/mod v0.17.0
golang.org/x/tools v0.19.0 golang.org/x/sync v0.9.0
google.golang.org/protobuf v1.33.0 golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d
google.golang.org/protobuf v1.35.2
modernc.org/sqlite v1.29.5 modernc.org/sqlite v1.29.5
) )
require ( require (
github.com/KimMachineGun/automemlimit v0.6.1 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cilium/ebpf v0.9.1 // indirect
github.com/containerd/cgroups/v3 v3.0.1 // indirect
github.com/coreos/go-systemd/v22 v22.3.2 // indirect
github.com/cyphar/filepath-securejoin v0.2.4 // indirect github.com/cyphar/filepath-securejoin v0.2.4 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/go-jose/go-jose/v4 v4.0.4 // indirect
github.com/godbus/dbus/v5 v5.0.4 // indirect
github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49 // indirect github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49 // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/jackc/pgx/v4 v4.18.3 // indirect
github.com/opencontainers/runtime-spec v1.0.2 // indirect
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect
github.com/silas/dag v0.0.0-20220518035006-a7e85ada93c5 // indirect github.com/silas/dag v0.0.0-20220518035006-a7e85ada93c5 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240311132316-a219d84964c2 // indirect github.com/sirupsen/logrus v1.9.2 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect go.uber.org/automaxprocs v1.6.0 // indirect
go.unistack.org/metrics v0.0.1 // indirect
go.unistack.org/micro-client-http/v4 v4.0.3 // indirect
go.unistack.org/micro/v4 v4.0.19 // indirect
go.unistack.org/protoc-gen-go-micro/v4 v4.0.13 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20241118233622-e639e219e697 // indirect
google.golang.org/grpc v1.68.0 // indirect
modernc.org/gc/v3 v3.0.0-20240304020402-f0dba7c97c2b // indirect modernc.org/gc/v3 v3.0.0-20240304020402-f0dba7c97c2b // indirect
) )
require ( require (
dario.cat/mergo v1.0.0 // indirect dario.cat/mergo v1.0.1 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/ProtonMail/go-crypto v1.0.0 // indirect github.com/ProtonMail/go-crypto v1.0.0 // indirect
github.com/VictoriaMetrics/metrics v1.33.1 // indirect
github.com/acomagu/bufpipe v1.0.4 // indirect
github.com/cenkalti/backoff/v3 v3.2.2 // indirect
github.com/cloudflare/circl v1.3.7 // indirect github.com/cloudflare/circl v1.3.7 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dustin/go-humanize v1.0.1 // indirect github.com/dustin/go-humanize v1.0.1 // indirect
@ -56,22 +73,19 @@ require (
github.com/fatih/structtag v1.2.0 // indirect github.com/fatih/structtag v1.2.0 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.5.0 // indirect github.com/go-git/go-billy/v5 v5.5.0 // indirect
github.com/go-jose/go-jose/v3 v3.0.3 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/gnostic v0.7.0 // indirect github.com/google/gnostic v0.7.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-retryablehttp v0.7.5 // indirect github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
github.com/hashicorp/go-rootcerts v1.0.2 // indirect github.com/hashicorp/go-rootcerts v1.0.2 // indirect
github.com/hashicorp/go-secure-stdlib/parseutil v0.1.8 // indirect github.com/hashicorp/go-secure-stdlib/parseutil v0.1.8 // indirect
github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 // indirect github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 // indirect
github.com/hashicorp/go-sockaddr v1.0.6 // indirect github.com/hashicorp/go-sockaddr v1.0.7 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect
github.com/hashicorp/vault/api v1.12.2 // indirect github.com/hashicorp/vault/api v1.15.0 // indirect
github.com/iancoleman/strcase v0.3.0 // indirect github.com/iancoleman/strcase v0.3.0 // indirect
github.com/imdario/mergo v0.3.16 // indirect
github.com/jackc/chunkreader/v2 v2.0.1 // indirect github.com/jackc/chunkreader/v2 v2.0.1 // indirect
github.com/jackc/pgconn v1.14.3 // indirect github.com/jackc/pgconn v1.14.3 // indirect
github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 // indirect github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 // indirect
@ -81,14 +95,12 @@ require (
github.com/jackc/pgservicefile v0.0.0-20231201235250-de7065d80cb9 // indirect github.com/jackc/pgservicefile v0.0.0-20231201235250-de7065d80cb9 // indirect
github.com/jackc/pgtype v1.14.3 // indirect github.com/jackc/pgtype v1.14.3 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/lyft/protoc-gen-star/v2 v2.0.3 // indirect github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
@ -100,22 +112,18 @@ require (
github.com/valyala/histogram v1.2.0 // indirect github.com/valyala/histogram v1.2.0 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect
go.uber.org/atomic v1.11.0 // indirect go.uber.org/atomic v1.11.0 // indirect
go.unistack.org/micro-config-file/v4 v4.0.3 go.unistack.org/micro-proto/v4 v4.1.0 // indirect
golang.org/x/crypto v0.21.0 // indirect go.unistack.org/micro-server-http/v4 v4.0.14 // indirect
golang.org/x/net v0.22.0 // indirect golang.org/x/crypto v0.29.0 // indirect
golang.org/x/sys v0.18.0 // indirect golang.org/x/net v0.31.0 // indirect
golang.org/x/text v0.14.0 // indirect golang.org/x/sys v0.27.0 // indirect
golang.org/x/time v0.5.0 // indirect golang.org/x/text v0.20.0 // indirect
golang.org/x/time v0.7.0 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
lukechampine.com/uint128 v1.3.0 // indirect
modernc.org/cc/v3 v3.41.0 // indirect
modernc.org/ccgo/v3 v3.17.0 // indirect
modernc.org/libc v1.49.0 // indirect modernc.org/libc v1.49.0 // indirect
modernc.org/mathutil v1.6.0 // indirect modernc.org/mathutil v1.6.0 // indirect
modernc.org/memory v1.7.2 // indirect modernc.org/memory v1.7.2 // indirect
modernc.org/opt v0.1.3 // indirect
modernc.org/strutil v1.2.0 // indirect modernc.org/strutil v1.2.0 // indirect
modernc.org/token v1.1.0 // indirect modernc.org/token v1.1.0 // indirect
sigs.k8s.io/yaml v1.4.0 // indirect
)
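With the module path renamed and the micro dependencies moved from the /v4 line back to /v3, the import rewrite in Go files is mechanical; the paths below are the ones appearing in the hunks of this commit, shown here only as a reference.

package example

// After this commit the code imports the v3 micro packages and the new
// go.unistack.org/pkgdash module path (previously git.unistack.org/unistack-org/pkgdash
// together with the /v4 micro packages).
import (
    _ "go.unistack.org/micro-server-http/v3"
    _ "go.unistack.org/micro/v3/logger"

    _ "go.unistack.org/pkgdash/internal/models"
    _ "go.unistack.org/pkgdash/proto"
)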

go.sum

File diff suppressed because it is too large

@@ -28,7 +28,7 @@ func Test_Analyze(t *testing.T) {
    }()
    analyze, err := Analyze(context.Background(), file, models.Package{
-        Name: "go.unistack.org/micro/v4",
+        Name: "go.unistack.org/micro/v3",
        URL:  "https://git.unistack.org/unistack-org/micro.git",
    })
    assert.Nil(t, err)


@@ -3,7 +3,7 @@ package config
import (
    "time"
-    mtime "go.unistack.org/micro/v4/util/time"
+    mtime "go.unistack.org/micro/v3/util/time"
)

type AppConfig struct {


@@ -10,8 +10,8 @@ type Config struct {
type Source struct {
    TypeGit    string `json:"type" yaml:"type" env:"GIT_TYPE"`
    Username   string `json:"username" yaml:"username" env:"GIT_USERNAME"`
    Password   string `json:"password" yaml:"password" env:"GIT_PASSWORD,GIT_TOKEN"`
    APIURL     string `json:"apiurl" yaml:"apiurl" env:"GIT_API"`
    Repository string `json:"repository" yaml:"repository" env:"GIT_REPO"`
    Owner      string `json:"owner" yaml:"owner" env:"GIT_OWNER"`

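Every field of Source above is tag-driven, so values can come either from YAML/JSON config or from the GIT_* environment variables. A self-contained sketch of that mapping using a local mirror of the struct; the real type lives in the pkgdash config package, and all values are placeholders.

package main

import (
    "encoding/json"
    "fmt"
)

// Source here is only a local mirror of the struct shown above,
// kept to illustrate the tag mapping.
type Source struct {
    TypeGit    string `json:"type" yaml:"type" env:"GIT_TYPE"`
    Username   string `json:"username" yaml:"username" env:"GIT_USERNAME"`
    Password   string `json:"password" yaml:"password" env:"GIT_PASSWORD,GIT_TOKEN"`
    APIURL     string `json:"apiurl" yaml:"apiurl" env:"GIT_API"`
    Repository string `json:"repository" yaml:"repository" env:"GIT_REPO"`
    Owner      string `json:"owner" yaml:"owner" env:"GIT_OWNER"`
}

func main() {
    src := Source{
        TypeGit:    "gitea",
        APIURL:     "https://git.example.org/api/v1",
        Repository: "pkgdash",
        Owner:      "unistack-org",
    }
    out, _ := json.Marshal(src) // same keys a YAML/JSON config file would use
    fmt.Println(string(out))
}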

@ -8,16 +8,16 @@ import (
"strings" "strings"
"time" "time"
appconfig "git.unistack.org/unistack-org/pkgdash/internal/config"
"github.com/golang-migrate/migrate/v4" "github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/database" "github.com/golang-migrate/migrate/v4/database"
mpgx "github.com/golang-migrate/migrate/v4/database/pgx" mpgx "github.com/golang-migrate/migrate/v4/database/pgx"
msqlite "github.com/golang-migrate/migrate/v4/database/sqlite" msqlite "github.com/golang-migrate/migrate/v4/database/sqlite"
"github.com/golang-migrate/migrate/v4/source/iofs" "github.com/golang-migrate/migrate/v4/source/iofs"
"github.com/jackc/pgx/v4" "github.com/jackc/pgx/v5"
"github.com/jackc/pgx/v4/stdlib" "github.com/jackc/pgx/v5/stdlib"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
"go.unistack.org/micro/v4/logger" "go.unistack.org/micro/v3/logger"
appconfig "go.unistack.org/pkgdash/internal/config"
_ "modernc.org/sqlite" _ "modernc.org/sqlite"
) )
@ -128,18 +128,18 @@ func Connect(ctx context.Context, cfg *appconfig.DatabaseConfig, log logger.Logg
case "": case "":
break break
case "up": case "up":
logger.Info(ctx, "migrate up") log.Info(ctx, "migrate up")
err = m.Up() err = m.Up()
case "down": case "down":
logger.Info(ctx, "migrate down") log.Info(ctx, "migrate down")
err = m.Down() err = m.Down()
case "seed": case "seed":
logger.Info(ctx, "migrate seed") log.Info(ctx, "migrate seed")
if err = m.Drop(); err == nil { if err = m.Drop(); err == nil {
err = m.Up() err = m.Up()
} }
default: default:
logger.Info(ctx, "migrate version") log.Info(ctx, "migrate version")
v, verr := strconv.ParseUint(cfg.Type, 10, 64) v, verr := strconv.ParseUint(cfg.Type, 10, 64)
if verr != nil { if verr != nil {
return nil, err return nil, err

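Connect wires golang-migrate's iofs source to the sqlite (or pgx) database driver before running the up/down/seed cases shown above. A sketch of that wiring for the sqlite path, using only the packages imported in this file; the embedded migrations path is an assumption, not taken from the repository.

// newSqliteMigrator sketches how the iofs source and sqlite driver fit
// together before m.Up()/m.Down() are invoked in Connect.
package database

import (
    "database/sql"
    "embed"

    "github.com/golang-migrate/migrate/v4"
    msqlite "github.com/golang-migrate/migrate/v4/database/sqlite"
    "github.com/golang-migrate/migrate/v4/source/iofs"
    _ "modernc.org/sqlite"
)

//go:embed migrations/sqlite/*.sql
var migrationsFS embed.FS // path is an assumption; the real embed directive lives elsewhere

func newSqliteMigrator(db *sql.DB) (*migrate.Migrate, error) {
    src, err := iofs.New(migrationsFS, "migrations/sqlite")
    if err != nil {
        return nil, err
    }
    drv, err := msqlite.WithInstance(db, &msqlite.Config{})
    if err != nil {
        return nil, err
    }
    return migrate.NewWithInstance("iofs", src, "sqlite", drv)
}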

@ -6,18 +6,17 @@ import (
"errors" "errors"
"net/http" "net/http"
"git.unistack.org/unistack-org/pkgdash/internal/models" httpsrv "go.unistack.org/micro-server-http/v3"
pb "git.unistack.org/unistack-org/pkgdash/proto" "go.unistack.org/pkgdash/internal/models"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) CommentCreate(ctx context.Context, req *pb.CommentCreateReq, rsp *pb.CommentCreateRsp) error { func (h *Handler) CommentCreate(ctx context.Context, req *pb.CommentCreateReq, rsp *pb.CommentCreateRsp) error {
logger.Debug(ctx, "Start AddComment") h.logger.Debug(ctx, "Start AddComment")
err := req.Validate() err := req.Validate()
if err != nil { if err != nil {
logger.Error(ctx, "validation error", err) h.logger.Error(ctx, "validation error", err)
httpsrv.SetRspCode(ctx, http.StatusBadRequest) httpsrv.SetRspCode(ctx, http.StatusBadRequest)
return httpsrv.SetError(NewValidationError(err)) return httpsrv.SetError(NewValidationError(err))
} }
@ -28,13 +27,13 @@ func (h *Handler) CommentCreate(ctx context.Context, req *pb.CommentCreateReq, r
httpsrv.SetRspCode(ctx, http.StatusNotFound) httpsrv.SetRspCode(ctx, http.StatusNotFound)
return httpsrv.SetError(NewNotFoundError(err)) return httpsrv.SetError(NewNotFoundError(err))
} }
logger.Error(ctx, "comment create error", err) h.logger.Error(ctx, "comment create error", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
rsp.Comment = models.NewComment(com) rsp.Comment = models.NewComment(com)
logger.Debug(ctx, "Success finish addComment") h.logger.Debug(ctx, "Success finish addComment")
return nil return nil
} }


@ -6,17 +6,16 @@ import (
"errors" "errors"
"net/http" "net/http"
pb "git.unistack.org/unistack-org/pkgdash/proto" httpsrv "go.unistack.org/micro-server-http/v3"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) CommentDelete(ctx context.Context, req *pb.CommentDeleteReq, rsp *pb.CommentDeleteRsp) error { func (h *Handler) CommentDelete(ctx context.Context, req *pb.CommentDeleteReq, rsp *pb.CommentDeleteRsp) error {
logger.Debug(ctx, "Start AddComment") h.logger.Debug(ctx, "Start AddComment")
err := req.Validate() err := req.Validate()
if err != nil { if err != nil {
logger.Error(ctx, "validate error", err) h.logger.Error(ctx, "validate error", err)
httpsrv.SetRspCode(ctx, http.StatusBadRequest) httpsrv.SetRspCode(ctx, http.StatusBadRequest)
return httpsrv.SetError(NewValidationError(err)) return httpsrv.SetError(NewValidationError(err))
} }
@ -27,11 +26,11 @@ func (h *Handler) CommentDelete(ctx context.Context, req *pb.CommentDeleteReq, r
httpsrv.SetRspCode(ctx, http.StatusNotFound) httpsrv.SetRspCode(ctx, http.StatusNotFound)
return httpsrv.SetError(NewNotFoundError(err)) return httpsrv.SetError(NewNotFoundError(err))
} }
logger.Error(ctx, "comment delete error", err) h.logger.Error(ctx, "comment delete error", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
logger.Debug(ctx, "Success finish addComment") h.logger.Debug(ctx, "Success finish addComment")
return nil return nil
} }


@ -4,25 +4,24 @@ import (
"context" "context"
"net/http" "net/http"
"git.unistack.org/unistack-org/pkgdash/internal/models" httpsrv "go.unistack.org/micro-server-http/v3"
pb "git.unistack.org/unistack-org/pkgdash/proto" "go.unistack.org/pkgdash/internal/models"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) CommentList(ctx context.Context, req *pb.CommentListReq, rsp *pb.CommentListRsp) error { func (h *Handler) CommentList(ctx context.Context, req *pb.CommentListReq, rsp *pb.CommentListRsp) error {
logger.Debug(ctx, "Start GetModule") h.logger.Debug(ctx, "Start GetModule")
err := req.Validate() err := req.Validate()
if err != nil { if err != nil {
logger.Error(ctx, "validate error", err) h.logger.Error(ctx, "validate error", err)
httpsrv.SetRspCode(ctx, http.StatusBadRequest) httpsrv.SetRspCode(ctx, http.StatusBadRequest)
return httpsrv.SetError(NewValidationError(err)) return httpsrv.SetError(NewValidationError(err))
} }
comments, err := h.store.CommentList(ctx, req) comments, err := h.store.CommentList(ctx, req)
if err != nil { if err != nil {
logger.Error(ctx, "comment list error", err) h.logger.Error(ctx, "comment list error", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
@ -31,6 +30,6 @@ func (h *Handler) CommentList(ctx context.Context, req *pb.CommentListReq, rsp *
rsp.Comments = append(rsp.Comments, models.NewComment(com)) rsp.Comments = append(rsp.Comments, models.NewComment(com))
} }
logger.Debug(ctx, "Success finish getModule") h.logger.Debug(ctx, "Success finish getModule")
return nil return nil
} }


@ -3,7 +3,7 @@ package handler
import ( import (
"context" "context"
pb "git.unistack.org/unistack-org/pkgdash/proto" pb "go.unistack.org/pkgdash/proto"
) )
func (h *Handler) CommentLookup(ctx context.Context, req *pb.CommentLookupReq, rsp *pb.CommentLookupRsp) error { func (h *Handler) CommentLookup(ctx context.Context, req *pb.CommentLookupReq, rsp *pb.CommentLookupRsp) error {


@@ -4,16 +4,18 @@ import (
    "net/http"
    "strconv"

-    "git.unistack.org/unistack-org/pkgdash/internal/storage"
-    pb "git.unistack.org/unistack-org/pkgdash/proto"
    "github.com/google/uuid"
-    jsonpbcodec "go.unistack.org/micro-codec-jsonpb/v4"
-    "go.unistack.org/micro/v4/codec"
+    jsonpbcodec "go.unistack.org/micro-codec-jsonpb/v3"
+    "go.unistack.org/micro/v3/codec"
+    "go.unistack.org/micro/v3/logger"
+    "go.unistack.org/pkgdash/internal/storage"
+    pb "go.unistack.org/pkgdash/proto"
)

type Handler struct {
-    store storage.Storage
-    codec codec.Codec
+    logger logger.Logger
+    store  storage.Storage
+    codec  codec.Codec
}

func NewNotFoundError(err error) *pb.ErrorRsp {
@@ -43,10 +45,11 @@ func NewValidationError(err error) *pb.ErrorRsp {
    }
}

-func NewHandler(store storage.Storage) (*Handler, error) {
+func NewHandler(log logger.Logger, store storage.Storage) (*Handler, error) {
    h := &Handler{
-        codec: jsonpbcodec.NewCodec(),
-        store: store,
+        logger: log,
+        codec:  jsonpbcodec.NewCodec(),
+        store:  store,
    }
    return h, nil
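The handlers now log through the logger injected via NewHandler instead of the package-level logger. A generic, self-contained illustration of that dependency-injection pattern with stand-in types (not the pkgdash ones):

package main

import (
    "context"
    "fmt"
)

// Logger is a stand-in for go.unistack.org/micro/v3/logger.Logger.
type Logger interface {
    Debug(ctx context.Context, args ...interface{})
}

type stdLogger struct{}

func (stdLogger) Debug(_ context.Context, args ...interface{}) { fmt.Println(args...) }

// Greeter logs only through the injected logger, so tests can pass a
// silent or recording logger instead of relying on a global.
type Greeter struct {
    logger Logger
}

func NewGreeter(log Logger) *Greeter { return &Greeter{logger: log} }

func (g *Greeter) Hello(ctx context.Context, name string) {
    g.logger.Debug(ctx, "Hello called for", name)
}

func main() {
    g := NewGreeter(stdLogger{})
    g.Hello(context.Background(), "pkgdash")
}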


@ -4,18 +4,17 @@ import (
"context" "context"
"net/http" "net/http"
"git.unistack.org/unistack-org/pkgdash/internal/models" httpsrv "go.unistack.org/micro-server-http/v3"
pb "git.unistack.org/unistack-org/pkgdash/proto" "go.unistack.org/pkgdash/internal/models"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) HandlerList(ctx context.Context, req *pb.HandlerListReq, rsp *pb.HandlerListRsp) error { func (h *Handler) HandlerList(ctx context.Context, req *pb.HandlerListReq, rsp *pb.HandlerListRsp) error {
logger.Debug(ctx, "HandlerList handler start") h.logger.Debug(ctx, "HandlerList handler start")
packages, err := h.store.HandlerList(ctx, req) packages, err := h.store.HandlerList(ctx, req)
if err != nil { if err != nil {
logger.Error(ctx, "error db response: %v", err) h.logger.Error(ctx, "error db response: %v", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
@ -23,6 +22,6 @@ func (h *Handler) HandlerList(ctx context.Context, req *pb.HandlerListReq, rsp *
for _, hdlr := range packages { for _, hdlr := range packages {
rsp.Handlers = append(rsp.Handlers, models.NewHandler(hdlr)) rsp.Handlers = append(rsp.Handlers, models.NewHandler(hdlr))
} }
logger.Debug(ctx, "HandlerList handler stop") h.logger.Debug(ctx, "HandlerList handler stop")
return nil return nil
} }


@ -4,25 +4,24 @@ import (
"context" "context"
"net/http" "net/http"
"git.unistack.org/unistack-org/pkgdash/internal/models" httpsrv "go.unistack.org/micro-server-http/v3"
pb "git.unistack.org/unistack-org/pkgdash/proto" "go.unistack.org/pkgdash/internal/models"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) ModuleList(ctx context.Context, req *pb.ModuleListReq, rsp *pb.ModuleListRsp) error { func (h *Handler) ModuleList(ctx context.Context, req *pb.ModuleListReq, rsp *pb.ModuleListRsp) error {
logger.Debug(ctx, "Start GetModule") h.logger.Debug(ctx, "Start GetModule")
err := req.Validate() err := req.Validate()
if err != nil { if err != nil {
logger.Error(ctx, "validate error", err) h.logger.Error(ctx, "validate error", err)
httpsrv.SetRspCode(ctx, http.StatusBadRequest) httpsrv.SetRspCode(ctx, http.StatusBadRequest)
return httpsrv.SetError(NewValidationError(err)) return httpsrv.SetError(NewValidationError(err))
} }
modules, err := h.store.ModuleList(ctx, req) modules, err := h.store.ModuleList(ctx, req)
if err != nil { if err != nil {
logger.Error(ctx, "module list error", err) h.logger.Error(ctx, "module list error", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
@ -30,6 +29,6 @@ func (h *Handler) ModuleList(ctx context.Context, req *pb.ModuleListReq, rsp *pb
for _, mod := range modules { for _, mod := range modules {
rsp.Modules = append(rsp.Modules, models.NewModule(mod)) rsp.Modules = append(rsp.Modules, models.NewModule(mod))
} }
logger.Debug(ctx, "Success finish getModule") h.logger.Debug(ctx, "Success finish getModule")
return nil return nil
} }


@ -4,30 +4,29 @@ import (
"context" "context"
"net/http" "net/http"
"git.unistack.org/unistack-org/pkgdash/internal/models" httpsrv "go.unistack.org/micro-server-http/v3"
pb "git.unistack.org/unistack-org/pkgdash/proto" "go.unistack.org/pkgdash/internal/models"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) PackageCreate(ctx context.Context, req *pb.PackageCreateReq, rsp *pb.PackageCreateRsp) error { func (h *Handler) PackageCreate(ctx context.Context, req *pb.PackageCreateReq, rsp *pb.PackageCreateRsp) error {
logger.Debug(ctx, "PackagesCreate handler start") h.logger.Debug(ctx, "PackagesCreate handler start")
if err := req.Validate(); err != nil { if err := req.Validate(); err != nil {
logger.Error(ctx, "validate error", err) h.logger.Error(ctx, "validate error", err)
httpsrv.SetRspCode(ctx, http.StatusBadRequest) httpsrv.SetRspCode(ctx, http.StatusBadRequest)
return httpsrv.SetError(NewValidationError(err)) return httpsrv.SetError(NewValidationError(err))
} }
pkg, err := h.store.PackageCreate(ctx, req) pkg, err := h.store.PackageCreate(ctx, req)
if err != nil { if err != nil {
logger.Error(ctx, "package create error", err) h.logger.Error(ctx, "package create error", err)
httpsrv.SetRspCode(ctx, http.StatusBadRequest) httpsrv.SetRspCode(ctx, http.StatusBadRequest)
return httpsrv.SetError(NewValidationError(err)) return httpsrv.SetError(NewValidationError(err))
} }
rsp.Package = models.NewPackage(pkg) rsp.Package = models.NewPackage(pkg)
logger.Debug(ctx, "PackagesCreate handler stop") h.logger.Debug(ctx, "PackagesCreate handler stop")
return nil return nil
} }


@ -4,26 +4,25 @@ import (
"context" "context"
"net/http" "net/http"
pb "git.unistack.org/unistack-org/pkgdash/proto" httpsrv "go.unistack.org/micro-server-http/v3"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) PackageDelete(ctx context.Context, req *pb.PackageDeleteReq, rsp *pb.PackageDeleteRsp) error { func (h *Handler) PackageDelete(ctx context.Context, req *pb.PackageDeleteReq, rsp *pb.PackageDeleteRsp) error {
logger.Debug(ctx, "Start UpdatePackage") h.logger.Debug(ctx, "Start UpdatePackage")
if err := req.Validate(); err != nil { if err := req.Validate(); err != nil {
logger.Error(ctx, "validate error", err) h.logger.Error(ctx, "validate error", err)
httpsrv.SetRspCode(ctx, http.StatusBadRequest) httpsrv.SetRspCode(ctx, http.StatusBadRequest)
return httpsrv.SetError(NewValidationError(err)) return httpsrv.SetError(NewValidationError(err))
} }
if err := h.store.PackageDelete(ctx, req); err != nil { if err := h.store.PackageDelete(ctx, req); err != nil {
logger.Error(ctx, "package delete error", err) h.logger.Error(ctx, "package delete error", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
logger.Debug(ctx, "Success finish UpdatePackage") h.logger.Debug(ctx, "Success finish UpdatePackage")
return nil return nil
} }


@ -4,18 +4,17 @@ import (
"context" "context"
"net/http" "net/http"
"git.unistack.org/unistack-org/pkgdash/internal/models" httpsrv "go.unistack.org/micro-server-http/v3"
pb "git.unistack.org/unistack-org/pkgdash/proto" "go.unistack.org/pkgdash/internal/models"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) PackageList(ctx context.Context, req *pb.PackageListReq, rsp *pb.PackageListRsp) error { func (h *Handler) PackageList(ctx context.Context, req *pb.PackageListReq, rsp *pb.PackageListRsp) error {
logger.Debug(ctx, "PackagesList handler start") h.logger.Debug(ctx, "PackagesList handler start")
packages, err := h.store.PackageList(ctx, req) packages, err := h.store.PackageList(ctx, req)
if err != nil { if err != nil {
logger.Error(ctx, "error db response: %v", err) h.logger.Error(ctx, "error db response: %v", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
@ -23,6 +22,6 @@ func (h *Handler) PackageList(ctx context.Context, req *pb.PackageListReq, rsp *
for _, pkg := range packages { for _, pkg := range packages {
rsp.Packages = append(rsp.Packages, models.NewPackage(pkg)) rsp.Packages = append(rsp.Packages, models.NewPackage(pkg))
} }
logger.Debug(ctx, "PackagesList handler stop") h.logger.Debug(ctx, "PackagesList handler stop")
return nil return nil
} }


@ -4,30 +4,29 @@ import (
"context" "context"
"net/http" "net/http"
"git.unistack.org/unistack-org/pkgdash/internal/models" httpsrv "go.unistack.org/micro-server-http/v3"
pb "git.unistack.org/unistack-org/pkgdash/proto" "go.unistack.org/pkgdash/internal/models"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) PackageLookup(ctx context.Context, req *pb.PackageLookupReq, rsp *pb.PackageLookupRsp) error { func (h *Handler) PackageLookup(ctx context.Context, req *pb.PackageLookupReq, rsp *pb.PackageLookupRsp) error {
logger.Debug(ctx, "Start PackagesLookup") h.logger.Debug(ctx, "Start PackagesLookup")
if err := req.Validate(); err != nil { if err := req.Validate(); err != nil {
logger.Error(ctx, "validate error", err) h.logger.Error(ctx, "validate error", err)
httpsrv.SetRspCode(ctx, http.StatusBadRequest) httpsrv.SetRspCode(ctx, http.StatusBadRequest)
return httpsrv.SetError(NewValidationError(err)) return httpsrv.SetError(NewValidationError(err))
} }
pkg, err := h.store.PackageLookup(ctx, req) pkg, err := h.store.PackageLookup(ctx, req)
if err != nil { if err != nil {
logger.Error(ctx, "package lookup", err) h.logger.Error(ctx, "package lookup", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
rsp.Package = models.NewPackage(pkg) rsp.Package = models.NewPackage(pkg)
logger.Debug(ctx, "Success finish PackagesLookup") h.logger.Debug(ctx, "Success finish PackagesLookup")
return nil return nil
} }


@ -4,18 +4,17 @@ import (
"context" "context"
"net/http" "net/http"
"git.unistack.org/unistack-org/pkgdash/internal/models" httpsrv "go.unistack.org/micro-server-http/v3"
pb "git.unistack.org/unistack-org/pkgdash/proto" "go.unistack.org/pkgdash/internal/models"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) PackageModules(ctx context.Context, req *pb.PackageModulesReq, rsp *pb.PackageModulesRsp) error { func (h *Handler) PackageModules(ctx context.Context, req *pb.PackageModulesReq, rsp *pb.PackageModulesRsp) error {
logger.Debug(ctx, "PackageModules handler start") h.logger.Debug(ctx, "PackageModules handler start")
modules, err := h.store.PackageModules(ctx, req) modules, err := h.store.PackageModules(ctx, req)
if err != nil { if err != nil {
logger.Error(ctx, "error db response: %v", err) h.logger.Error(ctx, "error db response: %v", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
@ -23,6 +22,6 @@ func (h *Handler) PackageModules(ctx context.Context, req *pb.PackageModulesReq,
for _, mod := range modules { for _, mod := range modules {
rsp.Modules = append(rsp.Modules, models.NewModule(mod)) rsp.Modules = append(rsp.Modules, models.NewModule(mod))
} }
logger.Debug(ctx, "PackagesModules handler stop") h.logger.Debug(ctx, "PackagesModules handler stop")
return nil return nil
} }


@ -4,30 +4,29 @@ import (
"context" "context"
"net/http" "net/http"
"git.unistack.org/unistack-org/pkgdash/internal/models" httpsrv "go.unistack.org/micro-server-http/v3"
pb "git.unistack.org/unistack-org/pkgdash/proto" "go.unistack.org/pkgdash/internal/models"
httpsrv "go.unistack.org/micro-server-http/v4" pb "go.unistack.org/pkgdash/proto"
"go.unistack.org/micro/v4/logger"
) )
func (h *Handler) PackageUpdate(ctx context.Context, req *pb.PackageUpdateReq, rsp *pb.PackageUpdateRsp) error { func (h *Handler) PackageUpdate(ctx context.Context, req *pb.PackageUpdateReq, rsp *pb.PackageUpdateRsp) error {
logger.Debug(ctx, "Start UpdatePackage") h.logger.Debug(ctx, "Start UpdatePackage")
if err := req.Validate(); err != nil { if err := req.Validate(); err != nil {
logger.Error(ctx, "validate error", err) h.logger.Error(ctx, "validate error", err)
httpsrv.SetRspCode(ctx, http.StatusBadRequest) httpsrv.SetRspCode(ctx, http.StatusBadRequest)
return httpsrv.SetError(NewValidationError(err)) return httpsrv.SetError(NewValidationError(err))
} }
pkg, err := h.store.PackageUpdate(ctx, req) pkg, err := h.store.PackageUpdate(ctx, req)
if err != nil { if err != nil {
logger.Error(ctx, "package update error", err) h.logger.Error(ctx, "package update error", err)
httpsrv.SetRspCode(ctx, http.StatusInternalServerError) httpsrv.SetRspCode(ctx, http.StatusInternalServerError)
return httpsrv.SetError(NewInternalError(err)) return httpsrv.SetError(NewInternalError(err))
} }
rsp.Package = models.NewPackage(pkg) rsp.Package = models.NewPackage(pkg)
logger.Debug(ctx, "Success finish UpdatePackage") h.logger.Debug(ctx, "Success finish UpdatePackage")
return nil return nil
} }


@@ -4,7 +4,7 @@ import (
    "database/sql"
    "time"
-    pb "git.unistack.org/unistack-org/pkgdash/proto"
+    pb "go.unistack.org/pkgdash/proto"
    "google.golang.org/protobuf/types/known/timestamppb"
)


@ -14,19 +14,20 @@ import (
"text/template" "text/template"
"time" "time"
"git.unistack.org/unistack-org/pkgdash/internal/configcli"
"git.unistack.org/unistack-org/pkgdash/internal/modules"
"github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5"
gitconfig "github.com/go-git/go-git/v5/config" gitconfig "github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/plumbing/object"
httpauth "github.com/go-git/go-git/v5/plumbing/transport/http" httpauth "github.com/go-git/go-git/v5/plumbing/transport/http"
"go.unistack.org/micro/v4/logger" "go.unistack.org/micro/v3/logger"
"go.unistack.org/pkgdash/internal/configcli"
"go.unistack.org/pkgdash/internal/modules"
) )
var ErrPRNotExist = errors.New("pull request does not exist")
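ErrPRNotExist is a sentinel: callers such as RequestOpen below treat it as "no pull request yet" rather than as a failure. A stand-alone illustration of that check, using errors.Is so wrapped errors still match; this is an example, not code from the commit.

package main

import (
    "errors"
    "fmt"
)

var ErrPRNotExist = errors.New("pull request does not exist")

// lookupPull stands in for a call such as g.GetPulls in the code below.
func lookupPull() error {
    return fmt.Errorf("gitea: %w", ErrPRNotExist) // wrapped sentinel
}

func main() {
    err := lookupPull()
    if errors.Is(err, ErrPRNotExist) {
        fmt.Println("no pull request yet, safe to open a new one")
        return
    }
    if err != nil {
        fmt.Println("unexpected error:", err)
    }
}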
type Gitea struct { type Gitea struct {
logger logger.Logger
URL string URL string
Username string Username string
Password string Password string
@ -38,8 +39,9 @@ type Gitea struct {
baseRef *plumbing.Reference baseRef *plumbing.Reference
} }
func NewGitea(cfg configcli.Config) *Gitea { func NewGitea(cfg configcli.Config, log logger.Logger) *Gitea {
return &Gitea{ return &Gitea{
logger: log,
URL: cfg.Source.APIURL, URL: cfg.Source.APIURL,
Username: cfg.Source.Username, Username: cfg.Source.Username,
Password: cfg.Source.Password, Password: cfg.Source.Password,
@ -67,21 +69,21 @@ func (g *Gitea) Name() string {
} }
func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error { func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error {
logger.Debug(ctx, fmt.Sprintf("RequestOpen start, mod title: %s", path)) g.logger.Debug(ctx, fmt.Sprintf("RequestOpen start, mod title: %s", path))
var buf []byte var buf []byte
var err error var err error
    // build the template for the pull request title
tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle) tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle)
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
} }
wTitle := bytes.NewBuffer(nil) wTitle := bytes.NewBuffer(nil)
    // build the template for the pull request body
tplBody, err := template.New("pull_request_body").Parse(g.PRTitle) tplBody, err := template.New("pull_request_body").Parse(g.PRTitle)
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
} }
wBody := bytes.NewBuffer(nil) wBody := bytes.NewBuffer(nil)
@ -93,37 +95,37 @@ func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod
} }
if err = tplTitle.Execute(wTitle, data); err != nil { if err = tplTitle.Execute(wTitle, data); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
} }
if err = tplBody.Execute(wBody, data); err != nil { if err = tplBody.Execute(wBody, data); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
} }
    // open the git repository, walking up the tree to locate .git
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true}) repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
} }
    // fetch refs and their objects from the remote
if err = repo.FetchContext(ctx, &git.FetchOptions{ if err = repo.FetchContext(ctx, &git.FetchOptions{
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password}, Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
Force: true, Force: true,
}); err != nil && err != git.NoErrAlreadyUpToDate { }); err != nil && err != git.NoErrAlreadyUpToDate {
logger.Fatal(ctx, fmt.Sprintf("failed to fetch repo : %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to fetch repo : %v", err))
    } // refresh the repository
    var headRef *plumbing.Reference // apparently a reference into the git repo
if g.baseRef == nil { if g.baseRef == nil {
g.baseRef, err = repo.Head() g.baseRef, err = repo.Head()
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("Error head: %s", err)) g.logger.Fatal(ctx, fmt.Sprintf("Error head: %s", err))
} }
} }
    refIter, err := repo.Branches() // get the branches
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to get branches: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to get branches: %v", err))
return err return err
} }
for { for {
@ -131,45 +133,45 @@ func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod
if err != nil { if err != nil {
break break
} }
if ref.Name().Short() == branch { //todo this probably needs to be reworked if ref.Name().Short() == branch { // todo this probably needs to be reworked
headRef = ref headRef = ref
break break
} }
} //iterate over the branches and, once the needed one is found, leave the loop with it stored in headRef } // iterate over the branches and, once the needed one is found, leave the loop with it stored in headRef
refIter.Close() refIter.Close()
if headRef == nil { if headRef == nil {
logger.Fatal(ctx, "failed to get repo branch head") g.logger.Fatal(ctx, "failed to get repo branch head")
return err return err
} // the needed branch was not found } // the needed branch was not found
logger.Info(ctx, fmt.Sprintf("repo head %s", headRef)) g.logger.Info(ctx, fmt.Sprintf("repo head %s", headRef))
wtree, err := repo.Worktree() //todo the worktree does not seem to be needed wtree, err := repo.Worktree() // todo the worktree does not seem to be needed
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to get worktree: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to get worktree: %v", err))
} }
defer checkout(*wtree, *g.baseRef) defer g.checkout(*wtree, *g.baseRef)
g.pulls, err = GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
if err != nil { if err != nil && err != ErrPRNotExist {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return err return err
} }
for _, pull := range g.pulls { for _, pull := range g.pulls {
if strings.Contains(pull.Title, path) && strings.Contains(pull.Base.Ref, branch) { if strings.Contains(pull.Title, path) && strings.Contains(pull.Base.Ref, branch) {
logger.Info(ctx, fmt.Sprintf("PR for %s exists %s, call RequestUpdate", path, pull.URL)) g.logger.Info(ctx, fmt.Sprintf("PR for %s exists %s, call RequestUpdate", path, pull.URL))
return g.RequestUpdate(ctx, branch, path, mod) return g.RequestUpdate(ctx, branch, path, mod)
} // check whether a pull request already exists for this branch; if it does, return } // check whether a pull request already exists for this branch; if it does, return
} }
logger.Info(ctx, fmt.Sprintf("update %s from %s to %s", path, mod.Module.Version, mod.Version)) g.logger.Info(ctx, fmt.Sprintf("update %s from %s to %s", path, mod.Module.Version, mod.Version))
logger.Info(ctx, "reset worktree") g.logger.Info(ctx, "reset worktree")
if err = wtree.Reset(&git.ResetOptions{Commit: headRef.Hash(), Mode: git.HardReset}); err != nil { if err = wtree.Reset(&git.ResetOptions{Commit: headRef.Hash(), Mode: git.HardReset}); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to reset repo branch: %v", err)) g.logger.Error(ctx, fmt.Sprintf("failed to reset repo branch: %v", err))
} //seems to switch the branch, todo can probably be removed } // seems to switch the branch, todo can probably be removed
if err = wtree.PullContext(ctx, &git.PullOptions{ if err = wtree.PullContext(ctx, &git.PullOptions{
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password}, Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
@ -178,26 +180,26 @@ func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod
Force: true, Force: true,
RemoteName: "origin", RemoteName: "origin",
}); err != nil && err != git.NoErrAlreadyUpToDate { }); err != nil && err != git.NoErrAlreadyUpToDate {
logger.Error(ctx, fmt.Sprintf("failed to pull repo: %v", err)) //подтягиваем изменения с удаленого репозитория g.logger.Error(ctx, fmt.Sprintf("failed to pull repo: %v", err)) // подтягиваем изменения с удаленого репозитория
} }
logger.Info(ctx, fmt.Sprintf("checkout ref %s", headRef)) g.logger.Info(ctx, fmt.Sprintf("checkout ref %s", headRef))
if err = wtree.Checkout(&git.CheckoutOptions{ if err = wtree.Checkout(&git.CheckoutOptions{
Hash: headRef.Hash(), Hash: headRef.Hash(),
Branch: plumbing.NewBranchReferenceName(fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)), Branch: plumbing.NewBranchReferenceName(fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)),
Create: true, Create: true,
Force: true, Force: true,
}); err != nil { }); err != nil && err != git.ErrBranchExists && err != git.ErrInvalidReference {
logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err)) g.logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err))
return err return err
} //create a new branch } // create a new branch
epath, err := exec.LookPath("go") epath, err := exec.LookPath("go")
if errors.Is(err, exec.ErrDot) { if errors.Is(err, exec.ErrDot) {
err = nil err = nil
} }
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to find go command: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to find go command: %v", err))
} // look for the go binary } // look for the go binary
var cmd *exec.Cmd var cmd *exec.Cmd
@ -205,30 +207,30 @@ func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-droprequire=%s", mod.Module.Path)) cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-droprequire=%s", mod.Module.Path))
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
} }
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version)) cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version))
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
} // try to run go mod edit with the new module version } // try to run go mod edit with the new module version
cmd = exec.CommandContext(ctx, epath, "mod", "tidy") cmd = exec.CommandContext(ctx, epath, "mod", "tidy")
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to run go mod tidy: %s err: %v", out, err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod tidy: %s err: %v", out, err))
} // try to run go mod tidy to pull in the new module version } // try to run go mod tidy to pull in the new module version
logger.Info(ctx, "worktree add go.mod") g.logger.Info(ctx, "worktree add go.mod")
if _, err = wtree.Add("go.mod"); err != nil { if _, err = wtree.Add("go.mod"); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
} }
logger.Info(ctx, "worktree add go.sum") g.logger.Info(ctx, "worktree add go.sum")
if _, err = wtree.Add("go.sum"); err != nil { if _, err = wtree.Add("go.sum"); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
} }
logger.Info(ctx, "worktree commit") g.logger.Info(ctx, "worktree commit")
_, err = wtree.Commit(wTitle.String(), &git.CommitOptions{ _, err = wtree.Commit(wTitle.String(), &git.CommitOptions{
Parents: []plumbing.Hash{headRef.Hash()}, Parents: []plumbing.Hash{headRef.Hash()},
Author: &object.Signature{ Author: &object.Signature{
@ -238,19 +240,19 @@ func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod
}, },
}) // commit the changes }) // commit the changes
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to commit: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to commit: %v", err))
} }
refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) //todo this seems to need reworking refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) // todo this seems to need reworking
logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec)) g.logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec))
if err = repo.PushContext(ctx, &git.PushOptions{ if err = repo.PushContext(ctx, &git.PushOptions{
RefSpecs: []gitconfig.RefSpec{refspec}, RefSpecs: []gitconfig.RefSpec{refspec},
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password}, Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
Force: true, Force: true,
}); err != nil { }); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to push repo branch: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to push repo branch: %v", err))
} // try to push the changes } // try to push the changes
body := map[string]string{ body := map[string]string{
@ -259,14 +261,14 @@ func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod
"head": fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version), "head": fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version),
"title": wTitle.String(), "title": wTitle.String(),
} }
logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body)) g.logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body))
buf, err = json.Marshal(body) buf, err = json.Marshal(body)
if err != nil { if err != nil {
return err return err
} }
logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf)) g.logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf))
req, err := http.NewRequestWithContext( req, err := http.NewRequestWithContext(
ctx, ctx,
@ -284,29 +286,29 @@ func (g *Gitea) RequestOpen(ctx context.Context, branch string, path string, mod
rsp, err := http.DefaultClient.Do(req) rsp, err := http.DefaultClient.Do(req)
if err != nil { if err != nil {
return err return err
} //Seems we send a new request to create the pull request } // Seems we send a new request to create the pull request
if rsp.StatusCode != http.StatusCreated { if rsp.StatusCode != http.StatusCreated {
buf, _ = io.ReadAll(rsp.Body) buf, _ = io.ReadAll(rsp.Body)
return fmt.Errorf("unknown error: %s", buf) return fmt.Errorf("unknown error: %s", buf)
} }
logger.Info(ctx, fmt.Sprintf("PR create for %s-%s", path, mod.Version)) g.logger.Info(ctx, fmt.Sprintf("PR create for %s-%s", path, mod.Version))
repo, err = git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true}) repo, err = git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
} }
return nil return nil
} }
func (g *Gitea) RequestClose(ctx context.Context, branch string, path string) error { func (g *Gitea) RequestClose(ctx context.Context, branch string, path string) error {
logger.Debug(ctx, fmt.Sprintf("RequestClose start, mod title: %s", path)) g.logger.Debug(ctx, fmt.Sprintf("RequestClose start, mod title: %s", path))
var err error var err error
g.pulls, err = GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return err return err
} }
@ -314,67 +316,67 @@ func (g *Gitea) RequestClose(ctx context.Context, branch string, path string) er
var b string // Name of the branch to be deleted var b string // Name of the branch to be deleted
for _, pull := range g.pulls { for _, pull := range g.pulls {
if strings.Contains(pull.Title, path) && pull.Base.Ref == branch { if strings.Contains(pull.Title, path) && pull.Base.Ref == branch {
logger.Info(ctx, fmt.Sprintf("PR for %s exists: %s", path, pull.URL)) g.logger.Info(ctx, fmt.Sprintf("PR for %s exists: %s", path, pull.URL))
prExist = true prExist = true
b = pull.Head.Ref b = pull.Head.Ref
} }
} }
if !prExist { if !prExist {
logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path)) g.logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path))
return ErrPRNotExist return ErrPRNotExist
} }
req, err := DeleteBranch(ctx, g.URL, g.Owner, g.Repository, b, g.Password) req, err := g.DeleteBranch(ctx, g.URL, g.Owner, g.Repository, b, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("failed to create request for delete the branch: %s, err: %s", branch, err)) g.logger.Error(ctx, fmt.Sprintf("failed to create request for delete the branch: %s, err: %s", branch, err))
return err return err
} }
rsp, err := http.DefaultClient.Do(req) rsp, err := http.DefaultClient.Do(req)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("failed to do request for delete the branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode)) g.logger.Error(ctx, fmt.Sprintf("failed to do request for delete the branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode))
return err return err
} }
logger.Info(ctx, fmt.Sprintf("Delete branch for %s successful", path)) g.logger.Info(ctx, fmt.Sprintf("Delete branch for %s successful", path))
return nil return nil
} }
func (g *Gitea) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error { func (g *Gitea) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error {
logger.Debug(ctx, fmt.Sprintf("RequestUpdate start, mod title: %s", path)) g.logger.Debug(ctx, fmt.Sprintf("RequestUpdate start, mod title: %s", path))
var err error var err error
g.pulls, err = GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return err return err
} }
prExist := false prExist := false
for _, pull := range g.pulls { for _, pull := range g.pulls {
if strings.Contains(pull.Title, path) && pull.Base.Ref == branch { if strings.Contains(pull.Title, path) && pull.Base.Ref == branch {
logger.Info(ctx, fmt.Sprintf("don't skip %s since pr exist %s", path, pull.URL)) //todo g.logger.Info(ctx, fmt.Sprintf("don't skip %s since pr exist %s", path, pull.URL)) // todo
tVersion := getVersions(pull.Head.Ref) //Just take the latest version from the branch name tVersion := getVersions(pull.Head.Ref) // Just take the latest version from the branch name
if modules.IsNewerVersion(tVersion, mod.Version, false) { if modules.IsNewerVersion(tVersion, mod.Version, false) {
reqDel, err := DeleteBranch(ctx, g.URL, g.Owner, g.Repository, pull.Head.Ref, g.Password) reqDel, err := g.DeleteBranch(ctx, g.URL, g.Owner, g.Repository, pull.Head.Ref, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("Error with create request for branch: %s, err: %s", branch, err)) g.logger.Error(ctx, fmt.Sprintf("Error with create request for branch: %s, err: %s", branch, err))
return err return err
} }
rsp, err := http.DefaultClient.Do(reqDel) rsp, err := http.DefaultClient.Do(reqDel)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("Error with do request for branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode)) g.logger.Error(ctx, fmt.Sprintf("Error with do request for branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode))
return err return err
} }
logger.Info(ctx, fmt.Sprintf("Old pr %s successful delete", pull.Head.Ref)) g.logger.Info(ctx, fmt.Sprintf("Old pr %s successful delete", pull.Head.Ref))
} else { } else {
logger.Debug(ctx, "The existing PR is relevant") g.logger.Debug(ctx, "The existing PR is relevant")
return nil return nil
} }
prExist = true prExist = true
} }
} }
if !prExist { if !prExist {
logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path)) g.logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path))
return ErrPRNotExist return ErrPRNotExist
} }
@ -382,12 +384,12 @@ func (g *Gitea) RequestUpdate(ctx context.Context, branch string, path string, m
} }
func (g *Gitea) RequestList(ctx context.Context, branch string) (map[string]string, error) { func (g *Gitea) RequestList(ctx context.Context, branch string) (map[string]string, error) {
logger.Debug(ctx, fmt.Sprintf("RequestList for %s", branch)) g.logger.Debug(ctx, fmt.Sprintf("RequestList for %s", branch))
var err error var err error
g.pulls, err = GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return nil, err return nil, err
} }
@ -395,10 +397,10 @@ func (g *Gitea) RequestList(ctx context.Context, branch string) (map[string]stri
rMap := make(map[string]string) rMap := make(map[string]string)
for _, pull := range g.pulls { for _, pull := range g.pulls {
if !strings.HasPrefix(pull.Title, "Bump ") || pull.Base.Ref != branch { //only include the bot's module update requests if !strings.HasPrefix(pull.Title, "Bump ") || pull.Base.Ref != branch { // only include the bot's module update requests
continue continue
} }
path = strings.Split(pull.Title, " ")[1] //todo works only for the default template path = strings.Split(pull.Title, " ")[1] // todo works only for the default template
rMap[path] = pull.Title rMap[path] = pull.Title
} }
return rMap, nil return rMap, nil
@ -412,7 +414,7 @@ func getVersions(s string) string {
return version return version
} }
func DeleteBranch(ctx context.Context, url, owner, repo, branch, password string) (*http.Request, error) { func (g *Gitea) DeleteBranch(ctx context.Context, url, owner, repo, branch, password string) (*http.Request, error) {
var buf []byte var buf []byte
req, err := http.NewRequestWithContext(ctx, http.MethodDelete, fmt.Sprintf("https://%s/api/v1/repos/%s/%s/branches/%s", url, owner, repo, branch), bytes.NewReader(buf)) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, fmt.Sprintf("https://%s/api/v1/repos/%s/%s/branches/%s", url, owner, repo, branch), bytes.NewReader(buf))
if err != nil { if err != nil {
@ -424,7 +426,7 @@ func DeleteBranch(ctx context.Context, url, owner, repo, branch, password string
return req, err return req, err
} }
func GetPulls(ctx context.Context, url, owner, repo, password string) ([]*giteaPull, error) { func (g *Gitea) GetPulls(ctx context.Context, url, owner, repo, password string) ([]*giteaPull, error) {
var pullsAll []*giteaPull var pullsAll []*giteaPull
page := 1 page := 1
@ -437,7 +439,7 @@ func GetPulls(ctx context.Context, url, owner, repo, password string) ([]*giteaP
nil) nil)
if err != nil { if err != nil {
return nil, err return nil, err
} //seems to be the request to the repository } // seems to be the request to the repository
req.Header.Add("Accept", "application/json") req.Header.Add("Accept", "application/json")
req.Header.Add("Content-Type", "application/json") req.Header.Add("Content-Type", "application/json")
@ -453,13 +455,13 @@ func GetPulls(ctx context.Context, url, owner, repo, password string) ([]*giteaP
switch rsp.StatusCode { switch rsp.StatusCode {
case http.StatusOK: case http.StatusOK:
if err = json.Unmarshal(buf, &pulls); err != nil { if err = json.Unmarshal(buf, &pulls); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err)) g.logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err))
return nil, err return nil, err
} }
pullsAll = append(pullsAll, pulls...) pullsAll = append(pullsAll, pulls...)
page++ page++
case http.StatusNotFound: case http.StatusNotFound:
logger.Info(ctx, fmt.Sprintf("pull-request is not exist for %s", repo)) g.logger.Info(ctx, fmt.Sprintf("pull-request is not exist for %s", repo))
return nil, ErrPRNotExist return nil, ErrPRNotExist
default: default:
return nil, fmt.Errorf("unknown error: %s", buf) return nil, fmt.Errorf("unknown error: %s", buf)
@ -473,9 +475,9 @@ func GetPulls(ctx context.Context, url, owner, repo, password string) ([]*giteaP
return pullsAll, nil return pullsAll, nil
} }
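For reference, GetPulls above walks the pull-request listing page by page: on 200 it appends the decoded pulls and bumps the page counter, a 404 is mapped to ErrPRNotExist, and anything else aborts. The following self-contained sketch shows the same pagination pattern against a stand-in endpoint; the URL shape, query parameters and JSON field used here are illustrative assumptions, not the project's exact request code.

package main

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

// errNotFound stands in for ErrPRNotExist.
var errNotFound = errors.New("pull request does not exist")

type pull struct {
	Title string `json:"title"`
}

// listAll pages through <base>/pulls?page=N until an empty page or a 404.
func listAll(ctx context.Context, base string) ([]*pull, error) {
	var all []*pull
	for page := 1; ; page++ {
		req, err := http.NewRequestWithContext(ctx, http.MethodGet,
			fmt.Sprintf("%s/pulls?page=%d&state=open", base, page), nil)
		if err != nil {
			return nil, err
		}
		rsp, err := http.DefaultClient.Do(req)
		if err != nil {
			return nil, err
		}
		buf, _ := io.ReadAll(rsp.Body)
		rsp.Body.Close()
		switch rsp.StatusCode {
		case http.StatusOK:
			var pulls []*pull
			if err := json.Unmarshal(buf, &pulls); err != nil {
				return nil, err
			}
			if len(pulls) == 0 {
				return all, nil // no more pages
			}
			all = append(all, pulls...)
		case http.StatusNotFound:
			return nil, errNotFound
		default:
			return nil, fmt.Errorf("unknown error: %s", buf)
		}
	}
}

func main() {
	// Fake listing endpoint: page 1 returns one PR, later pages are empty.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Query().Get("page") == "1" {
			fmt.Fprint(w, `[{"title":"Bump example.org/mod from v1.0.0 to v1.1.0"}]`)
			return
		}
		fmt.Fprint(w, `[]`)
	}))
	defer srv.Close()

	pulls, err := listAll(context.Background(), srv.URL)
	fmt.Println(len(pulls), err) // 1 <nil>
}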
func checkout(w git.Worktree, ref plumbing.Reference) { func (g *Gitea) checkout(w git.Worktree, ref plumbing.Reference) {
ctx := context.Background() ctx := context.Background()
logger.Debug(ctx, fmt.Sprintf("Checkout: %s", ref.Name().Short())) g.logger.Debug(ctx, fmt.Sprintf("Checkout: %s", ref.Name().Short()))
if err := w.Checkout(&git.CheckoutOptions{ if err := w.Checkout(&git.CheckoutOptions{
Branch: ref.Name(), Branch: ref.Name(),
@ -483,6 +485,6 @@ func checkout(w git.Worktree, ref plumbing.Reference) {
Force: true, Force: true,
Keep: false, Keep: false,
}); err != nil { }); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to reset: %v", err)) g.logger.Error(ctx, fmt.Sprintf("failed to reset: %v", err))
} }
} }
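Throughout this file the package-level logger calls become calls on an injected g.logger field, and the former free functions (GetPulls, DeleteBranch, checkout) become methods so they can reach it. The small sketch below illustrates that injection pattern only; the real field holds a go.unistack.org/micro/v3/logger.Logger, and the interface, method signatures and names here are assumptions made for the example.

package main

import (
	"context"
	"fmt"
)

// Logger is a stand-in for the injected logger interface (illustrative, not
// the actual go.unistack.org/micro/v3/logger API).
type Logger interface {
	Info(ctx context.Context, msg string)
	Error(ctx context.Context, msg string)
}

type stdLogger struct{}

func (stdLogger) Info(ctx context.Context, msg string)  { fmt.Println("INFO:", msg) }
func (stdLogger) Error(ctx context.Context, msg string) { fmt.Println("ERROR:", msg) }

// Provider mirrors the shape of the Gitea/Github/Gitlab structs after the
// change: the logger is a field, so helpers are methods rather than
// package-level functions.
type Provider struct {
	logger Logger
	Name   string
}

func NewProvider(name string, log Logger) *Provider {
	return &Provider{Name: name, logger: log}
}

func (p *Provider) RequestList(ctx context.Context, branch string) {
	p.logger.Info(ctx, fmt.Sprintf("RequestList for %s on %s", branch, p.Name))
}

func main() {
	p := NewProvider("gitea", stdLogger{})
	p.RequestList(context.Background(), "master")
}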

View File

@ -13,19 +13,20 @@ import (
"text/template" "text/template"
"time" "time"
"git.unistack.org/unistack-org/pkgdash/internal/configcli"
"git.unistack.org/unistack-org/pkgdash/internal/modules"
"github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5"
gitconfig "github.com/go-git/go-git/v5/config" gitconfig "github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/plumbing/object"
httpauth "github.com/go-git/go-git/v5/plumbing/transport/http" httpauth "github.com/go-git/go-git/v5/plumbing/transport/http"
"go.unistack.org/micro/v4/logger" "go.unistack.org/micro/v3/logger"
"go.unistack.org/pkgdash/internal/configcli"
"go.unistack.org/pkgdash/internal/modules"
) )
var ErrPRNotExist = errors.New("pull request does not exist") var ErrPRNotExist = errors.New("pull request does not exist")
type Github struct { type Github struct {
logger logger.Logger
URL string URL string
Username string Username string
Password string Password string
@ -37,8 +38,9 @@ type Github struct {
baseRef *plumbing.Reference baseRef *plumbing.Reference
} }
func NewGithub(cfg configcli.Config) *Github { func NewGithub(cfg configcli.Config, log logger.Logger) *Github {
return &Github{ return &Github{
logger: log,
URL: cfg.Source.APIURL, URL: cfg.Source.APIURL,
Username: cfg.Source.Username, Username: cfg.Source.Username,
Password: cfg.Source.Password, Password: cfg.Source.Password,
@ -66,21 +68,21 @@ func (g *Github) Name() string {
} }
func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error { func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error {
logger.Debug(ctx, fmt.Sprintf("RequestOpen start, mod title: %s", path)) g.logger.Debug(ctx, fmt.Sprintf("RequestOpen start, mod title: %s", path))
var buf []byte var buf []byte
var err error var err error
// create the pull request title template // create the pull request title template
tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle) tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle)
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
} }
wTitle := bytes.NewBuffer(nil) wTitle := bytes.NewBuffer(nil)
// create the pull request body template // create the pull request body template
tplBody, err := template.New("pull_request_body").Parse(g.PRTitle) tplBody, err := template.New("pull_request_body").Parse(g.PRTitle)
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
} }
wBody := bytes.NewBuffer(nil) wBody := bytes.NewBuffer(nil)
@ -92,37 +94,37 @@ func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mo
} }
if err = tplTitle.Execute(wTitle, data); err != nil { if err = tplTitle.Execute(wTitle, data); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
} }
if err = tplBody.Execute(wBody, data); err != nil { if err = tplBody.Execute(wBody, data); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
} }
// open the git repository, walking up the tree to detect .git // open the git repository, walking up the tree to detect .git
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true}) repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
} }
//fetch refs and objects from the remote?? // fetch refs and objects from the remote??
if err = repo.FetchContext(ctx, &git.FetchOptions{ if err = repo.FetchContext(ctx, &git.FetchOptions{
// Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password}, // Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
Force: true, Force: true,
}); err != nil && err != git.NoErrAlreadyUpToDate { }); err != nil && err != git.NoErrAlreadyUpToDate {
logger.Fatal(ctx, fmt.Sprintf("failed to fetch repo : %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to fetch repo : %v", err))
} //update the repository } // update the repository
var headRef *plumbing.Reference // seems to be the git reference var headRef *plumbing.Reference // seems to be the git reference
if g.baseRef == nil { if g.baseRef == nil {
g.baseRef, err = repo.Head() g.baseRef, err = repo.Head()
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("Error head: %s", err)) g.logger.Fatal(ctx, fmt.Sprintf("Error head: %s", err))
} }
} }
refIter, err := repo.Branches() //get the branches refIter, err := repo.Branches() // get the branches
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to get branches: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to get branches: %v", err))
return err return err
} }
for { for {
@ -130,45 +132,45 @@ func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mo
if err != nil { if err != nil {
break break
} }
if ref.Name().Short() == branch { //todo this probably needs to be reworked if ref.Name().Short() == branch { // todo this probably needs to be reworked
headRef = ref headRef = ref
break break
} }
} //iterate over the branches and, once the needed one is found, leave the loop with it stored in headRef } // iterate over the branches and, once the needed one is found, leave the loop with it stored in headRef
refIter.Close() refIter.Close()
if headRef == nil { if headRef == nil {
logger.Fatal(ctx, "failed to get repo branch head") g.logger.Fatal(ctx, "failed to get repo branch head")
return err return err
} // the needed branch was not found } // the needed branch was not found
logger.Info(ctx, fmt.Sprintf("repo head %s", headRef)) g.logger.Info(ctx, fmt.Sprintf("repo head %s", headRef))
wtree, err := repo.Worktree() //todo the worktree does not seem to be needed wtree, err := repo.Worktree() // todo the worktree does not seem to be needed
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to get worktree: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to get worktree: %v", err))
} }
defer checkout(*wtree, *g.baseRef) defer g.checkout(*wtree, *g.baseRef)
g.pulls, err = GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
if err != nil { if err != nil && err != ErrPRNotExist {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return err return err
} }
for _, pull := range g.pulls { for _, pull := range g.pulls {
if strings.Contains(pull.Title, path) && strings.Contains(pull.Base.Ref, branch) { if strings.Contains(pull.Title, path) && strings.Contains(pull.Base.Ref, branch) {
logger.Info(ctx, fmt.Sprintf("PR for %s exists %s, call RequestUpdate", path, pull.URL)) g.logger.Info(ctx, fmt.Sprintf("PR for %s exists %s, call RequestUpdate", path, pull.URL))
return g.RequestUpdate(ctx, branch, path, mod) return g.RequestUpdate(ctx, branch, path, mod)
} // check whether a pull request already exists for this branch; if it does, return } // check whether a pull request already exists for this branch; if it does, return
} }
logger.Info(ctx, fmt.Sprintf("update %s from %s to %s", path, mod.Module.Version, mod.Version)) g.logger.Info(ctx, fmt.Sprintf("update %s from %s to %s", path, mod.Module.Version, mod.Version))
logger.Info(ctx, "reset worktree") g.logger.Info(ctx, "reset worktree")
if err = wtree.Reset(&git.ResetOptions{Commit: headRef.Hash(), Mode: git.HardReset}); err != nil { if err = wtree.Reset(&git.ResetOptions{Commit: headRef.Hash(), Mode: git.HardReset}); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to reset repo branch: %v", err)) g.logger.Error(ctx, fmt.Sprintf("failed to reset repo branch: %v", err))
} //seems to switch the branch, todo can probably be removed } // seems to switch the branch, todo can probably be removed
if err = wtree.PullContext(ctx, &git.PullOptions{ if err = wtree.PullContext(ctx, &git.PullOptions{
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password}, Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
@ -177,26 +179,26 @@ func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mo
Force: true, Force: true,
RemoteName: "origin", RemoteName: "origin",
}); err != nil && err != git.NoErrAlreadyUpToDate { }); err != nil && err != git.NoErrAlreadyUpToDate {
logger.Error(ctx, fmt.Sprintf("failed to pull repo: %v", err)) //подтягиваем изменения с удаленого репозитория g.logger.Error(ctx, fmt.Sprintf("failed to pull repo: %v", err)) // подтягиваем изменения с удаленого репозитория
} }
logger.Info(ctx, fmt.Sprintf("checkout ref %s", headRef)) g.logger.Info(ctx, fmt.Sprintf("checkout ref %s", headRef))
if err = wtree.Checkout(&git.CheckoutOptions{ if err = wtree.Checkout(&git.CheckoutOptions{
Hash: headRef.Hash(), Hash: headRef.Hash(),
Branch: plumbing.NewBranchReferenceName(fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)), Branch: plumbing.NewBranchReferenceName(fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)),
Create: true, Create: true,
Force: true, Force: true,
}); err != nil { }); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err)) g.logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err))
return err return err
} //create a new branch } // create a new branch
epath, err := exec.LookPath("go") epath, err := exec.LookPath("go")
if errors.Is(err, exec.ErrDot) { if errors.Is(err, exec.ErrDot) {
err = nil err = nil
} }
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to find go command: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to find go command: %v", err))
} // look for the go binary } // look for the go binary
var cmd *exec.Cmd var cmd *exec.Cmd
@ -204,30 +206,30 @@ func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mo
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-droprequire=%s", mod.Module.Path)) cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-droprequire=%s", mod.Module.Path))
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
} }
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version)) cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version))
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
} // try to run go mod edit with the new module version } // try to run go mod edit with the new module version
cmd = exec.CommandContext(ctx, epath, "mod", "tidy") cmd = exec.CommandContext(ctx, epath, "mod", "tidy")
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to run go mod tidy: %s err: %v", out, err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod tidy: %s err: %v", out, err))
} // try to run go mod tidy to pull in the new module version } // try to run go mod tidy to pull in the new module version
logger.Info(ctx, "worktree add go.mod") g.logger.Info(ctx, "worktree add go.mod")
if _, err = wtree.Add("go.mod"); err != nil { if _, err = wtree.Add("go.mod"); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
} }
logger.Info(ctx, "worktree add go.sum") g.logger.Info(ctx, "worktree add go.sum")
if _, err = wtree.Add("go.sum"); err != nil { if _, err = wtree.Add("go.sum"); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
} }
logger.Info(ctx, "worktree commit") g.logger.Info(ctx, "worktree commit")
_, err = wtree.Commit(wTitle.String(), &git.CommitOptions{ _, err = wtree.Commit(wTitle.String(), &git.CommitOptions{
Parents: []plumbing.Hash{headRef.Hash()}, Parents: []plumbing.Hash{headRef.Hash()},
Author: &object.Signature{ Author: &object.Signature{
@ -237,19 +239,19 @@ func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mo
}, },
}) // commit the changes }) // commit the changes
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to commit: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to commit: %v", err))
} }
refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) //todo this seems to need reworking refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) // todo this seems to need reworking
logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec)) g.logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec))
if err = repo.PushContext(ctx, &git.PushOptions{ if err = repo.PushContext(ctx, &git.PushOptions{
RefSpecs: []gitconfig.RefSpec{refspec}, RefSpecs: []gitconfig.RefSpec{refspec},
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password}, Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
Force: true, Force: true,
}); err != nil { }); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to push repo branch: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to push repo branch: %v", err))
} // try to push the changes } // try to push the changes
body := map[string]string{ body := map[string]string{
@ -258,14 +260,14 @@ func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mo
"head": fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version), "head": fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version),
"title": wTitle.String(), "title": wTitle.String(),
} }
logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body)) g.logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body))
buf, err = json.Marshal(body) buf, err = json.Marshal(body)
if err != nil { if err != nil {
return err return err
} }
logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf)) g.logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf))
req, err := http.NewRequestWithContext( req, err := http.NewRequestWithContext(
ctx, ctx,
@ -283,34 +285,37 @@ func (g *Github) RequestOpen(ctx context.Context, branch string, path string, mo
rsp, err := http.DefaultClient.Do(req) rsp, err := http.DefaultClient.Do(req)
if err != nil { if err != nil {
return err return err
} //Seems we send a new request to create the pull request } // Seems we send a new request to create the pull request
if rsp.StatusCode != http.StatusCreated { if rsp.StatusCode != http.StatusCreated {
buf, _ = io.ReadAll(rsp.Body) buf, _ = io.ReadAll(rsp.Body)
return fmt.Errorf("unknown error: %s", buf) return fmt.Errorf("unknown error: %s", buf)
} }
logger.Info(ctx, fmt.Sprintf("PR create for %s-%s", path, mod.Version)) g.logger.Info(ctx, fmt.Sprintf("PR create for %s-%s", path, mod.Version))
repo, err = git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true}) repo, err = git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
} }
return nil return nil
} }
func (g *Github) RequestClose(ctx context.Context, branch string, path string) error { func (g *Github) RequestClose(ctx context.Context, branch string, path string) error {
return fmt.Errorf("implement me") return fmt.Errorf("implement me")
} }
func (g *Github) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error { func (g *Github) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error {
return fmt.Errorf("implement me") return fmt.Errorf("implement me")
} }
func (g *Github) RequestList(ctx context.Context, branch string) (map[string]string, error) { func (g *Github) RequestList(ctx context.Context, branch string) (map[string]string, error) {
logger.Debug(ctx, fmt.Sprintf("RequestList for %s", branch)) g.logger.Debug(ctx, fmt.Sprintf("RequestList for %s", branch))
var err error var err error
g.pulls, err = GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.Owner, g.Repository, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return nil, err return nil, err
} }
@ -318,16 +323,16 @@ func (g *Github) RequestList(ctx context.Context, branch string) (map[string]str
rMap := make(map[string]string) rMap := make(map[string]string)
for _, pull := range g.pulls { for _, pull := range g.pulls {
if !strings.HasPrefix(pull.Title, "Bump ") || pull.Base.Ref != branch { //only include the bot's module update requests if !strings.HasPrefix(pull.Title, "Bump ") || pull.Base.Ref != branch { // only include the bot's module update requests
continue continue
} }
path = strings.Split(pull.Title, " ")[1] //todo works only for the default template path = strings.Split(pull.Title, " ")[1] // todo works only for the default template
rMap[path] = pull.Title rMap[path] = pull.Title
} }
return rMap, nil return rMap, nil
} }
func GetPulls(ctx context.Context, url, owner, repo, password string) ([]*githubPull, error) { func (g *Github) GetPulls(ctx context.Context, url, owner, repo, password string) ([]*githubPull, error) {
var pullsAll []*githubPull var pullsAll []*githubPull
page := 1 page := 1
@ -340,7 +345,7 @@ func GetPulls(ctx context.Context, url, owner, repo, password string) ([]*github
nil) nil)
if err != nil { if err != nil {
return nil, err return nil, err
} //seems to be the request to the repository } // seems to be the request to the repository
req.Header.Add("Accept", "application/json") req.Header.Add("Accept", "application/json")
req.Header.Add("Content-Type", "application/json") req.Header.Add("Content-Type", "application/json")
@ -356,13 +361,13 @@ func GetPulls(ctx context.Context, url, owner, repo, password string) ([]*github
switch rsp.StatusCode { switch rsp.StatusCode {
case http.StatusOK: case http.StatusOK:
if err = json.Unmarshal(buf, &pulls); err != nil { if err = json.Unmarshal(buf, &pulls); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err)) g.logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err))
return nil, err return nil, err
} }
pullsAll = append(pullsAll, pulls...) pullsAll = append(pullsAll, pulls...)
page++ page++
case http.StatusNotFound: case http.StatusNotFound:
logger.Info(ctx, fmt.Sprintf("pull-request is not exist for %s", repo)) g.logger.Info(ctx, fmt.Sprintf("pull-request is not exist for %s", repo))
return nil, ErrPRNotExist return nil, ErrPRNotExist
default: default:
return nil, fmt.Errorf("unknown error: %s", buf) return nil, fmt.Errorf("unknown error: %s", buf)
@ -376,9 +381,9 @@ func GetPulls(ctx context.Context, url, owner, repo, password string) ([]*github
return pullsAll, nil return pullsAll, nil
} }
func checkout(w git.Worktree, ref plumbing.Reference) { func (g *Github) checkout(w git.Worktree, ref plumbing.Reference) {
ctx := context.Background() ctx := context.Background()
logger.Debug(ctx, fmt.Sprintf("Checkout: %s", ref.Name().Short())) g.logger.Debug(ctx, fmt.Sprintf("Checkout: %s", ref.Name().Short()))
if err := w.Checkout(&git.CheckoutOptions{ if err := w.Checkout(&git.CheckoutOptions{
Branch: ref.Name(), Branch: ref.Name(),
@ -386,6 +391,6 @@ func checkout(w git.Worktree, ref plumbing.Reference) {
Force: true, Force: true,
Keep: false, Keep: false,
}); err != nil { }); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to reset: %v", err)) g.logger.Error(ctx, fmt.Sprintf("failed to reset: %v", err))
} }
} }
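As a side note, both the Gitea and Github flows push the update to a branch named pkgdash/go_modules/<module path>-<new version> and force-push it via an explicit refspec before opening the pull request. A tiny sketch of just that naming, using illustrative module and version values:

package main

import "fmt"

func main() {
	// Illustrative values; the real code takes them from the detected update.
	path, version := "example.org/mod", "v1.1.0"

	branch := fmt.Sprintf("pkgdash/go_modules/%s-%s", path, version)
	refspec := fmt.Sprintf("+refs/heads/%s:refs/heads/%s", branch, branch)

	fmt.Println(branch)  // branch that carries the go.mod/go.sum bump
	fmt.Println(refspec) // forced update of the same ref on the remote
}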

View File

@ -15,19 +15,20 @@ import (
"text/template" "text/template"
"time" "time"
"git.unistack.org/unistack-org/pkgdash/internal/configcli"
"git.unistack.org/unistack-org/pkgdash/internal/modules"
"github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5"
gitconfig "github.com/go-git/go-git/v5/config" gitconfig "github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/plumbing/object"
httpauth "github.com/go-git/go-git/v5/plumbing/transport/http" httpauth "github.com/go-git/go-git/v5/plumbing/transport/http"
"go.unistack.org/micro/v4/logger" "go.unistack.org/micro/v3/logger"
"go.unistack.org/pkgdash/internal/configcli"
"go.unistack.org/pkgdash/internal/modules"
) )
var ErrPRNotExist = errors.New("pull request does not exist") var ErrPRNotExist = errors.New("pull request does not exist")
type Gitlab struct { type Gitlab struct {
logger logger.Logger
URL string URL string
Username string Username string
Password string Password string
@ -40,8 +41,9 @@ type Gitlab struct {
baseRef *plumbing.Reference baseRef *plumbing.Reference
} }
func NewGitlab(cfg configcli.Config) *Gitlab { func NewGitlab(cfg configcli.Config, log logger.Logger) *Gitlab {
return &Gitlab{ return &Gitlab{
logger: log,
URL: cfg.Source.APIURL, URL: cfg.Source.APIURL,
Username: cfg.Source.Username, Username: cfg.Source.Username,
Password: cfg.Source.Password, Password: cfg.Source.Password,
@ -70,21 +72,21 @@ func (g *Gitlab) Name() string {
} }
func (g *Gitlab) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error { func (g *Gitlab) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error {
logger.Debug(ctx, fmt.Sprintf("RequestOpen start, mod title: %s", path)) g.logger.Debug(ctx, fmt.Sprintf("RequestOpen start, mod title: %s", path))
var buf []byte var buf []byte
var err error var err error
// create the pull request title template // create the pull request title template
tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle) tplTitle, err := template.New("pull_request_title").Parse(g.PRTitle)
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
} }
wTitle := bytes.NewBuffer(nil) wTitle := bytes.NewBuffer(nil)
// create the pull request body template // create the pull request body template
tplBody, err := template.New("pull_request_body").Parse(g.PRTitle) tplBody, err := template.New("pull_request_body").Parse(g.PRTitle)
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to parse template: %v", err))
} }
wBody := bytes.NewBuffer(nil) wBody := bytes.NewBuffer(nil)
@ -96,83 +98,83 @@ func (g *Gitlab) RequestOpen(ctx context.Context, branch string, path string, mo
} }
if err = tplTitle.Execute(wTitle, data); err != nil { if err = tplTitle.Execute(wTitle, data); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
} }
if err = tplBody.Execute(wBody, data); err != nil { if err = tplBody.Execute(wBody, data); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to execute template: %v", err))
} }
// open the git repository, walking up the tree to detect .git // open the git repository, walking up the tree to detect .git
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true}) repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{DetectDotGit: true})
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to open repo: %v", err))
} }
//fetch refs and objects from the remote?? // fetch refs and objects from the remote??
if err = repo.FetchContext(ctx, &git.FetchOptions{ if err = repo.FetchContext(ctx, &git.FetchOptions{
// Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password}, // Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
Force: true, Force: true,
}); err != nil && err != git.NoErrAlreadyUpToDate { }); err != nil && err != git.NoErrAlreadyUpToDate {
logger.Fatal(ctx, fmt.Sprintf("failed to fetch repo : %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to fetch repo : %v", err))
} //update the repository } // update the repository
var headRef *plumbing.Reference // seems to be the git reference var headRef *plumbing.Reference // seems to be the git reference
if g.baseRef == nil { if g.baseRef == nil {
g.baseRef, err = repo.Head() g.baseRef, err = repo.Head()
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("Error head: %s", err)) g.logger.Fatal(ctx, fmt.Sprintf("Error head: %s", err))
} }
} }
refIter, err := repo.Branches() //get the branches refIter, err := repo.Branches() // get the branches
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to get branches: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to get branches: %v", err))
} }
for { for {
ref, err := refIter.Next() ref, err := refIter.Next()
if err != nil { if err != nil {
break break
} }
if ref.Name().Short() == branch { //todo this probably needs to be reworked if ref.Name().Short() == branch { // todo this probably needs to be reworked
headRef = ref headRef = ref
break break
} }
} //iterate over the branches and, once the needed one is found, leave the loop with it stored in headRef } // iterate over the branches and, once the needed one is found, leave the loop with it stored in headRef
refIter.Close() refIter.Close()
if headRef == nil { if headRef == nil {
logger.Fatal(ctx, "failed to get repo branch head") g.logger.Fatal(ctx, "failed to get repo branch head")
return err return err
} // the needed branch was not found } // the needed branch was not found
logger.Info(ctx, fmt.Sprintf("repo head %s", headRef)) g.logger.Info(ctx, fmt.Sprintf("repo head %s", headRef))
wtree, err := repo.Worktree() //todo the worktree does not seem to be needed wtree, err := repo.Worktree() // todo the worktree does not seem to be needed
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to get worktree: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to get worktree: %v", err))
} }
defer checkout(*wtree, *g.baseRef) defer g.checkout(*wtree, *g.baseRef)
g.pulls, err = GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password)
if err != nil { if err != nil && err != ErrPRNotExist {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return err return err
} }
for _, pull := range g.pulls { for _, pull := range g.pulls {
if strings.Contains(pull.Title, path) { if strings.Contains(pull.Title, path) {
logger.Info(ctx, fmt.Sprintf("PR for %s exists %s, call RequestUpdate", path, pull.URL)) g.logger.Info(ctx, fmt.Sprintf("PR for %s exists %s, call RequestUpdate", path, pull.URL))
return g.RequestUpdate(ctx, branch, path, mod) return g.RequestUpdate(ctx, branch, path, mod)
} // check whether a pull request already exists for this branch; if it does, return } // check whether a pull request already exists for this branch; if it does, return
} }
logger.Info(ctx, fmt.Sprintf("update %s from %s to %s", path, mod.Module.Version, mod.Version)) g.logger.Info(ctx, fmt.Sprintf("update %s from %s to %s", path, mod.Module.Version, mod.Version))
sourceBranch := fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version) sourceBranch := fmt.Sprintf("pkgdash/go_modules/%s-%s", path, mod.Version)
logger.Info(ctx, "reset worktree") g.logger.Info(ctx, "reset worktree")
if err = wtree.Reset(&git.ResetOptions{Commit: headRef.Hash(), Mode: git.HardReset}); err != nil { if err = wtree.Reset(&git.ResetOptions{Commit: headRef.Hash(), Mode: git.HardReset}); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to reset repo branch: %v", err)) g.logger.Error(ctx, fmt.Sprintf("failed to reset repo branch: %v", err))
} }
if err = wtree.PullContext(ctx, &git.PullOptions{ if err = wtree.PullContext(ctx, &git.PullOptions{
@ -182,26 +184,26 @@ func (g *Gitlab) RequestOpen(ctx context.Context, branch string, path string, mo
Force: true, Force: true,
RemoteName: "origin", RemoteName: "origin",
}); err != nil && err != git.NoErrAlreadyUpToDate { }); err != nil && err != git.NoErrAlreadyUpToDate {
logger.Error(ctx, fmt.Sprintf("failed to pull repo: %v", err)) //подтягиваем изменения с удаленого репозитория g.logger.Error(ctx, fmt.Sprintf("failed to pull repo: %v", err)) // подтягиваем изменения с удаленого репозитория
} }
logger.Info(ctx, fmt.Sprintf("checkout ref %s", headRef)) g.logger.Info(ctx, fmt.Sprintf("checkout ref %s", headRef))
if err = wtree.Checkout(&git.CheckoutOptions{ if err = wtree.Checkout(&git.CheckoutOptions{
Hash: headRef.Hash(), Hash: headRef.Hash(),
Branch: plumbing.NewBranchReferenceName(sourceBranch), Branch: plumbing.NewBranchReferenceName(sourceBranch),
Create: true, Create: true,
Force: true, Force: true,
}); err != nil { }); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err)) g.logger.Error(ctx, fmt.Sprintf("failed to checkout tree: %v", err))
return err return err
} //create a new branch } // create a new branch
epath, err := exec.LookPath("go") epath, err := exec.LookPath("go")
if errors.Is(err, exec.ErrDot) { if errors.Is(err, exec.ErrDot) {
err = nil err = nil
} }
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to find go command: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to find go command: %v", err))
} // look for the go binary } // look for the go binary
var cmd *exec.Cmd var cmd *exec.Cmd
@ -209,30 +211,30 @@ func (g *Gitlab) RequestOpen(ctx context.Context, branch string, path string, mo
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-droprequire=%s", mod.Module.Path)) cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-droprequire=%s", mod.Module.Path))
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
} }
cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version)) cmd = exec.CommandContext(ctx, epath, "mod", "edit", fmt.Sprintf("-require=%s@%s", path, mod.Version))
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod edit: %s err: %v", out, err))
} // try to run go mod edit with the new module version } // try to run go mod edit with the new module version
cmd = exec.CommandContext(ctx, epath, "mod", "tidy") cmd = exec.CommandContext(ctx, epath, "mod", "tidy")
if out, err = cmd.CombinedOutput(); err != nil { if out, err = cmd.CombinedOutput(); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to run go mod tidy: %s err: %v", out, err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to run go mod tidy: %s err: %v", out, err))
} // try to run go mod tidy to pull in the new module version } // try to run go mod tidy to pull in the new module version
logger.Info(ctx, "worktree add go.mod") g.logger.Info(ctx, "worktree add go.mod")
if _, err = wtree.Add("go.mod"); err != nil { if _, err = wtree.Add("go.mod"); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
} }
logger.Info(ctx, "worktree add go.sum") g.logger.Info(ctx, "worktree add go.sum")
if _, err = wtree.Add("go.sum"); err != nil { if _, err = wtree.Add("go.sum"); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to add file: %v", err))
} }
logger.Info(ctx, "worktree commit") g.logger.Info(ctx, "worktree commit")
_, err = wtree.Commit(wTitle.String(), &git.CommitOptions{ _, err = wtree.Commit(wTitle.String(), &git.CommitOptions{
Parents: []plumbing.Hash{headRef.Hash()}, Parents: []plumbing.Hash{headRef.Hash()},
Author: &object.Signature{ Author: &object.Signature{
@ -242,19 +244,19 @@ func (g *Gitlab) RequestOpen(ctx context.Context, branch string, path string, mo
}, },
}) // commit the changes }) // commit the changes
if err != nil { if err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to commit: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to commit: %v", err))
} }
refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) //todo this seems to need reworking refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/pkgdash/go_modules/%s-%s:refs/heads/pkgdash/go_modules/%s-%s", path, mod.Version, path, mod.Version)) // todo this seems to need reworking
logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec)) g.logger.Info(ctx, fmt.Sprintf("try to push refspec %s", refspec))
if err = repo.PushContext(ctx, &git.PushOptions{ if err = repo.PushContext(ctx, &git.PushOptions{
RefSpecs: []gitconfig.RefSpec{refspec}, RefSpecs: []gitconfig.RefSpec{refspec},
Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password}, Auth: &httpauth.BasicAuth{Username: g.Username, Password: g.Password},
Force: true, Force: true,
}); err != nil { }); err != nil {
logger.Fatal(ctx, fmt.Sprintf("failed to push repo branch: %v", err)) g.logger.Fatal(ctx, fmt.Sprintf("failed to push repo branch: %v", err))
} // try to push the changes } // try to push the changes
body := map[string]string{ body := map[string]string{
@ -264,14 +266,14 @@ func (g *Gitlab) RequestOpen(ctx context.Context, branch string, path string, mo
"title": wTitle.String(), "title": wTitle.String(),
"description": wBody.String(), "description": wBody.String(),
} }
logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body)) g.logger.Info(ctx, fmt.Sprintf("raw body: %#+v", body))
buf, err = json.Marshal(body) buf, err = json.Marshal(body)
if err != nil { if err != nil {
return err return err
} }
logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf)) g.logger.Info(ctx, fmt.Sprintf("marshal body: %s", buf))
req, err := http.NewRequestWithContext( req, err := http.NewRequestWithContext(
ctx, ctx,
@ -289,24 +291,24 @@ func (g *Gitlab) RequestOpen(ctx context.Context, branch string, path string, mo
rsp, err := http.DefaultClient.Do(req) rsp, err := http.DefaultClient.Do(req)
if err != nil { if err != nil {
return err return err
} //Seems we send a new request to create the pull request } // Seems we send a new request to create the pull request
if rsp.StatusCode != http.StatusCreated { if rsp.StatusCode != http.StatusCreated {
buf, _ = io.ReadAll(rsp.Body) buf, _ = io.ReadAll(rsp.Body)
return fmt.Errorf("unknown error: %s", buf) return fmt.Errorf("unknown error: %s", buf)
} }
logger.Info(ctx, fmt.Sprintf("PR create for %s-%s", path, mod.Version)) g.logger.Info(ctx, fmt.Sprintf("PR create for %s-%s", path, mod.Version))
return nil return nil
} }
func (g *Gitlab) RequestClose(ctx context.Context, branch string, path string) error { func (g *Gitlab) RequestClose(ctx context.Context, branch string, path string) error {
logger.Debug(ctx, fmt.Sprintf("RequestClose start, mod title: %s", path)) g.logger.Debug(ctx, fmt.Sprintf("RequestClose start, mod title: %s", path))
var err error var err error
g.pulls, err = GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return err return err
} }
@ -314,72 +316,72 @@ func (g *Gitlab) RequestClose(ctx context.Context, branch string, path string) e
var b string // Name of the branch to be deleted var b string // Name of the branch to be deleted
for _, pull := range g.pulls { for _, pull := range g.pulls {
if strings.Contains(pull.Title, path) { if strings.Contains(pull.Title, path) {
logger.Info(ctx, fmt.Sprintf("PR for %s exists: %s", path, pull.URL)) g.logger.Info(ctx, fmt.Sprintf("PR for %s exists: %s", path, pull.URL))
prExist = true prExist = true
b = pull.Source b = pull.Source
} }
} }
if !prExist { if !prExist {
logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path)) g.logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path))
return ErrPRNotExist return ErrPRNotExist
} }
req, err := DeleteBranch(ctx, g.URL, g.RepositoryId, b, g.Password) req, err := g.DeleteBranch(ctx, g.URL, g.RepositoryId, b, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("failed to create request for delete the branch: %s, err: %s", branch, err)) g.logger.Error(ctx, fmt.Sprintf("failed to create request for delete the branch: %s, err: %s", branch, err))
return err return err
} }
rsp, err := http.DefaultClient.Do(req) rsp, err := http.DefaultClient.Do(req)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("failed to do request for delete the branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode)) g.logger.Error(ctx, fmt.Sprintf("failed to do request for delete the branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode))
return err return err
} }
logger.Info(ctx, fmt.Sprintf("Delete branch for %s successful", path)) g.logger.Info(ctx, fmt.Sprintf("Delete branch for %s successful", path))
return nil return nil
} }
func (g *Gitlab) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error { func (g *Gitlab) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error {
logger.Debug(ctx, fmt.Sprintf("RequestUpdate start, mod title: %s", path)) g.logger.Debug(ctx, fmt.Sprintf("RequestUpdate start, mod title: %s", path))
var err error var err error
g.RepositoryId, err = GetRepoID(ctx, g.URL, g.Owner, g.Repository, g.Password) g.RepositoryId, err = g.GetRepoID(ctx, g.URL, g.Owner, g.Repository, g.Password)
if err != nil || g.RepositoryId == "" { if err != nil || g.RepositoryId == "" {
return fmt.Errorf("project id is empty") return fmt.Errorf("project id is empty")
} }
g.pulls, err = GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return err return err
} }
prExist := false prExist := false
for _, pull := range g.pulls { for _, pull := range g.pulls {
if strings.Contains(pull.Title, path) { if strings.Contains(pull.Title, path) {
logger.Info(ctx, fmt.Sprintf("don't skip %s since pr exist %s", path, pull.URL)) //todo g.logger.Info(ctx, fmt.Sprintf("don't skip %s since pr exist %s", path, pull.URL)) // todo
tVersion := getVersions(pull.Source) //just take the latest version from the branch name tVersion := getVersions(pull.Source) // just take the latest version from the branch name
if modules.IsNewerVersion(tVersion, mod.Version, false) { if modules.IsNewerVersion(tVersion, mod.Version, false) {
reqDel, err := DeleteBranch(ctx, g.URL, g.RepositoryId, pull.Source, g.Password) reqDel, err := g.DeleteBranch(ctx, g.URL, g.RepositoryId, pull.Source, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("Error with create request for branch: %s, err: %s", branch, err)) g.logger.Error(ctx, fmt.Sprintf("Error with create request for branch: %s, err: %s", branch, err))
return err return err
} }
rsp, err := http.DefaultClient.Do(reqDel) rsp, err := http.DefaultClient.Do(reqDel)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("Error with do request for branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode)) g.logger.Error(ctx, fmt.Sprintf("Error with do request for branch: %s, err: %s, code: %v", branch, err, rsp.StatusCode))
return err return err
} }
logger.Info(ctx, fmt.Sprintf("Old pr %s successful delete", pull.Source)) g.logger.Info(ctx, fmt.Sprintf("Old pr %s successful delete", pull.Source))
} else { } else {
logger.Debug(ctx, "The existing PR is relevant") g.logger.Debug(ctx, "The existing PR is relevant")
return nil return nil
} }
prExist = true prExist = true
} }
} }
if !prExist { if !prExist {
logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path)) g.logger.Error(ctx, fmt.Sprintf("skip %s since pr does not exist", path))
return ErrPRNotExist return ErrPRNotExist
} }
@ -387,17 +389,17 @@ func (g *Gitlab) RequestUpdate(ctx context.Context, branch string, path string,
} }
func (g *Gitlab) RequestList(ctx context.Context, branch string) (map[string]string, error) { func (g *Gitlab) RequestList(ctx context.Context, branch string) (map[string]string, error) {
logger.Debug(ctx, fmt.Sprintf("RequestList for %s", branch)) g.logger.Debug(ctx, fmt.Sprintf("RequestList for %s", branch))
var err error var err error
g.RepositoryId, err = GetRepoID(ctx, g.URL, g.Owner, g.Repository, g.Password) g.RepositoryId, err = g.GetRepoID(ctx, g.URL, g.Owner, g.Repository, g.Password)
if err != nil || g.RepositoryId == "" { if err != nil || g.RepositoryId == "" {
return nil, fmt.Errorf("project id is empty") return nil, fmt.Errorf("project id is empty")
} }
g.pulls, err = GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password) g.pulls, err = g.GetPulls(ctx, g.URL, g.RepositoryId, branch, g.Password)
if err != nil { if err != nil {
logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err)) g.logger.Error(ctx, fmt.Sprintf("GetPulls error: %s", err))
return nil, err return nil, err
} }
@ -405,10 +407,10 @@ func (g *Gitlab) RequestList(ctx context.Context, branch string) (map[string]str
rMap := make(map[string]string) rMap := make(map[string]string)
for _, pull := range g.pulls { for _, pull := range g.pulls {
if !strings.HasPrefix(pull.Title, "Bump ") { //keep only the bot's module-update requests if !strings.HasPrefix(pull.Title, "Bump ") { // keep only the bot's module-update requests
continue continue
} }
path = strings.Split(pull.Title, " ")[1] //TODO: works only for the default template path = strings.Split(pull.Title, " ")[1] // TODO: works only for the default template
rMap[path] = pull.Title rMap[path] = pull.Title
} }
return rMap, nil return rMap, nil
@ -422,7 +424,7 @@ func getVersions(s string) string {
return version return version
} }
func DeleteBranch(ctx context.Context, url, projectId, branch, password string) (*http.Request, error) { func (g *Gitlab) DeleteBranch(ctx context.Context, url, projectId, branch, password string) (*http.Request, error) {
var buf []byte var buf []byte
req, err := http.NewRequestWithContext(ctx, http.MethodDelete, fmt.Sprintf("https://%s/api/v4/projects/%s/repository/branches/%s", url, projectId, branch), bytes.NewReader(buf)) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, fmt.Sprintf("https://%s/api/v4/projects/%s/repository/branches/%s", url, projectId, branch), bytes.NewReader(buf))
if err != nil { if err != nil {
@ -434,7 +436,7 @@ func DeleteBranch(ctx context.Context, url, projectId, branch, password string)
return req, err return req, err
} }
func GetPulls(ctx context.Context, url, projectId, branch, password string) ([]*gitlabPull, error) { func (g *Gitlab) GetPulls(ctx context.Context, url, projectId, branch, password string) ([]*gitlabPull, error) {
pulls := make([]*gitlabPull, 0, 10) pulls := make([]*gitlabPull, 0, 10)
req, err := http.NewRequestWithContext( req, err := http.NewRequestWithContext(
ctx, ctx,
@ -443,7 +445,7 @@ func GetPulls(ctx context.Context, url, projectId, branch, password string) ([]*
nil) nil)
if err != nil { if err != nil {
return nil, err return nil, err
} //build the request to the repository } // build the request to the repository
req.Header.Add("Accept", "application/json") req.Header.Add("Accept", "application/json")
req.Header.Add("Content-Type", "application/json") req.Header.Add("Content-Type", "application/json")
@ -459,19 +461,19 @@ func GetPulls(ctx context.Context, url, projectId, branch, password string) ([]*
switch rsp.StatusCode { switch rsp.StatusCode {
case http.StatusOK: case http.StatusOK:
if err = json.Unmarshal(buf, &pulls); err != nil { if err = json.Unmarshal(buf, &pulls); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err)) g.logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err))
return nil, err return nil, err
} }
return pulls, nil return pulls, nil
case http.StatusNotFound: case http.StatusNotFound:
logger.Info(ctx, fmt.Sprintf("pull-request is not exist for %s", projectId)) g.logger.Info(ctx, fmt.Sprintf("pull-request is not exist for %s", projectId))
return nil, ErrPRNotExist return nil, ErrPRNotExist
default: default:
return nil, fmt.Errorf("unknown error: %s", buf) return nil, fmt.Errorf("unknown error: %s", buf)
} }
} }
func GetRepoID(ctx context.Context, url, owner, repo, password string) (rId string, err error) { func (g *Gitlab) GetRepoID(ctx context.Context, url, owner, repo, password string) (rId string, err error) {
var buf []byte var buf []byte
projects := make([]*gitlabProject, 0, 10) projects := make([]*gitlabProject, 0, 10)
req, err := http.NewRequestWithContext(ctx, http.MethodGet, fmt.Sprintf("https://%s/api/v4/users/%s/projects?owned=true", url, owner), nil) req, err := http.NewRequestWithContext(ctx, http.MethodGet, fmt.Sprintf("https://%s/api/v4/users/%s/projects?owned=true", url, owner), nil)
@ -492,7 +494,7 @@ func GetRepoID(ctx context.Context, url, owner, repo, password string) (rId stri
switch rsp.StatusCode { switch rsp.StatusCode {
case http.StatusOK: case http.StatusOK:
if err = json.Unmarshal(buf, &projects); err != nil { if err = json.Unmarshal(buf, &projects); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err)) g.logger.Error(ctx, fmt.Sprintf("failed to decode response %s err: %v", buf, err))
} }
for _, p := range projects { for _, p := range projects {
if p.Name == repo { if p.Name == repo {
@ -505,9 +507,9 @@ func GetRepoID(ctx context.Context, url, owner, repo, password string) (rId stri
} }
} }
func checkout(w git.Worktree, ref plumbing.Reference) { func (g *Gitlab) checkout(w git.Worktree, ref plumbing.Reference) {
ctx := context.Background() ctx := context.Background()
logger.Debug(ctx, fmt.Sprintf("Checkout: %s", ref.Name().Short())) g.logger.Debug(ctx, fmt.Sprintf("Checkout: %s", ref.Name().Short()))
if err := w.Checkout(&git.CheckoutOptions{ if err := w.Checkout(&git.CheckoutOptions{
Branch: ref.Name(), Branch: ref.Name(),
@ -515,6 +517,6 @@ func checkout(w git.Worktree, ref plumbing.Reference) {
Force: true, Force: true,
Keep: false, Keep: false,
}); err != nil { }); err != nil {
logger.Error(ctx, fmt.Sprintf("failed to reset: %v", err)) g.logger.Error(ctx, fmt.Sprintf("failed to reset: %v", err))
} }
} }
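For context, the update branch pushed above is named pkgdash/go_modules/<module path>-<version>, and the refspec mirrors that name on both sides. Below is a minimal sketch of building and validating such a refspec with go-git; the module path and version are invented for illustration.

package main

import (
	"fmt"

	gitconfig "github.com/go-git/go-git/v5/config"
)

func main() {
	// hypothetical module update, for illustration only
	path, version := "golang.org/x/mod", "v0.22.0"

	// same branch naming scheme RequestOpen/RequestUpdate use above
	branch := fmt.Sprintf("pkgdash/go_modules/%s-%s", path, version)
	refspec := gitconfig.RefSpec(fmt.Sprintf("+refs/heads/%s:refs/heads/%s", branch, branch))

	// RefSpec.Validate reports whether the spec is well formed before it is pushed
	if err := refspec.Validate(); err != nil {
		panic(err)
	}
	fmt.Println("refspec:", refspec)
}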

View File

@ -4,19 +4,22 @@ import (
"context" "context"
"fmt" "fmt"
"git.unistack.org/unistack-org/pkgdash/internal/configcli" "go.unistack.org/micro/v3/logger"
"git.unistack.org/unistack-org/pkgdash/internal/modules" "go.unistack.org/pkgdash/internal/configcli"
"go.unistack.org/pkgdash/internal/modules"
) )
type Gogs struct { type Gogs struct {
logger logger.Logger
Username string Username string
Password string Password string
} }
func NewGogs(cfg configcli.Config) *Gogs { func NewGogs(cfg configcli.Config, log logger.Logger) *Gogs {
return &Gogs{ return &Gogs{
logger: log,
Username: cfg.Source.Username, Username: cfg.Source.Username,
Password: cfg.Source.Password, Password: cfg.Source.Password,
} }
} }
@ -27,12 +30,15 @@ func (g *Gogs) Name() string {
func (g *Gogs) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error { func (g *Gogs) RequestOpen(ctx context.Context, branch string, path string, mod modules.Update) error {
return fmt.Errorf("implement me") return fmt.Errorf("implement me")
} }
func (g *Gogs) RequestClose(ctx context.Context, branch string, path string) error { func (g *Gogs) RequestClose(ctx context.Context, branch string, path string) error {
return fmt.Errorf("implement me") return fmt.Errorf("implement me")
} }
func (g *Gogs) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error { func (g *Gogs) RequestUpdate(ctx context.Context, branch string, path string, mod modules.Update) error {
return fmt.Errorf("implement me") return fmt.Errorf("implement me")
} }
func (g *Gogs) RequestList(ctx context.Context, branch string) (map[string]string, error) { func (g *Gogs) RequestList(ctx context.Context, branch string) (map[string]string, error) {
return nil, fmt.Errorf("implement me") return nil, fmt.Errorf("implement me")
} }

View File

@ -3,12 +3,13 @@ package source
import ( import (
"context" "context"
"git.unistack.org/unistack-org/pkgdash/internal/configcli" "go.unistack.org/micro/v3/logger"
"git.unistack.org/unistack-org/pkgdash/internal/modules" "go.unistack.org/pkgdash/internal/configcli"
"git.unistack.org/unistack-org/pkgdash/internal/source/gitea" "go.unistack.org/pkgdash/internal/modules"
"git.unistack.org/unistack-org/pkgdash/internal/source/github" "go.unistack.org/pkgdash/internal/source/gitea"
"git.unistack.org/unistack-org/pkgdash/internal/source/gitlab" "go.unistack.org/pkgdash/internal/source/github"
"git.unistack.org/unistack-org/pkgdash/internal/source/gogs" "go.unistack.org/pkgdash/internal/source/gitlab"
"go.unistack.org/pkgdash/internal/source/gogs"
) )
type SourceControl interface { type SourceControl interface {
@ -19,16 +20,16 @@ type SourceControl interface {
RequestList(ctx context.Context, branch string) (map[string]string, error) RequestList(ctx context.Context, branch string) (map[string]string, error)
} }
func NewSourceControl(cfg configcli.Config) SourceControl { func NewSourceControl(cfg configcli.Config, log logger.Logger) SourceControl {
switch cfg.Source.TypeGit { switch cfg.Source.TypeGit {
case "github": case "github":
return github.NewGithub(cfg) return github.NewGithub(cfg, log)
case "gitlab": case "gitlab":
return gitlab.NewGitlab(cfg) return gitlab.NewGitlab(cfg, log)
case "gitea": case "gitea":
return gitea.NewGitea(cfg) return gitea.NewGitea(cfg, log)
case "gogs": case "gogs":
return gogs.NewGogs(cfg) return gogs.NewGogs(cfg, log)
} }
return nil return nil
} }
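A short usage sketch of the new NewSourceControl signature follows; logger.DefaultLogger and the config values are assumptions for illustration.

package main

import (
	"context"

	"go.unistack.org/micro/v3/logger"
	"go.unistack.org/pkgdash/internal/configcli"
	"go.unistack.org/pkgdash/internal/source"
)

func main() {
	ctx := context.Background()
	log := logger.DefaultLogger // assumed default logger from micro v3

	var cfg configcli.Config
	// hypothetical: cfg.Source.TypeGit = "gitlab", plus URL/credentials from the CLI config

	sc := source.NewSourceControl(cfg, log) // the logger is now passed explicitly
	if sc == nil {
		log.Fatal(ctx, "unsupported source type: %s", cfg.Source.TypeGit)
	}
	if prs, err := sc.RequestList(ctx, "master"); err == nil {
		log.Info(ctx, "open update requests: %v", prs)
	}
}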

View File

@ -10,14 +10,14 @@ import (
"fmt" "fmt"
"strings" "strings"
"git.unistack.org/unistack-org/pkgdash/internal/config" "go.unistack.org/pkgdash/internal/config"
"git.unistack.org/unistack-org/pkgdash/internal/models" "go.unistack.org/pkgdash/internal/models"
pb "git.unistack.org/unistack-org/pkgdash/proto" pb "go.unistack.org/pkgdash/proto"
"github.com/golang-migrate/migrate/v4" "github.com/golang-migrate/migrate/v3"
mpgx "github.com/golang-migrate/migrate/v4/database/pgx" mpgx "github.com/golang-migrate/migrate/v4/database/pgx"
"github.com/golang-migrate/migrate/v4/source/iofs" "github.com/golang-migrate/migrate/v4/source/iofs"
"github.com/lib/pq" "github.com/lib/pq"
"go.unistack.org/micro/v4/logger" "go.unistack.org/micro/v3/logger"
) )
const ( const (
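Note that the base module above stays on golang-migrate v4, matching the v4 pgx and iofs sub-packages that are still imported. A hedged sketch of how the three fit together; the embedded migrations directory and the package name are assumptions.

package postgres

import (
	"database/sql"
	"embed"

	"github.com/golang-migrate/migrate/v4"
	mpgx "github.com/golang-migrate/migrate/v4/database/pgx"
	"github.com/golang-migrate/migrate/v4/source/iofs"
)

//go:embed migrations/*.sql
var migrationsFS embed.FS // hypothetical embedded migration files

// runMigrations applies all pending migrations from the embedded FS to the given database.
func runMigrations(db *sql.DB) error {
	src, err := iofs.New(migrationsFS, "migrations")
	if err != nil {
		return err
	}
	drv, err := mpgx.WithInstance(db, &mpgx.Config{})
	if err != nil {
		return err
	}
	m, err := migrate.NewWithInstance("iofs", src, "pgx", drv)
	if err != nil {
		return err
	}
	if err := m.Up(); err != nil && err != migrate.ErrNoChange {
		return err
	}
	return nil
}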

View File

@ -6,27 +6,26 @@ import (
"fmt" "fmt"
"time" "time"
"git.unistack.org/unistack-org/pkgdash/internal/models"
"git.unistack.org/unistack-org/pkgdash/internal/storage"
pb "git.unistack.org/unistack-org/pkgdash/proto"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
"go.unistack.org/micro/v4/logger" "go.unistack.org/micro/v3/logger"
"go.unistack.org/pkgdash/internal/models"
"go.unistack.org/pkgdash/internal/storage"
pb "go.unistack.org/pkgdash/proto"
) )
func init() { func init() {
storage.RegisterStorage("sqlite", NewStorage()) storage.RegisterStorage("sqlite", NewStorage)
} }
var _ storage.Storage = (*Sqlite)(nil) var _ storage.Storage = (*Sqlite)(nil)
type Sqlite struct { type Sqlite struct {
db *sqlx.DB logger logger.Logger
db *sqlx.DB
} }
func NewStorage() func(*sqlx.DB) interface{} { func NewStorage(log logger.Logger, db *sqlx.DB) interface{} {
return func(db *sqlx.DB) interface{} { return &Sqlite{db: db, logger: log}
return &Sqlite{db: db}
}
} }
func (s *Sqlite) PackageModulesCreate(ctx context.Context, pkg *models.Package, modules []*models.Module) error { func (s *Sqlite) PackageModulesCreate(ctx context.Context, pkg *models.Package, modules []*models.Module) error {
@ -126,7 +125,7 @@ func (s *Sqlite) CommentCreate(ctx context.Context, req *pb.CommentCreateReq) (*
defer func() { defer func() {
if err != nil { if err != nil {
if rollbackErr := tx.Rollback(); rollbackErr != nil { if rollbackErr := tx.Rollback(); rollbackErr != nil {
logger.Error(ctx, "AddComment: unable to rollback: %v", rollbackErr) s.logger.Error(ctx, "AddComment: unable to rollback: %v", rollbackErr)
} }
} else { } else {
err = tx.Commit() err = tx.Commit()

View File

@ -5,16 +5,17 @@ import (
"errors" "errors"
"time" "time"
"git.unistack.org/unistack-org/pkgdash/internal/models"
pb "git.unistack.org/unistack-org/pkgdash/proto"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
"go.unistack.org/micro/v3/logger"
"go.unistack.org/pkgdash/internal/models"
pb "go.unistack.org/pkgdash/proto"
) )
func RegisterStorage(name string, fn func(*sqlx.DB) interface{}) { func RegisterStorage(name string, fn func(logger.Logger, *sqlx.DB) interface{}) {
storages[name] = fn storages[name] = fn
} }
var storages = map[string]func(*sqlx.DB) interface{}{} var storages = map[string]func(logger.Logger, *sqlx.DB) interface{}{}
type Storage interface { type Storage interface {
PackageModulesCreate(ctx context.Context, pkg *models.Package, modules []*models.Module) error PackageModulesCreate(ctx context.Context, pkg *models.Package, modules []*models.Module) error
@ -35,12 +36,12 @@ type Storage interface {
ModuleCreate(ctx context.Context, modules []*models.Module) error ModuleCreate(ctx context.Context, modules []*models.Module) error
} }
func NewStorage(name string, db *sqlx.DB) (Storage, error) { func NewStorage(name string, log logger.Logger, db *sqlx.DB) (Storage, error) {
function, ok := storages[name] fn, ok := storages[name]
if !ok { if !ok {
return nil, errors.New("incorrect name store") return nil, errors.New("incorrect name store")
} }
store := function(db) store := fn(log, db)
database, ok := store.(Storage) database, ok := store.(Storage)
if !ok { if !ok {
return nil, errors.New("dont implements interface Storage") return nil, errors.New("dont implements interface Storage")
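A minimal sketch of the revised registry contract: a driver registers a constructor of the new shape func(logger.Logger, *sqlx.DB) interface{} from its init, and callers build a Storage through NewStorage with the logger. The sqlite driver import and the DSN are assumptions.

package main

import (
	"context"

	"github.com/jmoiron/sqlx"
	_ "github.com/mattn/go-sqlite3" // hypothetical database/sql driver; the project may use another
	"go.unistack.org/micro/v3/logger"
	"go.unistack.org/pkgdash/internal/storage"
	_ "go.unistack.org/pkgdash/internal/storage/sqlite" // registers the "sqlite" constructor via init()
)

func main() {
	ctx := context.Background()
	log := logger.DefaultLogger // assumed default logger from micro v3

	db, err := sqlx.Connect("sqlite3", "file:pkgdash.db?cache=shared") // hypothetical DSN
	if err != nil {
		log.Fatal(ctx, "connect: %v", err)
	}

	// NewStorage looks the constructor up by name, calls it with (log, db)
	// and asserts the result to the Storage interface.
	store, err := storage.NewStorage("sqlite", log, db)
	if err != nil {
		log.Fatal(ctx, "init storage: %v", err)
	}
	_ = store
}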

View File

@ -6,8 +6,8 @@ import (
"fmt" "fmt"
"testing" "testing"
"git.unistack.org/unistack-org/pkgdash/internal/storage/sqlite" "go.unistack.org/pkgdash/internal/storage/sqlite"
pb "git.unistack.org/unistack-org/pkgdash/proto" pb "go.unistack.org/pkgdash/proto"
) )
func TestGetModule(t *testing.T) { func TestGetModule(t *testing.T) {

View File

@ -12,20 +12,20 @@ import (
"sync" "sync"
"time" "time"
"git.unistack.org/unistack-org/pkgdash/internal/models"
"git.unistack.org/unistack-org/pkgdash/internal/modules"
"git.unistack.org/unistack-org/pkgdash/internal/storage"
"github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing/filemode" "github.com/go-git/go-git/v5/plumbing/filemode"
"github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/plumbing/object"
"github.com/go-git/go-git/v5/storage/memory" "github.com/go-git/go-git/v5/storage/memory"
"github.com/pkg/errors" "github.com/pkg/errors"
"go.unistack.org/micro/v4/logger" "go.unistack.org/micro/v3/logger"
"go.unistack.org/pkgdash/internal/models"
"go.unistack.org/pkgdash/internal/modules"
"go.unistack.org/pkgdash/internal/storage"
"golang.org/x/mod/modfile" "golang.org/x/mod/modfile"
"golang.org/x/mod/module" "golang.org/x/mod/module"
) )
func Run(ctx context.Context, store storage.Storage, td time.Duration) { func Run(ctx context.Context, log logger.Logger, store storage.Storage, td time.Duration) {
modTicker := time.NewTicker(5 * time.Second) modTicker := time.NewTicker(5 * time.Second)
defer modTicker.Stop() defer modTicker.Stop()
pkgTicker := time.NewTicker(5 * time.Second) pkgTicker := time.NewTicker(5 * time.Second)
@ -42,13 +42,13 @@ func Run(ctx context.Context, store storage.Storage, td time.Duration) {
if err != sql.ErrNoRows { if err != sql.ErrNoRows {
continue continue
} }
logger.Fatal(ctx, "failed to get packages to process: %v", err) log.Fatal(ctx, "failed to get packages to process: %v", err)
} }
wg.Add(len(packages)) wg.Add(len(packages))
for _, pkg := range packages { for _, pkg := range packages {
go func(p *models.Package) { go func(p *models.Package) {
if err := parseModFile(ctx, store, p); err != nil { if err := parseModFile(ctx, log, store, p); err != nil {
logger.Error(ctx, "failed to process package %s: %v", p.Name, err) log.Error(ctx, "failed to process package %s: %v", p.Name, err)
} }
p.LastCheck.Time = time.Now() p.LastCheck.Time = time.Now()
wg.Done() wg.Done()
@ -56,7 +56,7 @@ func Run(ctx context.Context, store storage.Storage, td time.Duration) {
} }
wg.Wait() wg.Wait()
if err = store.PackagesUpdateLastCheck(ctx, packages); err != nil { if err = store.PackagesUpdateLastCheck(ctx, packages); err != nil {
logger.Error(ctx, "update packages last_check %#+v, err: %v", packages, err) log.Error(ctx, "update packages last_check %#+v, err: %v", packages, err)
} }
case <-modTicker.C: case <-modTicker.C:
modules, err := store.ModulesProcess(ctx, td) modules, err := store.ModulesProcess(ctx, td)
@ -64,17 +64,17 @@ func Run(ctx context.Context, store storage.Storage, td time.Duration) {
if err != sql.ErrNoRows { if err != sql.ErrNoRows {
continue continue
} }
logger.Fatal(ctx, "failed to get modules to process: %v", err) log.Fatal(ctx, "failed to get modules to process: %v", err)
} }
if err := processModules(ctx, store, modules); err != nil { if err := processModules(ctx, log, store, modules); err != nil {
logger.Error(ctx, "failed to process modules: %v", err) log.Error(ctx, "failed to process modules: %v", err)
} }
} }
} }
} }
func parseModFile(ctx context.Context, store storage.Storage, pkg *models.Package) error { func parseModFile(ctx context.Context, log logger.Logger, store storage.Storage, pkg *models.Package) error {
logger.Info(ctx, "process package %v", pkg) log.Info(ctx, "process package %v", pkg)
u, err := url.Parse(pkg.URL) u, err := url.Parse(pkg.URL)
if err != nil { if err != nil {
@ -125,7 +125,7 @@ func parseModFile(ctx context.Context, store storage.Storage, pkg *models.Packag
err = tree.Files().ForEach(func(file *object.File) error { err = tree.Files().ForEach(func(file *object.File) error {
if file == nil { if file == nil {
err = errors.New("file pointer is nil") err = errors.New("file pointer is nil")
logger.Error(ctx, "file tree error", err) log.Error(ctx, "file tree error", err)
return err return err
} }
@ -156,14 +156,14 @@ func parseModFile(ctx context.Context, store storage.Storage, pkg *models.Packag
}) })
if err = store.PackageModulesCreate(ctx, pkg, modules); err != nil { if err = store.PackageModulesCreate(ctx, pkg, modules); err != nil {
logger.Error(ctx, "failed to set create modules: %v", err) log.Error(ctx, "failed to set create modules: %v", err)
return err return err
} }
return nil return nil
} }
func processModules(ctx context.Context, store storage.Storage, mods []*models.Module) error { func processModules(ctx context.Context, log logger.Logger, store storage.Storage, mods []*models.Module) error {
mvs := make(map[string]*models.Module, len(mods)) mvs := make(map[string]*models.Module, len(mods))
for _, mod := range mods { for _, mod := range mods {
@ -182,7 +182,7 @@ func processModules(ctx context.Context, store storage.Storage, mods []*models.M
Modules: mvsu, Modules: mvsu,
OnUpdate: func(u modules.Update) { OnUpdate: func(u modules.Update) {
if u.Err != nil { if u.Err != nil {
logger.Error(ctx, "%s: failed: %v", u.Module.Path, u.Err) log.Error(ctx, "%s: failed: %v", u.Module.Path, u.Err)
} else { } else {
mvs[u.Module.Path].Version = u.Version mvs[u.Module.Path].Version = u.Version
} }
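The worker now receives the logger explicitly instead of using the package-level one. A small sketch of starting it; the import path of the Run function and the interval value are assumptions.

package app

import (
	"context"
	"time"

	"go.unistack.org/micro/v3/logger"
	"go.unistack.org/pkgdash/internal/storage"
	"go.unistack.org/pkgdash/internal/worker" // assumed import path for the Run function above
)

// startWorker launches the background check loop with an injected logger.
func startWorker(ctx context.Context, log logger.Logger, store storage.Storage) {
	// Run blocks on its internal tickers, so it runs in its own goroutine;
	// the duration is forwarded to the *Process queries (presumably a staleness threshold).
	go worker.Run(ctx, log, store, 12*time.Hour)
}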

View File

@ -1,5 +1,5 @@
// Code generated by protoc-gen-go-micro. DO NOT EDIT. // Code generated by protoc-gen-go-micro. DO NOT EDIT.
// protoc-gen-go-micro version: v4.0.2 // protoc-gen-go-micro version: v3.10.4
package pkgdashpb package pkgdashpb

View File

@ -1,15 +1,15 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.26.0 // protoc-gen-go v1.26.0
// protoc v4.24.4 // protoc v5.28.3
// source: pkgdash.proto // source: pkgdash.proto
package pkgdashpb package pkgdashpb
import ( import (
_ "github.com/envoyproxy/protoc-gen-validate/validate" _ "github.com/envoyproxy/protoc-gen-validate/validate"
_ "go.unistack.org/micro-proto/v4/api" _ "go.unistack.org/micro-proto/v3/api"
_ "go.unistack.org/micro-proto/v4/openapiv3" _ "go.unistack.org/micro-proto/v3/openapiv3"
protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl" protoimpl "google.golang.org/protobuf/runtime/protoimpl"
timestamppb "google.golang.org/protobuf/types/known/timestamppb" timestamppb "google.golang.org/protobuf/types/known/timestamppb"
@ -2034,11 +2034,10 @@ var file_pkgdash_proto_rawDesc = []byte{
0x9e, 0x03, 0x25, 0x2a, 0x0a, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x9e, 0x03, 0x25, 0x2a, 0x0a, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x42,
0x17, 0x0a, 0x15, 0x12, 0x13, 0x0a, 0x11, 0x2e, 0x70, 0x6b, 0x67, 0x64, 0x61, 0x73, 0x68, 0x2e, 0x17, 0x0a, 0x15, 0x12, 0x13, 0x0a, 0x11, 0x2e, 0x70, 0x6b, 0x67, 0x64, 0x61, 0x73, 0x68, 0x2e,
0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x73, 0x70, 0xb2, 0xea, 0xff, 0xf9, 0x01, 0x0d, 0x12, 0x0b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x73, 0x70, 0xb2, 0xea, 0xff, 0xf9, 0x01, 0x0d, 0x12, 0x0b,
0x2f, 0x76, 0x31, 0x2f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x42, 0x36, 0x5a, 0x34, 0x67, 0x2f, 0x76, 0x31, 0x2f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x42, 0x29, 0x5a, 0x27, 0x67,
0x6f, 0x2e, 0x75, 0x6e, 0x69, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x75, 0x6f, 0x2e, 0x75, 0x6e, 0x69, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x70,
0x6e, 0x69, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2d, 0x6f, 0x72, 0x67, 0x2f, 0x70, 0x6b, 0x67, 0x64, 0x6b, 0x67, 0x64, 0x61, 0x73, 0x68, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x3b, 0x70, 0x6b, 0x67,
0x61, 0x73, 0x68, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x3b, 0x70, 0x6b, 0x67, 0x64, 0x61, 0x73, 0x64, 0x61, 0x73, 0x68, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x68, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
} }
var ( var (

View File

@ -7,7 +7,7 @@ import "google/protobuf/timestamp.proto";
import "openapiv3/annotations.proto"; import "openapiv3/annotations.proto";
import "validate/validate.proto"; import "validate/validate.proto";
option go_package = "go.unistack.org/unistack-org/pkgdash/proto;pkgdashpb"; option go_package = "go.unistack.org/pkgdash/proto;pkgdashpb";
service Pkgdash { service Pkgdash {
rpc PackageLookup(PackageLookupReq) returns (PackageLookupRsp) { rpc PackageLookup(PackageLookupReq) returns (PackageLookupRsp) {

View File

@ -1,37 +1,36 @@
// Code generated by protoc-gen-go-micro. DO NOT EDIT. // Code generated by protoc-gen-go-micro. DO NOT EDIT.
// versions: // versions:
// - protoc-gen-go-micro v4.0.2 // - protoc-gen-go-micro v3.10.4
// - protoc v4.24.4 // - protoc v5.28.3
// source: pkgdash.proto // source: pkgdash.proto
package pkgdashpb package pkgdashpb
import ( import (
context "context" context "context"
_ "go.unistack.org/micro/v4/client" client "go.unistack.org/micro/v3/client"
options "go.unistack.org/micro/v4/options"
) )
var ( var (
PkgdashServiceName = "PkgdashService" PkgdashName = "Pkgdash"
) )
type PkgdashServiceClient interface { type PkgdashClient interface {
PackageLookup(ctx context.Context, req *PackageLookupReq, opts ...options.Option) (*PackageLookupRsp, error) PackageLookup(ctx context.Context, req *PackageLookupReq, opts ...client.CallOption) (*PackageLookupRsp, error)
PackageCreate(ctx context.Context, req *PackageCreateReq, opts ...options.Option) (*PackageCreateRsp, error) PackageCreate(ctx context.Context, req *PackageCreateReq, opts ...client.CallOption) (*PackageCreateRsp, error)
PackageDelete(ctx context.Context, req *PackageDeleteReq, opts ...options.Option) (*PackageDeleteRsp, error) PackageDelete(ctx context.Context, req *PackageDeleteReq, opts ...client.CallOption) (*PackageDeleteRsp, error)
PackageList(ctx context.Context, req *PackageListReq, opts ...options.Option) (*PackageListRsp, error) PackageList(ctx context.Context, req *PackageListReq, opts ...client.CallOption) (*PackageListRsp, error)
HandlerList(ctx context.Context, req *HandlerListReq, opts ...options.Option) (*HandlerListRsp, error) HandlerList(ctx context.Context, req *HandlerListReq, opts ...client.CallOption) (*HandlerListRsp, error)
PackageModules(ctx context.Context, req *PackageModulesReq, opts ...options.Option) (*PackageModulesRsp, error) PackageModules(ctx context.Context, req *PackageModulesReq, opts ...client.CallOption) (*PackageModulesRsp, error)
PackageUpdate(ctx context.Context, req *PackageUpdateReq, opts ...options.Option) (*PackageUpdateRsp, error) PackageUpdate(ctx context.Context, req *PackageUpdateReq, opts ...client.CallOption) (*PackageUpdateRsp, error)
CommentCreate(ctx context.Context, req *CommentCreateReq, opts ...options.Option) (*CommentCreateRsp, error) CommentCreate(ctx context.Context, req *CommentCreateReq, opts ...client.CallOption) (*CommentCreateRsp, error)
CommentLookup(ctx context.Context, req *CommentLookupReq, opts ...options.Option) (*CommentLookupRsp, error) CommentLookup(ctx context.Context, req *CommentLookupReq, opts ...client.CallOption) (*CommentLookupRsp, error)
CommentList(ctx context.Context, req *CommentListReq, opts ...options.Option) (*CommentListRsp, error) CommentList(ctx context.Context, req *CommentListReq, opts ...client.CallOption) (*CommentListRsp, error)
CommentDelete(ctx context.Context, req *CommentDeleteReq, opts ...options.Option) (*CommentDeleteRsp, error) CommentDelete(ctx context.Context, req *CommentDeleteReq, opts ...client.CallOption) (*CommentDeleteRsp, error)
ModuleList(ctx context.Context, req *ModuleListReq, opts ...options.Option) (*ModuleListRsp, error) ModuleList(ctx context.Context, req *ModuleListReq, opts ...client.CallOption) (*ModuleListRsp, error)
} }
type PkgdashServiceServer interface { type PkgdashServer interface {
PackageLookup(ctx context.Context, req *PackageLookupReq, rsp *PackageLookupRsp) error PackageLookup(ctx context.Context, req *PackageLookupReq, rsp *PackageLookupRsp) error
PackageCreate(ctx context.Context, req *PackageCreateReq, rsp *PackageCreateRsp) error PackageCreate(ctx context.Context, req *PackageCreateReq, rsp *PackageCreateRsp) error
PackageDelete(ctx context.Context, req *PackageDeleteReq, rsp *PackageDeleteRsp) error PackageDelete(ctx context.Context, req *PackageDeleteReq, rsp *PackageDeleteRsp) error

View File

@ -1,114 +1,113 @@
// Code generated by protoc-gen-go-micro. DO NOT EDIT. // Code generated by protoc-gen-go-micro. DO NOT EDIT.
// protoc-gen-go-micro version: v4.0.2 // protoc-gen-go-micro version: v3.10.4
// source: pkgdash.proto // source: pkgdash.proto
package pkgdashpb package pkgdashpb
import ( import (
context "context" context "context"
v41 "go.unistack.org/micro-client-http/v4" v31 "go.unistack.org/micro-client-http/v3"
v4 "go.unistack.org/micro-server-http/v4" v3 "go.unistack.org/micro-server-http/v3"
client "go.unistack.org/micro/v4/client" client "go.unistack.org/micro/v3/client"
options "go.unistack.org/micro/v4/options" server "go.unistack.org/micro/v3/server"
server "go.unistack.org/micro/v4/server"
http "net/http" http "net/http"
) )
var ( var (
PkgdashServiceServerEndpoints = []v4.EndpointMetadata{ PkgdashServerEndpoints = []v3.EndpointMetadata{
{ {
Name: "PkgdashService.PackageLookup", Name: "Pkgdash.PackageLookup",
Path: "/v1/packages/{id}", Path: "/v1/packages/{id}",
Method: "GET", Method: "GET",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.PackageCreate", Name: "Pkgdash.PackageCreate",
Path: "/v1/packages", Path: "/v1/packages",
Method: "POST", Method: "POST",
Body: "*", Body: "*",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.PackageDelete", Name: "Pkgdash.PackageDelete",
Path: "/v1/packages/{id}", Path: "/v1/packages/{id}",
Method: "DELETE", Method: "DELETE",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.PackageList", Name: "Pkgdash.PackageList",
Path: "/v1/packages", Path: "/v1/packages",
Method: "GET", Method: "GET",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.HandlerList", Name: "Pkgdash.HandlerList",
Path: "/v1/packages/{package}/handlers", Path: "/v1/packages/{package}/handlers",
Method: "GET", Method: "GET",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.PackageModules", Name: "Pkgdash.PackageModules",
Path: "/v1/packages/{package}/modules", Path: "/v1/packages/{package}/modules",
Method: "GET", Method: "GET",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.PackageUpdate", Name: "Pkgdash.PackageUpdate",
Path: "/v1/packages/{id}", Path: "/v1/packages/{id}",
Method: "PUT", Method: "PUT",
Body: "*", Body: "*",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.CommentCreate", Name: "Pkgdash.CommentCreate",
Path: "/v1/packages/{package}/comments", Path: "/v1/packages/{package}/comments",
Method: "POST", Method: "POST",
Body: "*", Body: "*",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.CommentLookup", Name: "Pkgdash.CommentLookup",
Path: "/v1/comments/{id}", Path: "/v1/comments/{id}",
Method: "GET", Method: "GET",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.CommentLookup", Name: "Pkgdash.CommentLookup",
Path: "/v1/comments/{package}/comments/{id}", Path: "/v1/comments/{package}/comments/{id}",
Method: "GET", Method: "GET",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.CommentList", Name: "Pkgdash.CommentList",
Path: "/v1/packages/{package}/comments", Path: "/v1/packages/{package}/comments",
Method: "GET", Method: "GET",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.CommentDelete", Name: "Pkgdash.CommentDelete",
Path: "/v1/packages/{package_id}/comments/{id}", Path: "/v1/packages/{package_id}/comments/{id}",
Method: "DELETE", Method: "DELETE",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.CommentDelete", Name: "Pkgdash.CommentDelete",
Path: "/v1/comments/{id}", Path: "/v1/comments/{id}",
Method: "DELETE", Method: "DELETE",
Body: "", Body: "",
Stream: false, Stream: false,
}, },
{ {
Name: "PkgdashService.ModuleList", Name: "Pkgdash.ModuleList",
Path: "/v1/modules", Path: "/v1/modules",
Method: "GET", Method: "GET",
Body: "", Body: "",
@ -117,288 +116,288 @@ var (
} }
) )
type pkgdashServiceClient struct { type pkgdashClient struct {
c client.Client c client.Client
name string name string
} }
func NewPkgdashServiceClient(name string, c client.Client) PkgdashServiceClient { func NewPkgdashClient(name string, c client.Client) PkgdashClient {
return &pkgdashServiceClient{c: c, name: name} return &pkgdashClient{c: c, name: name}
} }
func (c *pkgdashServiceClient) PackageLookup(ctx context.Context, req *PackageLookupReq, opts ...options.Option) (*PackageLookupRsp, error) { func (c *pkgdashClient) PackageLookup(ctx context.Context, req *PackageLookupReq, opts ...client.CallOption) (*PackageLookupRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodGet), v31.Method(http.MethodGet),
v41.Path("/v1/packages/{id}"), v31.Path("/v1/packages/{id}"),
) )
rsp := &PackageLookupRsp{} rsp := &PackageLookupRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.PackageLookup", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageLookup", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) PackageCreate(ctx context.Context, req *PackageCreateReq, opts ...options.Option) (*PackageCreateRsp, error) { func (c *pkgdashClient) PackageCreate(ctx context.Context, req *PackageCreateReq, opts ...client.CallOption) (*PackageCreateRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodPost), v31.Method(http.MethodPost),
v41.Path("/v1/packages"), v31.Path("/v1/packages"),
v41.Body("*"), v31.Body("*"),
) )
rsp := &PackageCreateRsp{} rsp := &PackageCreateRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.PackageCreate", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageCreate", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) PackageDelete(ctx context.Context, req *PackageDeleteReq, opts ...options.Option) (*PackageDeleteRsp, error) { func (c *pkgdashClient) PackageDelete(ctx context.Context, req *PackageDeleteReq, opts ...client.CallOption) (*PackageDeleteRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodDelete), v31.Method(http.MethodDelete),
v41.Path("/v1/packages/{id}"), v31.Path("/v1/packages/{id}"),
) )
rsp := &PackageDeleteRsp{} rsp := &PackageDeleteRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.PackageDelete", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageDelete", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) PackageList(ctx context.Context, req *PackageListReq, opts ...options.Option) (*PackageListRsp, error) { func (c *pkgdashClient) PackageList(ctx context.Context, req *PackageListReq, opts ...client.CallOption) (*PackageListRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodGet), v31.Method(http.MethodGet),
v41.Path("/v1/packages"), v31.Path("/v1/packages"),
) )
rsp := &PackageListRsp{} rsp := &PackageListRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.PackageList", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageList", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) HandlerList(ctx context.Context, req *HandlerListReq, opts ...options.Option) (*HandlerListRsp, error) { func (c *pkgdashClient) HandlerList(ctx context.Context, req *HandlerListReq, opts ...client.CallOption) (*HandlerListRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodGet), v31.Method(http.MethodGet),
v41.Path("/v1/packages/{package}/handlers"), v31.Path("/v1/packages/{package}/handlers"),
) )
rsp := &HandlerListRsp{} rsp := &HandlerListRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.HandlerList", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.HandlerList", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) PackageModules(ctx context.Context, req *PackageModulesReq, opts ...options.Option) (*PackageModulesRsp, error) { func (c *pkgdashClient) PackageModules(ctx context.Context, req *PackageModulesReq, opts ...client.CallOption) (*PackageModulesRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodGet), v31.Method(http.MethodGet),
v41.Path("/v1/packages/{package}/modules"), v31.Path("/v1/packages/{package}/modules"),
) )
rsp := &PackageModulesRsp{} rsp := &PackageModulesRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.PackageModules", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageModules", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) PackageUpdate(ctx context.Context, req *PackageUpdateReq, opts ...options.Option) (*PackageUpdateRsp, error) { func (c *pkgdashClient) PackageUpdate(ctx context.Context, req *PackageUpdateReq, opts ...client.CallOption) (*PackageUpdateRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodPut), v31.Method(http.MethodPut),
v41.Path("/v1/packages/{id}"), v31.Path("/v1/packages/{id}"),
v41.Body("*"), v31.Body("*"),
) )
rsp := &PackageUpdateRsp{} rsp := &PackageUpdateRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.PackageUpdate", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.PackageUpdate", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) CommentCreate(ctx context.Context, req *CommentCreateReq, opts ...options.Option) (*CommentCreateRsp, error) { func (c *pkgdashClient) CommentCreate(ctx context.Context, req *CommentCreateReq, opts ...client.CallOption) (*CommentCreateRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodPost), v31.Method(http.MethodPost),
v41.Path("/v1/packages/{package}/comments"), v31.Path("/v1/packages/{package}/comments"),
v41.Body("*"), v31.Body("*"),
) )
rsp := &CommentCreateRsp{} rsp := &CommentCreateRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.CommentCreate", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.CommentCreate", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) CommentLookup(ctx context.Context, req *CommentLookupReq, opts ...options.Option) (*CommentLookupRsp, error) { func (c *pkgdashClient) CommentLookup(ctx context.Context, req *CommentLookupReq, opts ...client.CallOption) (*CommentLookupRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodGet), v31.Method(http.MethodGet),
v41.Path("/v1/comments/{id}"), v31.Path("/v1/comments/{id}"),
) )
rsp := &CommentLookupRsp{} rsp := &CommentLookupRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.CommentLookup", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.CommentLookup", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) CommentList(ctx context.Context, req *CommentListReq, opts ...options.Option) (*CommentListRsp, error) { func (c *pkgdashClient) CommentList(ctx context.Context, req *CommentListReq, opts ...client.CallOption) (*CommentListRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodGet), v31.Method(http.MethodGet),
v41.Path("/v1/packages/{package}/comments"), v31.Path("/v1/packages/{package}/comments"),
) )
rsp := &CommentListRsp{} rsp := &CommentListRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.CommentList", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.CommentList", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) CommentDelete(ctx context.Context, req *CommentDeleteReq, opts ...options.Option) (*CommentDeleteRsp, error) { func (c *pkgdashClient) CommentDelete(ctx context.Context, req *CommentDeleteReq, opts ...client.CallOption) (*CommentDeleteRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodDelete), v31.Method(http.MethodDelete),
v41.Path("/v1/packages/{package_id}/comments/{id}"), v31.Path("/v1/packages/{package_id}/comments/{id}"),
) )
rsp := &CommentDeleteRsp{} rsp := &CommentDeleteRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.CommentDelete", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.CommentDelete", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
func (c *pkgdashServiceClient) ModuleList(ctx context.Context, req *ModuleListReq, opts ...options.Option) (*ModuleListRsp, error) { func (c *pkgdashClient) ModuleList(ctx context.Context, req *ModuleListReq, opts ...client.CallOption) (*ModuleListRsp, error) {
errmap := make(map[string]interface{}, 1) errmap := make(map[string]interface{}, 1)
errmap["default"] = &ErrorRsp{} errmap["default"] = &ErrorRsp{}
opts = append(opts, opts = append(opts,
v41.ErrorMap(errmap), v31.ErrorMap(errmap),
) )
opts = append(opts, opts = append(opts,
v41.Method(http.MethodGet), v31.Method(http.MethodGet),
v41.Path("/v1/modules"), v31.Path("/v1/modules"),
) )
rsp := &ModuleListRsp{} rsp := &ModuleListRsp{}
err := c.c.Call(ctx, c.c.NewRequest(c.name, "PkgdashService.ModuleList", req), rsp, opts...) err := c.c.Call(ctx, c.c.NewRequest(c.name, "Pkgdash.ModuleList", req), rsp, opts...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return rsp, nil return rsp, nil
} }
type pkgdashServiceServer struct { type pkgdashServer struct {
PkgdashServiceServer PkgdashServer
} }
func (h *pkgdashServiceServer) PackageLookup(ctx context.Context, req *PackageLookupReq, rsp *PackageLookupRsp) error { func (h *pkgdashServer) PackageLookup(ctx context.Context, req *PackageLookupReq, rsp *PackageLookupRsp) error {
return h.PkgdashServiceServer.PackageLookup(ctx, req, rsp) return h.PkgdashServer.PackageLookup(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) PackageCreate(ctx context.Context, req *PackageCreateReq, rsp *PackageCreateRsp) error { func (h *pkgdashServer) PackageCreate(ctx context.Context, req *PackageCreateReq, rsp *PackageCreateRsp) error {
return h.PkgdashServiceServer.PackageCreate(ctx, req, rsp) return h.PkgdashServer.PackageCreate(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) PackageDelete(ctx context.Context, req *PackageDeleteReq, rsp *PackageDeleteRsp) error { func (h *pkgdashServer) PackageDelete(ctx context.Context, req *PackageDeleteReq, rsp *PackageDeleteRsp) error {
return h.PkgdashServiceServer.PackageDelete(ctx, req, rsp) return h.PkgdashServer.PackageDelete(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) PackageList(ctx context.Context, req *PackageListReq, rsp *PackageListRsp) error { func (h *pkgdashServer) PackageList(ctx context.Context, req *PackageListReq, rsp *PackageListRsp) error {
return h.PkgdashServiceServer.PackageList(ctx, req, rsp) return h.PkgdashServer.PackageList(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) HandlerList(ctx context.Context, req *HandlerListReq, rsp *HandlerListRsp) error { func (h *pkgdashServer) HandlerList(ctx context.Context, req *HandlerListReq, rsp *HandlerListRsp) error {
return h.PkgdashServiceServer.HandlerList(ctx, req, rsp) return h.PkgdashServer.HandlerList(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) PackageModules(ctx context.Context, req *PackageModulesReq, rsp *PackageModulesRsp) error { func (h *pkgdashServer) PackageModules(ctx context.Context, req *PackageModulesReq, rsp *PackageModulesRsp) error {
return h.PkgdashServiceServer.PackageModules(ctx, req, rsp) return h.PkgdashServer.PackageModules(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) PackageUpdate(ctx context.Context, req *PackageUpdateReq, rsp *PackageUpdateRsp) error { func (h *pkgdashServer) PackageUpdate(ctx context.Context, req *PackageUpdateReq, rsp *PackageUpdateRsp) error {
return h.PkgdashServiceServer.PackageUpdate(ctx, req, rsp) return h.PkgdashServer.PackageUpdate(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) CommentCreate(ctx context.Context, req *CommentCreateReq, rsp *CommentCreateRsp) error { func (h *pkgdashServer) CommentCreate(ctx context.Context, req *CommentCreateReq, rsp *CommentCreateRsp) error {
return h.PkgdashServiceServer.CommentCreate(ctx, req, rsp) return h.PkgdashServer.CommentCreate(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) CommentLookup(ctx context.Context, req *CommentLookupReq, rsp *CommentLookupRsp) error { func (h *pkgdashServer) CommentLookup(ctx context.Context, req *CommentLookupReq, rsp *CommentLookupRsp) error {
return h.PkgdashServiceServer.CommentLookup(ctx, req, rsp) return h.PkgdashServer.CommentLookup(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) CommentList(ctx context.Context, req *CommentListReq, rsp *CommentListRsp) error { func (h *pkgdashServer) CommentList(ctx context.Context, req *CommentListReq, rsp *CommentListRsp) error {
return h.PkgdashServiceServer.CommentList(ctx, req, rsp) return h.PkgdashServer.CommentList(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) CommentDelete(ctx context.Context, req *CommentDeleteReq, rsp *CommentDeleteRsp) error { func (h *pkgdashServer) CommentDelete(ctx context.Context, req *CommentDeleteReq, rsp *CommentDeleteRsp) error {
return h.PkgdashServiceServer.CommentDelete(ctx, req, rsp) return h.PkgdashServer.CommentDelete(ctx, req, rsp)
} }
func (h *pkgdashServiceServer) ModuleList(ctx context.Context, req *ModuleListReq, rsp *ModuleListRsp) error { func (h *pkgdashServer) ModuleList(ctx context.Context, req *ModuleListReq, rsp *ModuleListRsp) error {
return h.PkgdashServiceServer.ModuleList(ctx, req, rsp) return h.PkgdashServer.ModuleList(ctx, req, rsp)
} }
func RegisterPkgdashServiceServer(s server.Server, sh PkgdashServiceServer, opts ...options.Option) error { func RegisterPkgdashServer(s server.Server, sh PkgdashServer, opts ...server.HandlerOption) error {
type pkgdashService interface { type pkgdash interface {
PackageLookup(ctx context.Context, req *PackageLookupReq, rsp *PackageLookupRsp) error PackageLookup(ctx context.Context, req *PackageLookupReq, rsp *PackageLookupRsp) error
PackageCreate(ctx context.Context, req *PackageCreateReq, rsp *PackageCreateRsp) error PackageCreate(ctx context.Context, req *PackageCreateReq, rsp *PackageCreateRsp) error
PackageDelete(ctx context.Context, req *PackageDeleteReq, rsp *PackageDeleteRsp) error PackageDelete(ctx context.Context, req *PackageDeleteReq, rsp *PackageDeleteRsp) error
@ -412,11 +411,11 @@ func RegisterPkgdashServiceServer(s server.Server, sh PkgdashServiceServer, opts
CommentDelete(ctx context.Context, req *CommentDeleteReq, rsp *CommentDeleteRsp) error CommentDelete(ctx context.Context, req *CommentDeleteReq, rsp *CommentDeleteRsp) error
ModuleList(ctx context.Context, req *ModuleListReq, rsp *ModuleListRsp) error ModuleList(ctx context.Context, req *ModuleListReq, rsp *ModuleListRsp) error
} }
type PkgdashService struct { type Pkgdash struct {
pkgdashService pkgdash
} }
h := &pkgdashServiceServer{sh} h := &pkgdashServer{sh}
var nopts []options.Option var nopts []server.HandlerOption
nopts = append(nopts, v4.HandlerEndpoints(PkgdashServiceServerEndpoints)) nopts = append(nopts, v3.HandlerEndpoints(PkgdashServerEndpoints))
return s.Handle(&PkgdashService{h}, append(nopts, opts...)...) return s.Handle(s.NewHandler(&Pkgdash{h}, append(nopts, opts...)...))
} }
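For callers, the regeneration against micro v3 renames the client surface (PkgdashServiceClient becomes PkgdashClient, NewPkgdashServiceClient becomes NewPkgdashClient) and switches per-call options from options.Option to client.CallOption. A brief sketch against the new surface; the service name string is an assumption.

package app

import (
	"context"

	"go.unistack.org/micro/v3/client"
	pb "go.unistack.org/pkgdash/proto"
)

// listModules calls the regenerated v3 client; extra client.CallOption values
// could be appended to the call if needed.
func listModules(ctx context.Context, c client.Client) (*pb.ModuleListRsp, error) {
	cli := pb.NewPkgdashClient("pkgdash", c) // "pkgdash" service name is hypothetical
	return cli.ModuleList(ctx, &pb.ModuleListReq{})
}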