Mirror of https://github.com/kiegroup/git-backporting.git (synced 2025-04-22 03:28:42 +00:00)

Compare commits

No commits in common. "main" and "v4.8.2" have entirely different histories.

14 changed files with 5585 additions and 4518 deletions
CHANGELOG.md (20 changes)

@@ -1,25 +1,5 @@
 # Changelog
 
-## <small>4.8.5 (2025-04-15)</small>
-
-* build(deps): audit fix (#150) ([3a9d367](https://github.com/kiegroup/git-backporting/commit/3a9d367)), closes [#150](https://github.com/kiegroup/git-backporting/issues/150)
-* build(deps): upgrade release-it to v18 (#153) ([c9a7375](https://github.com/kiegroup/git-backporting/commit/c9a7375)), closes [#153](https://github.com/kiegroup/git-backporting/issues/153)
-* fix(#151): fix gitlab post comments url (#152) ([d74a787](https://github.com/kiegroup/git-backporting/commit/d74a787)), closes [#152](https://github.com/kiegroup/git-backporting/issues/152)
-
-## [4.8.4](https://github.com/kiegroup/git-backporting/compare/v4.8.3...v4.8.4) (2024-11-02)
-
-
-### Bug Fixes
-
-* abort conflicting cherry-pick before starting new one ([#146](https://github.com/kiegroup/git-backporting/issues/146)) ([3deee59](https://github.com/kiegroup/git-backporting/commit/3deee59d4c3b726ae131b5974af4618dd5e7d1d2))
-
-## [4.8.3](https://github.com/kiegroup/git-backporting/compare/v4.8.2...v4.8.3) (2024-10-10)
-
-
-### Bug Fixes
-
-* auto-no-squash inference for GitLab ([#140](https://github.com/kiegroup/git-backporting/issues/140)) ([b4d0481](https://github.com/kiegroup/git-backporting/commit/b4d0481c5649115367f1ae0724d12d55b6b86e10))
-
 ## [4.8.2](https://github.com/kiegroup/git-backporting/compare/v4.8.1...v4.8.2) (2024-10-07)
 
 
dist/cli/index.js (vendored, 355 changes)
@@ -541,13 +541,6 @@ class GitCLIService {
 return;
 }
 this.logger.info(`Folder ${to} already exist. Won't clone`);
-// ensure the working tree is properly reset - no stale changes
-// from previous (failed) backport
-const ongoingCherryPick = await this.anyConflict(to);
-if (ongoingCherryPick) {
-this.logger.warn("Found previously failed cherry-pick, aborting it");
-await this.git(to).raw(["cherry-pick", "--abort"]);
-}
 // checkout to the proper branch
 this.logger.info(`Checking out branch ${branch}`);
 await this.git(to).checkout(branch);
@@ -605,20 +598,6 @@ class GitCLIService {
 throw error;
 }
 }
-/**
- * Check whether there are some conflicts in the current working directory
- * which means there is an ongoing cherry-pick that did not complete successfully
- * @param cwd repository in which the check should be performed
- * @return true if there is some conflict, false otherwise
- */
-async anyConflict(cwd) {
-const status = await this.git(cwd).status();
-if (status.conflicted.length > 0) {
-this.logger.debug(`Found conflicts in branch ${status.current}`);
-return true;
-}
-return false;
-}
 /**
  * Push a branch to a remote
  * @param cwd repository in which the push should be performed
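The block removed in the hunk above exists only on main and implements the fix from #146: before reusing an already-cloned folder, the tool aborts any cherry-pick left half-applied by a previous failed backport. A minimal standalone sketch of that check with simple-git (the directory handling and logging are illustrative assumptions, not the project's exact code):

    import { simpleGit } from "simple-git";

    // Abort a stale cherry-pick left behind by a failed backport attempt,
    // mirroring the anyConflict()/cherry-pick --abort logic removed above.
    async function abortStaleCherryPick(cwd: string): Promise<void> {
      const status = await simpleGit(cwd).status();
      if (status.conflicted.length > 0) {
        console.warn(`Found previously failed cherry-pick in ${status.current}, aborting it`);
        await simpleGit(cwd).raw(["cherry-pick", "--abort"]);
      }
    }

On main this runs right after the "Folder ... already exist" log line, so a stale working tree can no longer break the next cherry-pick.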
@@ -754,7 +733,7 @@ exports.inferGitApiUrl = inferGitApiUrl;
 /**
  * Infer the value of the squash option
  * @param open true if the pull/merge request is still open
- * @param squash_commit undefined or null if the pull/merge request was merged, the sha of the squashed commit if it was squashed
+ * @param squash_commit undefined if the pull/merge request was merged, the sha of the squashed commit if it was squashed
  * @returns true if a single commit must be cherry-picked, false if all merged commits must be cherry-picked
  */
 const inferSquash = (open, squash_commit) => {
@@ -764,7 +743,7 @@ const inferSquash = (open, squash_commit) => {
 return false;
 }
 else {
-if (squash_commit) {
+if (squash_commit !== undefined) {
 logger.debug(`cherry-pick the squashed commit ${squash_commit}`);
 return true;
 }
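These two hunks are the main side of the auto-no-squash inference fix (#140): the check on squash_commit becomes a plain truthy test because GitLab can report null (not undefined) for merge requests that were not squashed. A hedged TypeScript sketch of the inference, simplified from the bundled code:

    // Sketch only: mirrors the main-side logic shown above, not the exact source.
    const inferSquash = (open: boolean, squashCommitSha?: string | null): boolean => {
      if (open) {
        // an open pull/merge request has not been squashed yet
        return false;
      }
      // truthy test covers both undefined and GitLab's null
      return Boolean(squashCommitSha);
    };

For example, inferSquash(false, null) is false on main, whereas the v4.8.2 check (squash_commit !== undefined) would have treated null as a squashed commit.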
@@ -1151,9 +1130,7 @@ class GitLabClient {
 // example: <host>/api/v4/projects/<namespace>%2Fbackporting-example/merge_requests/1
 async getPullRequest(namespace, repo, mrNumber, squash) {
 const projectId = this.getProjectId(namespace, repo);
-const url = `/projects/${projectId}/merge_requests/${mrNumber}`;
-this.logger.debug(`Fetching pull request ${url}`);
-const { data } = await this.client.get(`${url}`);
+const { data } = await this.client.get(`/projects/${projectId}/merge_requests/${mrNumber}`);
 if (squash === undefined) {
 squash = (0, git_util_1.inferSquash)(data.state === "opened", data.squash_commit_sha);
 }
@@ -1243,7 +1220,7 @@ class GitLabClient {
 try {
 const { namespace, project, id } = this.extractMergeRequestData(mrUrl);
 const projectId = this.getProjectId(namespace, project);
-const { data } = await this.client.post(`/projects/${projectId}/merge_requests/${id}/notes`, {
+const { data } = await this.client.post(`/projects/${projectId}/issues/${id}/notes`, {
 body: comment,
 });
 if (!data) {
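The second hunk above is the change behind fix(#151) from the changelog: on main, GitLab backport status comments are posted to the merge-request notes endpoint rather than the issues one. A standalone sketch of that call with axios (base URL, token handling and identifiers are assumptions for illustration, not the project's client class):

    import axios from "axios";

    // Post a comment on a GitLab merge request via the notes API,
    // as the main side of the hunk above does.
    async function commentOnMergeRequest(projectId: string, mrIid: number, comment: string): Promise<void> {
      const client = axios.create({
        baseURL: "https://gitlab.com/api/v4",
        headers: { "PRIVATE-TOKEN": process.env.GITLAB_TOKEN ?? "" },
      });
      // /projects/:id/merge_requests/:iid/notes targets the MR itself;
      // the old /projects/:id/issues/:iid/notes call (v4.8.2) targets issues instead.
      await client.post(`/projects/${encodeURIComponent(projectId)}/merge_requests/${mrIid}/notes`, {
        body: comment,
      });
    }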
@@ -16268,14 +16245,6 @@ module.exports = require("child_process");
 
 /***/ }),
 
-/***/ 6113:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("crypto");
-
-/***/ }),
-
 /***/ 2361:
 /***/ ((module) => {
 
@@ -19644,11 +19613,10 @@ exports.suggestSimilar = suggestSimilar;
 /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
 
 "use strict";
-/*! Axios v1.8.4 Copyright (c) 2025 Matt Zabriskie and contributors */
+// Axios v1.7.4 Copyright (c) 2024 Matt Zabriskie and contributors
 
 
 const FormData$1 = __nccwpck_require__(4334);
-const crypto = __nccwpck_require__(6113);
 const url = __nccwpck_require__(7310);
 const proxyFromEnv = __nccwpck_require__(3329);
 const http = __nccwpck_require__(3685);
@@ -19662,9 +19630,7 @@ const events = __nccwpck_require__(2361);
 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
 
 const FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData$1);
-const crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto);
 const url__default = /*#__PURE__*/_interopDefaultLegacy(url);
-const proxyFromEnv__default = /*#__PURE__*/_interopDefaultLegacy(proxyFromEnv);
 const http__default = /*#__PURE__*/_interopDefaultLegacy(http);
 const https__default = /*#__PURE__*/_interopDefaultLegacy(https);
 const util__default = /*#__PURE__*/_interopDefaultLegacy(util);
|
@ -20278,6 +20244,26 @@ const toFiniteNumber = (value, defaultValue) => {
|
||||||
return value != null && Number.isFinite(value = +value) ? value : defaultValue;
|
return value != null && Number.isFinite(value = +value) ? value : defaultValue;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const ALPHA = 'abcdefghijklmnopqrstuvwxyz';
|
||||||
|
|
||||||
|
const DIGIT = '0123456789';
|
||||||
|
|
||||||
|
const ALPHABET = {
|
||||||
|
DIGIT,
|
||||||
|
ALPHA,
|
||||||
|
ALPHA_DIGIT: ALPHA + ALPHA.toUpperCase() + DIGIT
|
||||||
|
};
|
||||||
|
|
||||||
|
const generateString = (size = 16, alphabet = ALPHABET.ALPHA_DIGIT) => {
|
||||||
|
let str = '';
|
||||||
|
const {length} = alphabet;
|
||||||
|
while (size--) {
|
||||||
|
str += alphabet[Math.random() * length|0];
|
||||||
|
}
|
||||||
|
|
||||||
|
return str;
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* If the thing is a FormData object, return true, otherwise return false.
|
* If the thing is a FormData object, return true, otherwise return false.
|
||||||
*
|
*
|
||||||
|
@ -20405,6 +20391,8 @@ const utils$1 = {
|
||||||
findKey,
|
findKey,
|
||||||
global: _global,
|
global: _global,
|
||||||
isContextDefined,
|
isContextDefined,
|
||||||
|
ALPHABET,
|
||||||
|
generateString,
|
||||||
isSpecCompliantForm,
|
isSpecCompliantForm,
|
||||||
toJSONObject,
|
toJSONObject,
|
||||||
isAsyncFn,
|
isAsyncFn,
|
||||||
|
@ -20438,10 +20426,7 @@ function AxiosError(message, code, config, request, response) {
|
||||||
code && (this.code = code);
|
code && (this.code = code);
|
||||||
config && (this.config = config);
|
config && (this.config = config);
|
||||||
request && (this.request = request);
|
request && (this.request = request);
|
||||||
if (response) {
|
response && (this.response = response);
|
||||||
this.response = response;
|
|
||||||
this.status = response.status ? response.status : null;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
utils$1.inherits(AxiosError, Error, {
|
utils$1.inherits(AxiosError, Error, {
|
||||||
|
@ -20461,7 +20446,7 @@ utils$1.inherits(AxiosError, Error, {
|
||||||
// Axios
|
// Axios
|
||||||
config: utils$1.toJSONObject(this.config),
|
config: utils$1.toJSONObject(this.config),
|
||||||
code: this.code,
|
code: this.code,
|
||||||
status: this.status
|
status: this.response && this.response.status ? this.response.status : null
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -20798,7 +20783,7 @@ function encode(val) {
|
||||||
*
|
*
|
||||||
* @param {string} url The base of the url (e.g., http://www.google.com)
|
* @param {string} url The base of the url (e.g., http://www.google.com)
|
||||||
* @param {object} [params] The params to be appended
|
* @param {object} [params] The params to be appended
|
||||||
* @param {?(object|Function)} options
|
* @param {?object} options
|
||||||
*
|
*
|
||||||
* @returns {string} The formatted url
|
* @returns {string} The formatted url
|
||||||
*/
|
*/
|
||||||
|
@ -20810,12 +20795,6 @@ function buildURL(url, params, options) {
|
||||||
|
|
||||||
const _encode = options && options.encode || encode;
|
const _encode = options && options.encode || encode;
|
||||||
|
|
||||||
if (utils$1.isFunction(options)) {
|
|
||||||
options = {
|
|
||||||
serialize: options
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const serializeFn = options && options.serialize;
|
const serializeFn = options && options.serialize;
|
||||||
|
|
||||||
let serializedParams;
|
let serializedParams;
|
||||||
|
@ -20916,29 +20895,6 @@ const transitionalDefaults = {
|
||||||
|
|
||||||
const URLSearchParams = url__default["default"].URLSearchParams;
|
const URLSearchParams = url__default["default"].URLSearchParams;
|
||||||
|
|
||||||
const ALPHA = 'abcdefghijklmnopqrstuvwxyz';
|
|
||||||
|
|
||||||
const DIGIT = '0123456789';
|
|
||||||
|
|
||||||
const ALPHABET = {
|
|
||||||
DIGIT,
|
|
||||||
ALPHA,
|
|
||||||
ALPHA_DIGIT: ALPHA + ALPHA.toUpperCase() + DIGIT
|
|
||||||
};
|
|
||||||
|
|
||||||
const generateString = (size = 16, alphabet = ALPHABET.ALPHA_DIGIT) => {
|
|
||||||
let str = '';
|
|
||||||
const {length} = alphabet;
|
|
||||||
const randomValues = new Uint32Array(size);
|
|
||||||
crypto__default["default"].randomFillSync(randomValues);
|
|
||||||
for (let i = 0; i < size; i++) {
|
|
||||||
str += alphabet[randomValues[i] % length];
|
|
||||||
}
|
|
||||||
|
|
||||||
return str;
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
const platform$1 = {
|
const platform$1 = {
|
||||||
isNode: true,
|
isNode: true,
|
||||||
classes: {
|
classes: {
|
||||||
|
@ -20946,15 +20902,11 @@ const platform$1 = {
|
||||||
FormData: FormData__default["default"],
|
FormData: FormData__default["default"],
|
||||||
Blob: typeof Blob !== 'undefined' && Blob || null
|
Blob: typeof Blob !== 'undefined' && Blob || null
|
||||||
},
|
},
|
||||||
ALPHABET,
|
|
||||||
generateString,
|
|
||||||
protocols: [ 'http', 'https', 'file', 'data' ]
|
protocols: [ 'http', 'https', 'file', 'data' ]
|
||||||
};
|
};
|
||||||
|
|
||||||
const hasBrowserEnv = typeof window !== 'undefined' && typeof document !== 'undefined';
|
const hasBrowserEnv = typeof window !== 'undefined' && typeof document !== 'undefined';
|
||||||
|
|
||||||
const _navigator = typeof navigator === 'object' && navigator || undefined;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Determine if we're running in a standard browser environment
|
* Determine if we're running in a standard browser environment
|
||||||
*
|
*
|
||||||
|
@ -20972,8 +20924,10 @@ const _navigator = typeof navigator === 'object' && navigator || undefined;
|
||||||
*
|
*
|
||||||
* @returns {boolean}
|
* @returns {boolean}
|
||||||
*/
|
*/
|
||||||
const hasStandardBrowserEnv = hasBrowserEnv &&
|
const hasStandardBrowserEnv = (
|
||||||
(!_navigator || ['ReactNative', 'NativeScript', 'NS'].indexOf(_navigator.product) < 0);
|
(product) => {
|
||||||
|
return hasBrowserEnv && ['ReactNative', 'NativeScript', 'NS'].indexOf(product) < 0
|
||||||
|
})(typeof navigator !== 'undefined' && navigator.product);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Determine if we're running in a standard browser webWorker environment
|
* Determine if we're running in a standard browser webWorker environment
|
||||||
|
@ -21000,7 +20954,6 @@ const utils = /*#__PURE__*/Object.freeze({
|
||||||
hasBrowserEnv: hasBrowserEnv,
|
hasBrowserEnv: hasBrowserEnv,
|
||||||
hasStandardBrowserWebWorkerEnv: hasStandardBrowserWebWorkerEnv,
|
hasStandardBrowserWebWorkerEnv: hasStandardBrowserWebWorkerEnv,
|
||||||
hasStandardBrowserEnv: hasStandardBrowserEnv,
|
hasStandardBrowserEnv: hasStandardBrowserEnv,
|
||||||
navigator: _navigator,
|
|
||||||
origin: origin
|
origin: origin
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -21722,15 +21675,14 @@ function combineURLs(baseURL, relativeURL) {
 *
 * @returns {string} The combined full path
 */
-function buildFullPath(baseURL, requestedURL, allowAbsoluteUrls) {
-let isRelativeUrl = !isAbsoluteURL(requestedURL);
-if (baseURL && (isRelativeUrl || allowAbsoluteUrls == false)) {
+function buildFullPath(baseURL, requestedURL) {
+if (baseURL && !isAbsoluteURL(requestedURL)) {
 return combineURLs(baseURL, requestedURL);
 }
 return requestedURL;
 }
 
-const VERSION = "1.8.4";
+const VERSION = "1.7.4";
 
 function parseProtocol(url) {
 const match = /^([-+\w]{1,25})(:?\/\/|:)/.exec(url);
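The rest of the hunks in this file come from the vendored axios copy: main bundles axios 1.8.4 while v4.8.2 bundles 1.7.4, so these changes mirror upstream axios differences rather than project code. One visible example is the allowAbsoluteUrls option above, which exists only on the 1.8.x side; a hedged usage sketch (the endpoint is made up):

    import axios from "axios";

    // Assumes axios 1.8.x (the main-side bundle); 1.7.x has no such option and
    // always lets an absolute request URL bypass baseURL.
    const client = axios.create({
      baseURL: "https://gitlab.example.com/api/v4",
      // absolute URLs passed to requests are combined with baseURL instead of replacing it
      allowAbsoluteUrls: false,
    });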
|
@ -21940,9 +21892,9 @@ const readBlob = async function* (blob) {
|
||||||
|
|
||||||
const readBlob$1 = readBlob;
|
const readBlob$1 = readBlob;
|
||||||
|
|
||||||
const BOUNDARY_ALPHABET = platform.ALPHABET.ALPHA_DIGIT + '-_';
|
const BOUNDARY_ALPHABET = utils$1.ALPHABET.ALPHA_DIGIT + '-_';
|
||||||
|
|
||||||
const textEncoder = typeof TextEncoder === 'function' ? new TextEncoder() : new util__default["default"].TextEncoder();
|
const textEncoder = new util.TextEncoder();
|
||||||
|
|
||||||
const CRLF = '\r\n';
|
const CRLF = '\r\n';
|
||||||
const CRLF_BYTES = textEncoder.encode(CRLF);
|
const CRLF_BYTES = textEncoder.encode(CRLF);
|
||||||
|
@ -22000,7 +21952,7 @@ const formDataToStream = (form, headersHandler, options) => {
|
||||||
const {
|
const {
|
||||||
tag = 'form-data-boundary',
|
tag = 'form-data-boundary',
|
||||||
size = 25,
|
size = 25,
|
||||||
boundary = tag + '-' + platform.generateString(size, BOUNDARY_ALPHABET)
|
boundary = tag + '-' + utils$1.generateString(size, BOUNDARY_ALPHABET)
|
||||||
} = options || {};
|
} = options || {};
|
||||||
|
|
||||||
if(!utils$1.isFormData(form)) {
|
if(!utils$1.isFormData(form)) {
|
||||||
|
@ -22280,7 +22232,7 @@ function dispatchBeforeRedirect(options, responseDetails) {
|
||||||
function setProxy(options, configProxy, location) {
|
function setProxy(options, configProxy, location) {
|
||||||
let proxy = configProxy;
|
let proxy = configProxy;
|
||||||
if (!proxy && proxy !== false) {
|
if (!proxy && proxy !== false) {
|
||||||
const proxyUrl = proxyFromEnv__default["default"].getProxyForUrl(location);
|
const proxyUrl = proxyFromEnv.getProxyForUrl(location);
|
||||||
if (proxyUrl) {
|
if (proxyUrl) {
|
||||||
proxy = new URL(proxyUrl);
|
proxy = new URL(proxyUrl);
|
||||||
}
|
}
|
||||||
|
@ -22425,8 +22377,8 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse url
|
// Parse url
|
||||||
const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
|
const fullPath = buildFullPath(config.baseURL, config.url);
|
||||||
const parsed = new URL(fullPath, platform.hasBrowserEnv ? platform.origin : undefined);
|
const parsed = new URL(fullPath, utils$1.hasBrowserEnv ? platform.origin : undefined);
|
||||||
const protocol = parsed.protocol || supportedProtocols[0];
|
const protocol = parsed.protocol || supportedProtocols[0];
|
||||||
|
|
||||||
if (protocol === 'data:') {
|
if (protocol === 'data:') {
|
||||||
|
@ -22511,7 +22463,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if (utils$1.isBlob(data) || utils$1.isFile(data)) {
|
} else if (utils$1.isBlob(data)) {
|
||||||
data.size && headers.setContentType(data.type || 'application/octet-stream');
|
data.size && headers.setContentType(data.type || 'application/octet-stream');
|
||||||
headers.setContentLength(data.size || 0);
|
headers.setContentLength(data.size || 0);
|
||||||
data = stream__default["default"].Readable.from(readBlob$1(data));
|
data = stream__default["default"].Readable.from(readBlob$1(data));
|
||||||
|
@ -22622,7 +22574,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
|
||||||
if (config.socketPath) {
|
if (config.socketPath) {
|
||||||
options.socketPath = config.socketPath;
|
options.socketPath = config.socketPath;
|
||||||
} else {
|
} else {
|
||||||
options.hostname = parsed.hostname.startsWith("[") ? parsed.hostname.slice(1, -1) : parsed.hostname;
|
options.hostname = parsed.hostname;
|
||||||
options.port = parsed.port;
|
options.port = parsed.port;
|
||||||
setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path);
|
setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path);
|
||||||
}
|
}
|
||||||
|
@ -22764,7 +22716,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
|
||||||
}
|
}
|
||||||
|
|
||||||
const err = new AxiosError(
|
const err = new AxiosError(
|
||||||
'stream has been aborted',
|
'maxContentLength size of ' + config.maxContentLength + ' exceeded',
|
||||||
AxiosError.ERR_BAD_RESPONSE,
|
AxiosError.ERR_BAD_RESPONSE,
|
||||||
config,
|
config,
|
||||||
lastRequest
|
lastRequest
|
||||||
|
@ -22887,18 +22839,68 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
const isURLSameOrigin = platform.hasStandardBrowserEnv ? ((origin, isMSIE) => (url) => {
|
const isURLSameOrigin = platform.hasStandardBrowserEnv ?
|
||||||
url = new URL(url, platform.origin);
|
|
||||||
|
|
||||||
return (
|
// Standard browser envs have full support of the APIs needed to test
|
||||||
origin.protocol === url.protocol &&
|
// whether the request URL is of the same origin as current location.
|
||||||
origin.host === url.host &&
|
(function standardBrowserEnv() {
|
||||||
(isMSIE || origin.port === url.port)
|
const msie = /(msie|trident)/i.test(navigator.userAgent);
|
||||||
);
|
const urlParsingNode = document.createElement('a');
|
||||||
})(
|
let originURL;
|
||||||
new URL(platform.origin),
|
|
||||||
platform.navigator && /(msie|trident)/i.test(platform.navigator.userAgent)
|
/**
|
||||||
) : () => true;
|
* Parse a URL to discover its components
|
||||||
|
*
|
||||||
|
* @param {String} url The URL to be parsed
|
||||||
|
* @returns {Object}
|
||||||
|
*/
|
||||||
|
function resolveURL(url) {
|
||||||
|
let href = url;
|
||||||
|
|
||||||
|
if (msie) {
|
||||||
|
// IE needs attribute set twice to normalize properties
|
||||||
|
urlParsingNode.setAttribute('href', href);
|
||||||
|
href = urlParsingNode.href;
|
||||||
|
}
|
||||||
|
|
||||||
|
urlParsingNode.setAttribute('href', href);
|
||||||
|
|
||||||
|
// urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils
|
||||||
|
return {
|
||||||
|
href: urlParsingNode.href,
|
||||||
|
protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '',
|
||||||
|
host: urlParsingNode.host,
|
||||||
|
search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '',
|
||||||
|
hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '',
|
||||||
|
hostname: urlParsingNode.hostname,
|
||||||
|
port: urlParsingNode.port,
|
||||||
|
pathname: (urlParsingNode.pathname.charAt(0) === '/') ?
|
||||||
|
urlParsingNode.pathname :
|
||||||
|
'/' + urlParsingNode.pathname
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
originURL = resolveURL(window.location.href);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determine if a URL shares the same origin as the current location
|
||||||
|
*
|
||||||
|
* @param {String} requestURL The URL to test
|
||||||
|
* @returns {boolean} True if URL shares the same origin, otherwise false
|
||||||
|
*/
|
||||||
|
return function isURLSameOrigin(requestURL) {
|
||||||
|
const parsed = (utils$1.isString(requestURL)) ? resolveURL(requestURL) : requestURL;
|
||||||
|
return (parsed.protocol === originURL.protocol &&
|
||||||
|
parsed.host === originURL.host);
|
||||||
|
};
|
||||||
|
})() :
|
||||||
|
|
||||||
|
// Non standard browser envs (web workers, react-native) lack needed support.
|
||||||
|
(function nonStandardBrowserEnv() {
|
||||||
|
return function isURLSameOrigin() {
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
|
||||||
const cookies = platform.hasStandardBrowserEnv ?
|
const cookies = platform.hasStandardBrowserEnv ?
|
||||||
|
|
||||||
|
@ -22955,7 +22957,7 @@ function mergeConfig(config1, config2) {
|
||||||
config2 = config2 || {};
|
config2 = config2 || {};
|
||||||
const config = {};
|
const config = {};
|
||||||
|
|
||||||
function getMergedValue(target, source, prop, caseless) {
|
function getMergedValue(target, source, caseless) {
|
||||||
if (utils$1.isPlainObject(target) && utils$1.isPlainObject(source)) {
|
if (utils$1.isPlainObject(target) && utils$1.isPlainObject(source)) {
|
||||||
return utils$1.merge.call({caseless}, target, source);
|
return utils$1.merge.call({caseless}, target, source);
|
||||||
} else if (utils$1.isPlainObject(source)) {
|
} else if (utils$1.isPlainObject(source)) {
|
||||||
|
@ -22967,11 +22969,11 @@ function mergeConfig(config1, config2) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
// eslint-disable-next-line consistent-return
|
||||||
function mergeDeepProperties(a, b, prop , caseless) {
|
function mergeDeepProperties(a, b, caseless) {
|
||||||
if (!utils$1.isUndefined(b)) {
|
if (!utils$1.isUndefined(b)) {
|
||||||
return getMergedValue(a, b, prop , caseless);
|
return getMergedValue(a, b, caseless);
|
||||||
} else if (!utils$1.isUndefined(a)) {
|
} else if (!utils$1.isUndefined(a)) {
|
||||||
return getMergedValue(undefined, a, prop , caseless);
|
return getMergedValue(undefined, a, caseless);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -23029,7 +23031,7 @@ function mergeConfig(config1, config2) {
|
||||||
socketPath: defaultToConfig2,
|
socketPath: defaultToConfig2,
|
||||||
responseEncoding: defaultToConfig2,
|
responseEncoding: defaultToConfig2,
|
||||||
validateStatus: mergeDirectKeys,
|
validateStatus: mergeDirectKeys,
|
||||||
headers: (a, b , prop) => mergeDeepProperties(headersToObject(a), headersToObject(b),prop, true)
|
headers: (a, b) => mergeDeepProperties(headersToObject(a), headersToObject(b), true)
|
||||||
};
|
};
|
||||||
|
|
||||||
utils$1.forEach(Object.keys(Object.assign({}, config1, config2)), function computeConfigValue(prop) {
|
utils$1.forEach(Object.keys(Object.assign({}, config1, config2)), function computeConfigValue(prop) {
|
||||||
|
@ -23048,7 +23050,7 @@ const resolveConfig = (config) => {
|
||||||
|
|
||||||
newConfig.headers = headers = AxiosHeaders$1.from(headers);
|
newConfig.headers = headers = AxiosHeaders$1.from(headers);
|
||||||
|
|
||||||
newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url, newConfig.allowAbsoluteUrls), config.params, config.paramsSerializer);
|
newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url), config.params, config.paramsSerializer);
|
||||||
|
|
||||||
// HTTP basic authentication
|
// HTTP basic authentication
|
||||||
if (auth) {
|
if (auth) {
|
||||||
|
@ -23277,24 +23279,20 @@ const xhrAdapter = isXHRAdapterSupported && function (config) {
|
||||||
};
|
};
|
||||||
|
|
||||||
const composeSignals = (signals, timeout) => {
|
const composeSignals = (signals, timeout) => {
|
||||||
const {length} = (signals = signals ? signals.filter(Boolean) : []);
|
|
||||||
|
|
||||||
if (timeout || length) {
|
|
||||||
let controller = new AbortController();
|
let controller = new AbortController();
|
||||||
|
|
||||||
let aborted;
|
let aborted;
|
||||||
|
|
||||||
const onabort = function (reason) {
|
const onabort = function (cancel) {
|
||||||
if (!aborted) {
|
if (!aborted) {
|
||||||
aborted = true;
|
aborted = true;
|
||||||
unsubscribe();
|
unsubscribe();
|
||||||
const err = reason instanceof Error ? reason : this.reason;
|
const err = cancel instanceof Error ? cancel : this.reason;
|
||||||
controller.abort(err instanceof AxiosError ? err : new CanceledError(err instanceof Error ? err.message : err));
|
controller.abort(err instanceof AxiosError ? err : new CanceledError(err instanceof Error ? err.message : err));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let timer = timeout && setTimeout(() => {
|
let timer = timeout && setTimeout(() => {
|
||||||
timer = null;
|
|
||||||
onabort(new AxiosError(`timeout ${timeout} of ms exceeded`, AxiosError.ETIMEDOUT));
|
onabort(new AxiosError(`timeout ${timeout} of ms exceeded`, AxiosError.ETIMEDOUT));
|
||||||
}, timeout);
|
}, timeout);
|
||||||
|
|
||||||
|
@ -23303,20 +23301,23 @@ const composeSignals = (signals, timeout) => {
|
||||||
timer && clearTimeout(timer);
|
timer && clearTimeout(timer);
|
||||||
timer = null;
|
timer = null;
|
||||||
signals.forEach(signal => {
|
signals.forEach(signal => {
|
||||||
signal.unsubscribe ? signal.unsubscribe(onabort) : signal.removeEventListener('abort', onabort);
|
signal &&
|
||||||
|
(signal.removeEventListener ? signal.removeEventListener('abort', onabort) : signal.unsubscribe(onabort));
|
||||||
});
|
});
|
||||||
signals = null;
|
signals = null;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
signals.forEach((signal) => signal.addEventListener('abort', onabort));
|
signals.forEach((signal) => signal && signal.addEventListener && signal.addEventListener('abort', onabort));
|
||||||
|
|
||||||
const {signal} = controller;
|
const {signal} = controller;
|
||||||
|
|
||||||
signal.unsubscribe = () => utils$1.asap(unsubscribe);
|
signal.unsubscribe = unsubscribe;
|
||||||
|
|
||||||
return signal;
|
return [signal, () => {
|
||||||
}
|
timer && clearTimeout(timer);
|
||||||
|
timer = null;
|
||||||
|
}];
|
||||||
};
|
};
|
||||||
|
|
||||||
const composeSignals$1 = composeSignals;
|
const composeSignals$1 = composeSignals;
|
||||||
|
@ -23339,34 +23340,14 @@ const streamChunk = function* (chunk, chunkSize) {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const readBytes = async function* (iterable, chunkSize) {
|
const readBytes = async function* (iterable, chunkSize, encode) {
|
||||||
for await (const chunk of readStream(iterable)) {
|
for await (const chunk of iterable) {
|
||||||
yield* streamChunk(chunk, chunkSize);
|
yield* streamChunk(ArrayBuffer.isView(chunk) ? chunk : (await encode(String(chunk))), chunkSize);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const readStream = async function* (stream) {
|
const trackStream = (stream, chunkSize, onProgress, onFinish, encode) => {
|
||||||
if (stream[Symbol.asyncIterator]) {
|
const iterator = readBytes(stream, chunkSize, encode);
|
||||||
yield* stream;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const reader = stream.getReader();
|
|
||||||
try {
|
|
||||||
for (;;) {
|
|
||||||
const {done, value} = await reader.read();
|
|
||||||
if (done) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
yield value;
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
await reader.cancel();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const trackStream = (stream, chunkSize, onProgress, onFinish) => {
|
|
||||||
const iterator = readBytes(stream, chunkSize);
|
|
||||||
|
|
||||||
let bytes = 0;
|
let bytes = 0;
|
||||||
let done;
|
let done;
|
||||||
|
@ -23469,11 +23450,7 @@ const getBodyLength = async (body) => {
|
||||||
}
|
}
|
||||||
|
|
||||||
if(utils$1.isSpecCompliantForm(body)) {
|
if(utils$1.isSpecCompliantForm(body)) {
|
||||||
const _request = new Request(platform.origin, {
|
return (await new Request(body).arrayBuffer()).byteLength;
|
||||||
method: 'POST',
|
|
||||||
body,
|
|
||||||
});
|
|
||||||
return (await _request.arrayBuffer()).byteLength;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if(utils$1.isArrayBufferView(body) || utils$1.isArrayBuffer(body)) {
|
if(utils$1.isArrayBufferView(body) || utils$1.isArrayBuffer(body)) {
|
||||||
|
@ -23513,14 +23490,19 @@ const fetchAdapter = isFetchSupported && (async (config) => {
|
||||||
|
|
||||||
responseType = responseType ? (responseType + '').toLowerCase() : 'text';
|
responseType = responseType ? (responseType + '').toLowerCase() : 'text';
|
||||||
|
|
||||||
let composedSignal = composeSignals$1([signal, cancelToken && cancelToken.toAbortSignal()], timeout);
|
let [composedSignal, stopTimeout] = (signal || cancelToken || timeout) ?
|
||||||
|
composeSignals$1([signal, cancelToken], timeout) : [];
|
||||||
|
|
||||||
let request;
|
let finished, request;
|
||||||
|
|
||||||
const unsubscribe = composedSignal && composedSignal.unsubscribe && (() => {
|
const onFinish = () => {
|
||||||
composedSignal.unsubscribe();
|
!finished && setTimeout(() => {
|
||||||
|
composedSignal && composedSignal.unsubscribe();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
finished = true;
|
||||||
|
};
|
||||||
|
|
||||||
let requestContentLength;
|
let requestContentLength;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
@ -23546,7 +23528,7 @@ const fetchAdapter = isFetchSupported && (async (config) => {
|
||||||
progressEventReducer(asyncDecorator(onUploadProgress))
|
progressEventReducer(asyncDecorator(onUploadProgress))
|
||||||
);
|
);
|
||||||
|
|
||||||
data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, onProgress, flush);
|
data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, onProgress, flush, encodeText);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -23554,9 +23536,6 @@ const fetchAdapter = isFetchSupported && (async (config) => {
|
||||||
withCredentials = withCredentials ? 'include' : 'omit';
|
withCredentials = withCredentials ? 'include' : 'omit';
|
||||||
}
|
}
|
||||||
|
|
||||||
// Cloudflare Workers throws when credentials are defined
|
|
||||||
// see https://github.com/cloudflare/workerd/issues/902
|
|
||||||
const isCredentialsSupported = "credentials" in Request.prototype;
|
|
||||||
request = new Request(url, {
|
request = new Request(url, {
|
||||||
...fetchOptions,
|
...fetchOptions,
|
||||||
signal: composedSignal,
|
signal: composedSignal,
|
||||||
|
@ -23564,14 +23543,14 @@ const fetchAdapter = isFetchSupported && (async (config) => {
|
||||||
headers: headers.normalize().toJSON(),
|
headers: headers.normalize().toJSON(),
|
||||||
body: data,
|
body: data,
|
||||||
duplex: "half",
|
duplex: "half",
|
||||||
credentials: isCredentialsSupported ? withCredentials : undefined
|
credentials: withCredentials
|
||||||
});
|
});
|
||||||
|
|
||||||
let response = await fetch(request);
|
let response = await fetch(request);
|
||||||
|
|
||||||
const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response');
|
const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response');
|
||||||
|
|
||||||
if (supportsResponseStream && (onDownloadProgress || (isStreamResponse && unsubscribe))) {
|
if (supportsResponseStream && (onDownloadProgress || isStreamResponse)) {
|
||||||
const options = {};
|
const options = {};
|
||||||
|
|
||||||
['status', 'statusText', 'headers'].forEach(prop => {
|
['status', 'statusText', 'headers'].forEach(prop => {
|
||||||
|
@ -23588,8 +23567,8 @@ const fetchAdapter = isFetchSupported && (async (config) => {
|
||||||
response = new Response(
|
response = new Response(
|
||||||
trackStream(response.body, DEFAULT_CHUNK_SIZE, onProgress, () => {
|
trackStream(response.body, DEFAULT_CHUNK_SIZE, onProgress, () => {
|
||||||
flush && flush();
|
flush && flush();
|
||||||
unsubscribe && unsubscribe();
|
isStreamResponse && onFinish();
|
||||||
}),
|
}, encodeText),
|
||||||
options
|
options
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -23598,7 +23577,9 @@ const fetchAdapter = isFetchSupported && (async (config) => {
|
||||||
|
|
||||||
let responseData = await resolvers[utils$1.findKey(resolvers, responseType) || 'text'](response, config);
|
let responseData = await resolvers[utils$1.findKey(resolvers, responseType) || 'text'](response, config);
|
||||||
|
|
||||||
!isStreamResponse && unsubscribe && unsubscribe();
|
!isStreamResponse && onFinish();
|
||||||
|
|
||||||
|
stopTimeout && stopTimeout();
|
||||||
|
|
||||||
return await new Promise((resolve, reject) => {
|
return await new Promise((resolve, reject) => {
|
||||||
settle(resolve, reject, {
|
settle(resolve, reject, {
|
||||||
|
@ -23611,7 +23592,7 @@ const fetchAdapter = isFetchSupported && (async (config) => {
|
||||||
});
|
});
|
||||||
})
|
})
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
unsubscribe && unsubscribe();
|
onFinish();
|
||||||
|
|
||||||
if (err && err.name === 'TypeError' && /fetch/i.test(err.message)) {
|
if (err && err.name === 'TypeError' && /fetch/i.test(err.message)) {
|
||||||
throw Object.assign(
|
throw Object.assign(
|
||||||
|
@ -23822,14 +23803,6 @@ validators$1.transitional = function transitional(validator, version, message) {
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
validators$1.spelling = function spelling(correctSpelling) {
|
|
||||||
return (value, opt) => {
|
|
||||||
// eslint-disable-next-line no-console
|
|
||||||
console.warn(`${opt} is likely a misspelling of ${correctSpelling}`);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Assert object's properties type
|
* Assert object's properties type
|
||||||
*
|
*
|
||||||
|
@ -23899,9 +23872,9 @@ class Axios {
|
||||||
return await this._request(configOrUrl, config);
|
return await this._request(configOrUrl, config);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
if (err instanceof Error) {
|
if (err instanceof Error) {
|
||||||
let dummy = {};
|
let dummy;
|
||||||
|
|
||||||
Error.captureStackTrace ? Error.captureStackTrace(dummy) : (dummy = new Error());
|
Error.captureStackTrace ? Error.captureStackTrace(dummy = {}) : (dummy = new Error());
|
||||||
|
|
||||||
// slice off the Error: ... line
|
// slice off the Error: ... line
|
||||||
const stack = dummy.stack ? dummy.stack.replace(/^.+\n/, '') : '';
|
const stack = dummy.stack ? dummy.stack.replace(/^.+\n/, '') : '';
|
||||||
|
@ -23956,18 +23929,6 @@ class Axios {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set config.allowAbsoluteUrls
|
|
||||||
if (config.allowAbsoluteUrls !== undefined) ; else if (this.defaults.allowAbsoluteUrls !== undefined) {
|
|
||||||
config.allowAbsoluteUrls = this.defaults.allowAbsoluteUrls;
|
|
||||||
} else {
|
|
||||||
config.allowAbsoluteUrls = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
validator.assertOptions(config, {
|
|
||||||
baseUrl: validators.spelling('baseURL'),
|
|
||||||
withXsrfToken: validators.spelling('withXSRFToken')
|
|
||||||
}, true);
|
|
||||||
|
|
||||||
// Set config.method
|
// Set config.method
|
||||||
config.method = (config.method || this.defaults.method || 'get').toLowerCase();
|
config.method = (config.method || this.defaults.method || 'get').toLowerCase();
|
||||||
|
|
||||||
|
@ -24058,7 +24019,7 @@ class Axios {
|
||||||
|
|
||||||
getUri(config) {
|
getUri(config) {
|
||||||
config = mergeConfig(this.defaults, config);
|
config = mergeConfig(this.defaults, config);
|
||||||
const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
|
const fullPath = buildFullPath(config.baseURL, config.url);
|
||||||
return buildURL(fullPath, config.params, config.paramsSerializer);
|
return buildURL(fullPath, config.params, config.paramsSerializer);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -24198,20 +24159,6 @@ class CancelToken {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
toAbortSignal() {
|
|
||||||
const controller = new AbortController();
|
|
||||||
|
|
||||||
const abort = (err) => {
|
|
||||||
controller.abort(err);
|
|
||||||
};
|
|
||||||
|
|
||||||
this.subscribe(abort);
|
|
||||||
|
|
||||||
controller.signal.unsubscribe = () => this.unsubscribe(abort);
|
|
||||||
|
|
||||||
return controller.signal;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns an object that contains a new `CancelToken` and a function that, when called,
|
* Returns an object that contains a new `CancelToken` and a function that, when called,
|
||||||
* cancels the `CancelToken`.
|
* cancels the `CancelToken`.
|
||||||
|
@ -24417,7 +24364,7 @@ module.exports = axios;
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
module.exports = JSON.parse('{"name":"@kie/git-backporting","version":"4.8.5","description":"Git backporting is a tool to execute automatic pull request git backporting.","author":"","license":"MIT","private":false,"main":"./dist/gha/index.js","bin":{"git-backporting":"./dist/cli/index.js"},"files":["dist/cli/index.js"],"publishConfig":{"access":"public"},"scripts":{"prepare":"husky install","clean":"rm -rf ./build ./dist","compile":"tsc -p tsconfig.json && tsc-alias -p tsconfig.json","package":"npm run package:cli && npm run package:gha","package:cli":"ncc build ./build/src/bin/cli.js -o dist/cli","package:gha":"ncc build ./build/src/bin/gha.js -o dist/gha","build":"npm run clean && npm run compile && npm run package","test":"jest --silent","test:report":"npm test -- --coverage --testResultsProcessor=jest-sonar-reporter","lint":"eslint . --ext .ts","lint:fix":"npm run lint -- --fix","ts-node":"ts-node","postversion":"npm run build && git add dist && rm -rf build","release":"release-it","release:branch":"git checkout -b release/$(release-it --release-version) main","release:prepare":"release-it --no-npm.publish --no-github.release --no-git.push --no-git.tag --no-git.requireUpstream","release:prepare:all":"npm run release:branch && npm run release:prepare"},"repository":{"type":"git","url":"git+https://github.com/kiegroup/git-backporting.git"},"keywords":["backporting","pull-requests","merge-requests","github-action","cherry-pick"],"bugs":{"url":"https://github.com/kiegroup/git-backporting/issues"},"homepage":"https://github.com/kiegroup/git-backporting#readme","devDependencies":{"@commitlint/cli":"^17.4.0","@commitlint/config-conventional":"^17.4.0","@gitbeaker/rest":"^39.1.0","@kie/mock-github":"^1.1.0","@octokit/webhooks-types":"^6.8.0","@release-it/conventional-changelog":"^10.0.0","@types/fs-extra":"^9.0.13","@types/jest":"^29.2.4","@types/node":"^18.11.17","@typescript-eslint/eslint-plugin":"^5.47.0","@typescript-eslint/parser":"^5.47.0","@vercel/ncc":"^0.36.0","eslint":"^8.30.0","husky":"^8.0.2","jest":"^29.0.0","jest-sonar-reporter":"^2.0.0","release-it":"^18.1.2","semver":"^7.3.8","ts-jest":"^29.0.0","ts-node":"^10.8.1","tsc-alias":"^1.8.2","tsconfig-paths":"^4.1.0","typescript":"^4.9.3"},"dependencies":{"@actions/core":"^1.10.0","@octokit/rest":"^18.12.0","axios":"^1.4.0","commander":"^9.3.0","fs-extra":"^11.1.0","https":"^1.0.0","simple-git":"^3.15.1"}}');
|
module.exports = JSON.parse('{"name":"@kie/git-backporting","version":"4.8.2","description":"Git backporting is a tool to execute automatic pull request git backporting.","author":"","license":"MIT","private":false,"main":"./dist/gha/index.js","bin":{"git-backporting":"./dist/cli/index.js"},"files":["dist/cli/index.js"],"publishConfig":{"access":"public"},"scripts":{"prepare":"husky install","clean":"rm -rf ./build ./dist","compile":"tsc -p tsconfig.json && tsc-alias -p tsconfig.json","package":"npm run package:cli && npm run package:gha","package:cli":"ncc build ./build/src/bin/cli.js -o dist/cli","package:gha":"ncc build ./build/src/bin/gha.js -o dist/gha","build":"npm run clean && npm run compile && npm run package","test":"jest --silent","test:report":"npm test -- --coverage --testResultsProcessor=jest-sonar-reporter","lint":"eslint . --ext .ts","lint:fix":"npm run lint -- --fix","ts-node":"ts-node","postversion":"npm run build && git add dist && rm -rf build","release":"release-it","release:branch":"git checkout -b release/$(release-it --release-version) main","release:prepare":"release-it --no-npm.publish --no-github.release --no-git.push --no-git.tag --no-git.requireUpstream","release:prepare:all":"npm run release:branch && npm run release:prepare"},"repository":{"type":"git","url":"git+https://github.com/kiegroup/git-backporting.git"},"keywords":["backporting","pull-requests","merge-requests","github-action","cherry-pick"],"bugs":{"url":"https://github.com/kiegroup/git-backporting/issues"},"homepage":"https://github.com/kiegroup/git-backporting#readme","devDependencies":{"@commitlint/cli":"^17.4.0","@commitlint/config-conventional":"^17.4.0","@gitbeaker/rest":"^39.1.0","@kie/mock-github":"^1.1.0","@octokit/webhooks-types":"^6.8.0","@release-it/conventional-changelog":"^7.0.0","@types/fs-extra":"^9.0.13","@types/jest":"^29.2.4","@types/node":"^18.11.17","@typescript-eslint/eslint-plugin":"^5.47.0","@typescript-eslint/parser":"^5.47.0","@vercel/ncc":"^0.36.0","eslint":"^8.30.0","husky":"^8.0.2","jest":"^29.0.0","jest-sonar-reporter":"^2.0.0","release-it":"^16.1.3","semver":"^7.3.8","ts-jest":"^29.0.0","ts-node":"^10.8.1","tsc-alias":"^1.8.2","tsconfig-paths":"^4.1.0","typescript":"^4.9.3"},"dependencies":{"@actions/core":"^1.10.0","@octokit/rest":"^18.12.0","axios":"^1.4.0","commander":"^9.3.0","fs-extra":"^11.1.0","https":"^1.0.0","simple-git":"^3.15.1"}}');
|
||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
|
|
dist/gha/index.js (vendored, 345 changes)
@@ -506,13 +506,6 @@ class GitCLIService {
 return;
 }
 this.logger.info(`Folder ${to} already exist. Won't clone`);
-// ensure the working tree is properly reset - no stale changes
-// from previous (failed) backport
-const ongoingCherryPick = await this.anyConflict(to);
-if (ongoingCherryPick) {
-this.logger.warn("Found previously failed cherry-pick, aborting it");
-await this.git(to).raw(["cherry-pick", "--abort"]);
-}
 // checkout to the proper branch
 this.logger.info(`Checking out branch ${branch}`);
 await this.git(to).checkout(branch);
@@ -570,20 +563,6 @@ class GitCLIService {
 throw error;
 }
 }
-/**
- * Check whether there are some conflicts in the current working directory
- * which means there is an ongoing cherry-pick that did not complete successfully
- * @param cwd repository in which the check should be performed
- * @return true if there is some conflict, false otherwise
- */
-async anyConflict(cwd) {
-const status = await this.git(cwd).status();
-if (status.conflicted.length > 0) {
-this.logger.debug(`Found conflicts in branch ${status.current}`);
-return true;
-}
-return false;
-}
 /**
  * Push a branch to a remote
  * @param cwd repository in which the push should be performed
@@ -719,7 +698,7 @@ exports.inferGitApiUrl = inferGitApiUrl;
 /**
  * Infer the value of the squash option
  * @param open true if the pull/merge request is still open
- * @param squash_commit undefined or null if the pull/merge request was merged, the sha of the squashed commit if it was squashed
+ * @param squash_commit undefined if the pull/merge request was merged, the sha of the squashed commit if it was squashed
  * @returns true if a single commit must be cherry-picked, false if all merged commits must be cherry-picked
  */
 const inferSquash = (open, squash_commit) => {
@@ -729,7 +708,7 @@ const inferSquash = (open, squash_commit) => {
 return false;
 }
 else {
-if (squash_commit) {
+if (squash_commit !== undefined) {
 logger.debug(`cherry-pick the squashed commit ${squash_commit}`);
 return true;
 }
@@ -1116,9 +1095,7 @@ class GitLabClient {
 // example: <host>/api/v4/projects/<namespace>%2Fbackporting-example/merge_requests/1
 async getPullRequest(namespace, repo, mrNumber, squash) {
 const projectId = this.getProjectId(namespace, repo);
-const url = `/projects/${projectId}/merge_requests/${mrNumber}`;
-this.logger.debug(`Fetching pull request ${url}`);
-const { data } = await this.client.get(`${url}`);
+const { data } = await this.client.get(`/projects/${projectId}/merge_requests/${mrNumber}`);
 if (squash === undefined) {
 squash = (0, git_util_1.inferSquash)(data.state === "opened", data.squash_commit_sha);
 }
@@ -1208,7 +1185,7 @@ class GitLabClient {
 try {
 const { namespace, project, id } = this.extractMergeRequestData(mrUrl);
 const projectId = this.getProjectId(namespace, project);
-const { data } = await this.client.post(`/projects/${projectId}/merge_requests/${id}/notes`, {
+const { data } = await this.client.post(`/projects/${projectId}/issues/${id}/notes`, {
 body: comment,
 });
 if (!data) {
@@ -19044,11 +19021,10 @@ module.exports = require("zlib");
 /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
 
 "use strict";
-/*! Axios v1.8.4 Copyright (c) 2025 Matt Zabriskie and contributors */
+// Axios v1.7.4 Copyright (c) 2024 Matt Zabriskie and contributors
 
 
 const FormData$1 = __nccwpck_require__(4334);
-const crypto = __nccwpck_require__(6113);
 const url = __nccwpck_require__(7310);
 const proxyFromEnv = __nccwpck_require__(3329);
 const http = __nccwpck_require__(3685);
@@ -19062,9 +19038,7 @@ const events = __nccwpck_require__(2361);
 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
 
 const FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData$1);
-const crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto);
 const url__default = /*#__PURE__*/_interopDefaultLegacy(url);
-const proxyFromEnv__default = /*#__PURE__*/_interopDefaultLegacy(proxyFromEnv);
 const http__default = /*#__PURE__*/_interopDefaultLegacy(http);
 const https__default = /*#__PURE__*/_interopDefaultLegacy(https);
 const util__default = /*#__PURE__*/_interopDefaultLegacy(util);
|
@ -19678,6 +19652,26 @@ const toFiniteNumber = (value, defaultValue) => {
|
||||||
return value != null && Number.isFinite(value = +value) ? value : defaultValue;
|
return value != null && Number.isFinite(value = +value) ? value : defaultValue;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const ALPHA = 'abcdefghijklmnopqrstuvwxyz';
|
||||||
|
|
||||||
|
const DIGIT = '0123456789';
|
||||||
|
|
||||||
|
const ALPHABET = {
|
||||||
|
DIGIT,
|
||||||
|
ALPHA,
|
||||||
|
ALPHA_DIGIT: ALPHA + ALPHA.toUpperCase() + DIGIT
|
||||||
|
};
|
||||||
|
|
||||||
|
const generateString = (size = 16, alphabet = ALPHABET.ALPHA_DIGIT) => {
|
||||||
|
let str = '';
|
||||||
|
const {length} = alphabet;
|
||||||
|
while (size--) {
|
||||||
|
str += alphabet[Math.random() * length|0];
|
||||||
|
}
|
||||||
|
|
||||||
|
return str;
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* If the thing is a FormData object, return true, otherwise return false.
|
* If the thing is a FormData object, return true, otherwise return false.
|
||||||
*
|
*
|
||||||
|
@ -19805,6 +19799,8 @@ const utils$1 = {
|
||||||
findKey,
|
findKey,
|
||||||
global: _global,
|
global: _global,
|
||||||
isContextDefined,
|
isContextDefined,
|
||||||
|
ALPHABET,
|
||||||
|
generateString,
|
||||||
isSpecCompliantForm,
|
isSpecCompliantForm,
|
||||||
toJSONObject,
|
toJSONObject,
|
||||||
isAsyncFn,
|
isAsyncFn,
|
||||||
|
@ -19838,10 +19834,7 @@ function AxiosError(message, code, config, request, response) {
|
||||||
code && (this.code = code);
|
code && (this.code = code);
|
||||||
config && (this.config = config);
|
config && (this.config = config);
|
||||||
request && (this.request = request);
|
request && (this.request = request);
|
||||||
if (response) {
|
response && (this.response = response);
|
||||||
this.response = response;
|
|
||||||
this.status = response.status ? response.status : null;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
utils$1.inherits(AxiosError, Error, {
|
utils$1.inherits(AxiosError, Error, {
|
||||||
|
@ -19861,7 +19854,7 @@ utils$1.inherits(AxiosError, Error, {
|
||||||
// Axios
|
// Axios
|
||||||
config: utils$1.toJSONObject(this.config),
|
config: utils$1.toJSONObject(this.config),
|
||||||
code: this.code,
|
code: this.code,
|
||||||
status: this.status
|
status: this.response && this.response.status ? this.response.status : null
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@@ -20198,7 +20191,7 @@ function encode(val) {
  *
  * @param {string} url The base of the url (e.g., http://www.google.com)
  * @param {object} [params] The params to be appended
- * @param {?(object|Function)} options
+ * @param {?object} options
  *
  * @returns {string} The formatted url
  */
@@ -20210,12 +20203,6 @@ function buildURL(url, params, options) {

  const _encode = options && options.encode || encode;

- if (utils$1.isFunction(options)) {
- options = {
- serialize: options
- };
- }
-
  const serializeFn = options && options.serialize;

  let serializedParams;
@@ -20316,29 +20303,6 @@ const transitionalDefaults = {

  const URLSearchParams = url__default["default"].URLSearchParams;

- const ALPHA = 'abcdefghijklmnopqrstuvwxyz';
-
- const DIGIT = '0123456789';
-
- const ALPHABET = {
- DIGIT,
- ALPHA,
- ALPHA_DIGIT: ALPHA + ALPHA.toUpperCase() + DIGIT
- };
-
- const generateString = (size = 16, alphabet = ALPHABET.ALPHA_DIGIT) => {
- let str = '';
- const {length} = alphabet;
- const randomValues = new Uint32Array(size);
- crypto__default["default"].randomFillSync(randomValues);
- for (let i = 0; i < size; i++) {
- str += alphabet[randomValues[i] % length];
- }
-
- return str;
- };
-
-
  const platform$1 = {
  isNode: true,
  classes: {
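The hunks above capture a change inside the bundled axios: on main (axios 1.8.x) the boundary alphabet and the `generateString` helper live in the platform layer and draw randomness from `crypto.randomFillSync`, while the v4.8.2 bundle (axios 1.7.4) still uses a `Math.random()`-based `generateString` in utils. A minimal TypeScript sketch of the crypto-backed variant, for illustration only (the helper name `randomString` and the demo boundary prefix are assumptions, not code from this repository):

```ts
import { randomFillSync } from "node:crypto";

const ALPHA = "abcdefghijklmnopqrstuvwxyz";
const DIGIT = "0123456789";
const ALPHA_DIGIT = ALPHA + ALPHA.toUpperCase() + DIGIT;

// Build a random string from a CSPRNG instead of Math.random().
function randomString(size = 16, alphabet = ALPHA_DIGIT): string {
  const randomValues = new Uint32Array(size);
  randomFillSync(randomValues);
  let str = "";
  for (let i = 0; i < size; i++) {
    str += alphabet[randomValues[i] % alphabet.length];
  }
  return str;
}

// e.g. a multipart boundary similar to the one built in formDataToStream:
console.log(`form-data-boundary-${randomString(25, ALPHA_DIGIT + "-_")}`);
```

The modulo reduction mirrors the bundled code; strictly uniform output would additionally need rejection sampling.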
@@ -20346,15 +20310,11 @@ const platform$1 = {
  FormData: FormData__default["default"],
  Blob: typeof Blob !== 'undefined' && Blob || null
  },
- ALPHABET,
- generateString,
  protocols: [ 'http', 'https', 'file', 'data' ]
  };

  const hasBrowserEnv = typeof window !== 'undefined' && typeof document !== 'undefined';

- const _navigator = typeof navigator === 'object' && navigator || undefined;
-
  /**
  * Determine if we're running in a standard browser environment
  *
@@ -20372,8 +20332,10 @@ const _navigator = typeof navigator === 'object' && navigator || undefined;
  *
  * @returns {boolean}
  */
- const hasStandardBrowserEnv = hasBrowserEnv &&
- (!_navigator || ['ReactNative', 'NativeScript', 'NS'].indexOf(_navigator.product) < 0);
+ const hasStandardBrowserEnv = (
+ (product) => {
+ return hasBrowserEnv && ['ReactNative', 'NativeScript', 'NS'].indexOf(product) < 0
+ })(typeof navigator !== 'undefined' && navigator.product);

  /**
  * Determine if we're running in a standard browser webWorker environment
@@ -20400,7 +20362,6 @@ const utils = /*#__PURE__*/Object.freeze({
  hasBrowserEnv: hasBrowserEnv,
  hasStandardBrowserWebWorkerEnv: hasStandardBrowserWebWorkerEnv,
  hasStandardBrowserEnv: hasStandardBrowserEnv,
- navigator: _navigator,
  origin: origin
  });

@@ -21122,15 +21083,14 @@ function combineURLs(baseURL, relativeURL) {
  *
  * @returns {string} The combined full path
  */
- function buildFullPath(baseURL, requestedURL, allowAbsoluteUrls) {
- let isRelativeUrl = !isAbsoluteURL(requestedURL);
- if (baseURL && (isRelativeUrl || allowAbsoluteUrls == false)) {
+ function buildFullPath(baseURL, requestedURL) {
+ if (baseURL && !isAbsoluteURL(requestedURL)) {
  return combineURLs(baseURL, requestedURL);
  }
  return requestedURL;
  }

- const VERSION = "1.8.4";
+ const VERSION = "1.7.4";

  function parseProtocol(url) {
  const match = /^([-+\w]{1,25})(:?\/\/|:)/.exec(url);
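The `buildFullPath` hunk above is where the newer bundle threads through the `allowAbsoluteUrls` flag: when it is explicitly `false`, even an absolute request URL is forced back through `combineURLs` with the `baseURL`. A hedged, self-contained sketch of that behaviour (`isAbsoluteURL` and `combineURLs` are simplified re-implementations for the example, not the exact bundled helpers):

```ts
// Simplified re-implementations for illustration; the bundled axios helpers differ in detail.
const isAbsoluteURL = (url: string): boolean => /^([a-z][a-z\d+\-.]*:)?\/\//i.test(url);
const combineURLs = (baseURL: string, relativeURL: string): string =>
  relativeURL ? baseURL.replace(/\/+$/, "") + "/" + relativeURL.replace(/^\/+/, "") : baseURL;

function buildFullPath(baseURL: string, requestedURL: string, allowAbsoluteUrls = true): string {
  const isRelativeUrl = !isAbsoluteURL(requestedURL);
  // When allowAbsoluteUrls is explicitly false, absolute URLs are still combined with baseURL.
  if (baseURL && (isRelativeUrl || allowAbsoluteUrls === false)) {
    return combineURLs(baseURL, requestedURL);
  }
  return requestedURL;
}

console.log(buildFullPath("https://api.example.com", "/v1/items"));                   // combined with baseURL
console.log(buildFullPath("https://api.example.com", "https://other.test/x"));        // passed through
console.log(buildFullPath("https://api.example.com", "https://other.test/x", false)); // forced through combineURLs
```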
@@ -21340,9 +21300,9 @@ const readBlob = async function* (blob) {

  const readBlob$1 = readBlob;

- const BOUNDARY_ALPHABET = platform.ALPHABET.ALPHA_DIGIT + '-_';
+ const BOUNDARY_ALPHABET = utils$1.ALPHABET.ALPHA_DIGIT + '-_';

- const textEncoder = typeof TextEncoder === 'function' ? new TextEncoder() : new util__default["default"].TextEncoder();
+ const textEncoder = new util.TextEncoder();

  const CRLF = '\r\n';
  const CRLF_BYTES = textEncoder.encode(CRLF);
@@ -21400,7 +21360,7 @@ const formDataToStream = (form, headersHandler, options) => {
  const {
  tag = 'form-data-boundary',
  size = 25,
- boundary = tag + '-' + platform.generateString(size, BOUNDARY_ALPHABET)
+ boundary = tag + '-' + utils$1.generateString(size, BOUNDARY_ALPHABET)
  } = options || {};

  if(!utils$1.isFormData(form)) {
@@ -21680,7 +21640,7 @@ function dispatchBeforeRedirect(options, responseDetails) {
  function setProxy(options, configProxy, location) {
  let proxy = configProxy;
  if (!proxy && proxy !== false) {
- const proxyUrl = proxyFromEnv__default["default"].getProxyForUrl(location);
+ const proxyUrl = proxyFromEnv.getProxyForUrl(location);
  if (proxyUrl) {
  proxy = new URL(proxyUrl);
  }
@@ -21825,8 +21785,8 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
  }

  // Parse url
- const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
- const parsed = new URL(fullPath, platform.hasBrowserEnv ? platform.origin : undefined);
+ const fullPath = buildFullPath(config.baseURL, config.url);
+ const parsed = new URL(fullPath, utils$1.hasBrowserEnv ? platform.origin : undefined);
  const protocol = parsed.protocol || supportedProtocols[0];

  if (protocol === 'data:') {
@@ -21911,7 +21871,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
  } catch (e) {
  }
  }
- } else if (utils$1.isBlob(data) || utils$1.isFile(data)) {
+ } else if (utils$1.isBlob(data)) {
  data.size && headers.setContentType(data.type || 'application/octet-stream');
  headers.setContentLength(data.size || 0);
  data = stream__default["default"].Readable.from(readBlob$1(data));
@@ -22022,7 +21982,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
  if (config.socketPath) {
  options.socketPath = config.socketPath;
  } else {
- options.hostname = parsed.hostname.startsWith("[") ? parsed.hostname.slice(1, -1) : parsed.hostname;
+ options.hostname = parsed.hostname;
  options.port = parsed.port;
  setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path);
  }
@@ -22164,7 +22124,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
  }

  const err = new AxiosError(
- 'stream has been aborted',
+ 'maxContentLength size of ' + config.maxContentLength + ' exceeded',
  AxiosError.ERR_BAD_RESPONSE,
  config,
  lastRequest
@@ -22287,18 +22247,68 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) {
  });
  };

- const isURLSameOrigin = platform.hasStandardBrowserEnv ? ((origin, isMSIE) => (url) => {
- url = new URL(url, platform.origin);
-
- return (
- origin.protocol === url.protocol &&
- origin.host === url.host &&
- (isMSIE || origin.port === url.port)
- );
- })(
- new URL(platform.origin),
- platform.navigator && /(msie|trident)/i.test(platform.navigator.userAgent)
- ) : () => true;
+ const isURLSameOrigin = platform.hasStandardBrowserEnv ?
+ // Standard browser envs have full support of the APIs needed to test
+ // whether the request URL is of the same origin as current location.
+ (function standardBrowserEnv() {
+ const msie = /(msie|trident)/i.test(navigator.userAgent);
+ const urlParsingNode = document.createElement('a');
+ let originURL;
+
+ /**
+ * Parse a URL to discover its components
+ *
+ * @param {String} url The URL to be parsed
+ * @returns {Object}
+ */
+ function resolveURL(url) {
+ let href = url;
+
+ if (msie) {
+ // IE needs attribute set twice to normalize properties
+ urlParsingNode.setAttribute('href', href);
+ href = urlParsingNode.href;
+ }
+
+ urlParsingNode.setAttribute('href', href);
+
+ // urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils
+ return {
+ href: urlParsingNode.href,
+ protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '',
+ host: urlParsingNode.host,
+ search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '',
+ hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '',
+ hostname: urlParsingNode.hostname,
+ port: urlParsingNode.port,
+ pathname: (urlParsingNode.pathname.charAt(0) === '/') ?
+ urlParsingNode.pathname :
+ '/' + urlParsingNode.pathname
+ };
+ }
+
+ originURL = resolveURL(window.location.href);
+
+ /**
+ * Determine if a URL shares the same origin as the current location
+ *
+ * @param {String} requestURL The URL to test
+ * @returns {boolean} True if URL shares the same origin, otherwise false
+ */
+ return function isURLSameOrigin(requestURL) {
+ const parsed = (utils$1.isString(requestURL)) ? resolveURL(requestURL) : requestURL;
+ return (parsed.protocol === originURL.protocol &&
+ parsed.host === originURL.host);
+ };
+ })() :
+
+ // Non standard browser envs (web workers, react-native) lack needed support.
+ (function nonStandardBrowserEnv() {
+ return function isURLSameOrigin() {
+ return true;
+ };
+ })();

  const cookies = platform.hasStandardBrowserEnv ?

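The large `isURLSameOrigin` hunk above swaps the legacy `<a>`-element URL parsing (v4.8.2 bundle, axios 1.7.4) for the standard `URL` API (main, axios 1.8.x). A small, hedged TypeScript sketch of the newer approach, assuming an arbitrary origin value for the demo:

```ts
// Compare protocol, host and (unless legacy IE) port of a request URL against a fixed origin.
function makeIsSameOrigin(originHref: string, isLegacyIE = false): (url: string) => boolean {
  const origin = new URL(originHref);
  return (url: string): boolean => {
    const parsed = new URL(url, origin);
    return (
      origin.protocol === parsed.protocol &&
      origin.host === parsed.host &&
      (isLegacyIE || origin.port === parsed.port)
    );
  };
}

const isSameOrigin = makeIsSameOrigin("https://app.example.com:8443");
console.log(isSameOrigin("/api/data"));                      // true: relative URLs resolve against the origin
console.log(isSameOrigin("https://other.example.com/data")); // false
```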
@@ -22355,7 +22365,7 @@ function mergeConfig(config1, config2) {
  config2 = config2 || {};
  const config = {};

- function getMergedValue(target, source, prop, caseless) {
+ function getMergedValue(target, source, caseless) {
  if (utils$1.isPlainObject(target) && utils$1.isPlainObject(source)) {
  return utils$1.merge.call({caseless}, target, source);
  } else if (utils$1.isPlainObject(source)) {
@@ -22367,11 +22377,11 @@ function mergeConfig(config1, config2) {
  }

  // eslint-disable-next-line consistent-return
- function mergeDeepProperties(a, b, prop , caseless) {
+ function mergeDeepProperties(a, b, caseless) {
  if (!utils$1.isUndefined(b)) {
- return getMergedValue(a, b, prop , caseless);
+ return getMergedValue(a, b, caseless);
  } else if (!utils$1.isUndefined(a)) {
- return getMergedValue(undefined, a, prop , caseless);
+ return getMergedValue(undefined, a, caseless);
  }
  }

@@ -22429,7 +22439,7 @@ function mergeConfig(config1, config2) {
  socketPath: defaultToConfig2,
  responseEncoding: defaultToConfig2,
  validateStatus: mergeDirectKeys,
- headers: (a, b , prop) => mergeDeepProperties(headersToObject(a), headersToObject(b),prop, true)
+ headers: (a, b) => mergeDeepProperties(headersToObject(a), headersToObject(b), true)
  };

  utils$1.forEach(Object.keys(Object.assign({}, config1, config2)), function computeConfigValue(prop) {
@@ -22448,7 +22458,7 @@ const resolveConfig = (config) => {

  newConfig.headers = headers = AxiosHeaders$1.from(headers);

- newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url, newConfig.allowAbsoluteUrls), config.params, config.paramsSerializer);
+ newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url), config.params, config.paramsSerializer);

  // HTTP basic authentication
  if (auth) {
@@ -22677,24 +22687,20 @@ const xhrAdapter = isXHRAdapterSupported && function (config) {
  };

  const composeSignals = (signals, timeout) => {
- const {length} = (signals = signals ? signals.filter(Boolean) : []);
-
- if (timeout || length) {
  let controller = new AbortController();

  let aborted;

- const onabort = function (reason) {
+ const onabort = function (cancel) {
  if (!aborted) {
  aborted = true;
  unsubscribe();
- const err = reason instanceof Error ? reason : this.reason;
+ const err = cancel instanceof Error ? cancel : this.reason;
  controller.abort(err instanceof AxiosError ? err : new CanceledError(err instanceof Error ? err.message : err));
  }
  };

  let timer = timeout && setTimeout(() => {
- timer = null;
  onabort(new AxiosError(`timeout ${timeout} of ms exceeded`, AxiosError.ETIMEDOUT));
  }, timeout);

@@ -22703,20 +22709,23 @@ const composeSignals = (signals, timeout) => {
  timer && clearTimeout(timer);
  timer = null;
  signals.forEach(signal => {
- signal.unsubscribe ? signal.unsubscribe(onabort) : signal.removeEventListener('abort', onabort);
+ signal &&
+ (signal.removeEventListener ? signal.removeEventListener('abort', onabort) : signal.unsubscribe(onabort));
  });
  signals = null;
  }
  };

- signals.forEach((signal) => signal.addEventListener('abort', onabort));
+ signals.forEach((signal) => signal && signal.addEventListener && signal.addEventListener('abort', onabort));

  const {signal} = controller;

- signal.unsubscribe = () => utils$1.asap(unsubscribe);
+ signal.unsubscribe = unsubscribe;

- return signal;
- }
+ return [signal, () => {
+ timer && clearTimeout(timer);
+ timer = null;
+ }];
  };

  const composeSignals$1 = composeSignals;
@@ -22739,34 +22748,14 @@ const streamChunk = function* (chunk, chunkSize) {
  }
  };

- const readBytes = async function* (iterable, chunkSize) {
- for await (const chunk of readStream(iterable)) {
- yield* streamChunk(chunk, chunkSize);
+ const readBytes = async function* (iterable, chunkSize, encode) {
+ for await (const chunk of iterable) {
+ yield* streamChunk(ArrayBuffer.isView(chunk) ? chunk : (await encode(String(chunk))), chunkSize);
  }
  };

- const readStream = async function* (stream) {
- if (stream[Symbol.asyncIterator]) {
- yield* stream;
- return;
- }
-
- const reader = stream.getReader();
- try {
- for (;;) {
- const {done, value} = await reader.read();
- if (done) {
- break;
- }
- yield value;
- }
- } finally {
- await reader.cancel();
- }
- };
-
- const trackStream = (stream, chunkSize, onProgress, onFinish) => {
- const iterator = readBytes(stream, chunkSize);
+ const trackStream = (stream, chunkSize, onProgress, onFinish, encode) => {
+ const iterator = readBytes(stream, chunkSize, encode);

  let bytes = 0;
  let done;
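The `composeSignals` hunks show several abort sources (caller signal, cancel token, timeout) being funnelled into a single `AbortController`. A minimal, hedged sketch of the same pattern; it deliberately omits the unsubscribe bookkeeping of the bundled code:

```ts
// Abort the returned signal when any input signal fires or when the timeout elapses.
function composeAbortSignals(signals: (AbortSignal | undefined)[], timeout?: number): AbortSignal {
  const controller = new AbortController();
  const present = signals.filter((s): s is AbortSignal => Boolean(s));

  const abort = (reason?: unknown) => {
    if (!controller.signal.aborted) {
      controller.abort(reason);
    }
  };

  const timer = timeout
    ? setTimeout(() => abort(new Error(`timeout ${timeout} ms exceeded`)), timeout)
    : undefined;

  present.forEach(signal => signal.addEventListener("abort", () => abort(signal.reason), { once: true }));
  controller.signal.addEventListener("abort", () => { if (timer) clearTimeout(timer); }, { once: true });

  return controller.signal;
}

// Usage: pass the composed signal to fetch(url, { signal: composed }).
const composed = composeAbortSignals([new AbortController().signal], 5000);
console.log(composed.aborted); // false until a source aborts or the 5 s timeout fires
```

On Node 20+ and modern browsers, `AbortSignal.any()` combined with `AbortSignal.timeout()` covers much of this without a hand-rolled helper.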
@@ -22869,11 +22858,7 @@ const getBodyLength = async (body) => {
  }

  if(utils$1.isSpecCompliantForm(body)) {
- const _request = new Request(platform.origin, {
- method: 'POST',
- body,
- });
- return (await _request.arrayBuffer()).byteLength;
+ return (await new Request(body).arrayBuffer()).byteLength;
  }

  if(utils$1.isArrayBufferView(body) || utils$1.isArrayBuffer(body)) {
@@ -22913,14 +22898,19 @@ const fetchAdapter = isFetchSupported && (async (config) => {

  responseType = responseType ? (responseType + '').toLowerCase() : 'text';

- let composedSignal = composeSignals$1([signal, cancelToken && cancelToken.toAbortSignal()], timeout);
+ let [composedSignal, stopTimeout] = (signal || cancelToken || timeout) ?
+ composeSignals$1([signal, cancelToken], timeout) : [];

- let request;
+ let finished, request;

- const unsubscribe = composedSignal && composedSignal.unsubscribe && (() => {
- composedSignal.unsubscribe();
+ const onFinish = () => {
+ !finished && setTimeout(() => {
+ composedSignal && composedSignal.unsubscribe();
  });

+ finished = true;
+ };

  let requestContentLength;

  try {
@@ -22946,7 +22936,7 @@ const fetchAdapter = isFetchSupported && (async (config) => {
  progressEventReducer(asyncDecorator(onUploadProgress))
  );

- data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, onProgress, flush);
+ data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, onProgress, flush, encodeText);
  }
  }

@@ -22954,9 +22944,6 @@ const fetchAdapter = isFetchSupported && (async (config) => {
  withCredentials = withCredentials ? 'include' : 'omit';
  }

- // Cloudflare Workers throws when credentials are defined
- // see https://github.com/cloudflare/workerd/issues/902
- const isCredentialsSupported = "credentials" in Request.prototype;
  request = new Request(url, {
  ...fetchOptions,
  signal: composedSignal,
@@ -22964,14 +22951,14 @@ const fetchAdapter = isFetchSupported && (async (config) => {
  headers: headers.normalize().toJSON(),
  body: data,
  duplex: "half",
- credentials: isCredentialsSupported ? withCredentials : undefined
+ credentials: withCredentials
  });

  let response = await fetch(request);

  const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response');

- if (supportsResponseStream && (onDownloadProgress || (isStreamResponse && unsubscribe))) {
+ if (supportsResponseStream && (onDownloadProgress || isStreamResponse)) {
  const options = {};

  ['status', 'statusText', 'headers'].forEach(prop => {
@@ -22988,8 +22975,8 @@ const fetchAdapter = isFetchSupported && (async (config) => {
  response = new Response(
  trackStream(response.body, DEFAULT_CHUNK_SIZE, onProgress, () => {
  flush && flush();
- unsubscribe && unsubscribe();
- }),
+ isStreamResponse && onFinish();
+ }, encodeText),
  options
  );
  }
@@ -22998,7 +22985,9 @@ const fetchAdapter = isFetchSupported && (async (config) => {

  let responseData = await resolvers[utils$1.findKey(resolvers, responseType) || 'text'](response, config);

- !isStreamResponse && unsubscribe && unsubscribe();
+ !isStreamResponse && onFinish();

+ stopTimeout && stopTimeout();
+
  return await new Promise((resolve, reject) => {
  settle(resolve, reject, {
@@ -23011,7 +23000,7 @@ const fetchAdapter = isFetchSupported && (async (config) => {
  });
  })
  } catch (err) {
- unsubscribe && unsubscribe();
+ onFinish();

  if (err && err.name === 'TypeError' && /fetch/i.test(err.message)) {
  throw Object.assign(
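One detail in the `getBodyLength` hunk above is measuring a spec-compliant FormData body by letting a `Request` object serialize it. A hedged sketch of that trick (the probe URL is an arbitrary placeholder; nothing is sent over the network):

```ts
// Measure the encoded byte length of a FormData body by letting Request serialize it.
async function getFormDataByteLength(form: FormData): Promise<number> {
  // The URL is never fetched; the Request instance is used purely as an encoder.
  const probe = new Request("http://localhost/", { method: "POST", body: form });
  return (await probe.arrayBuffer()).byteLength;
}

async function demo(): Promise<void> {
  const form = new FormData();
  form.append("note", "hello");
  console.log(await getFormDataByteLength(form)); // multipart-encoded size in bytes
}

demo();
```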
@@ -23222,14 +23211,6 @@ validators$1.transitional = function transitional(validator, version, message) {
  };
  };

- validators$1.spelling = function spelling(correctSpelling) {
- return (value, opt) => {
- // eslint-disable-next-line no-console
- console.warn(`${opt} is likely a misspelling of ${correctSpelling}`);
- return true;
- }
- };
-
  /**
  * Assert object's properties type
  *
@@ -23299,9 +23280,9 @@ class Axios {
  return await this._request(configOrUrl, config);
  } catch (err) {
  if (err instanceof Error) {
- let dummy = {};
+ let dummy;

- Error.captureStackTrace ? Error.captureStackTrace(dummy) : (dummy = new Error());
+ Error.captureStackTrace ? Error.captureStackTrace(dummy = {}) : (dummy = new Error());

  // slice off the Error: ... line
  const stack = dummy.stack ? dummy.stack.replace(/^.+\n/, '') : '';
@@ -23356,18 +23337,6 @@ class Axios {
  }
  }

- // Set config.allowAbsoluteUrls
- if (config.allowAbsoluteUrls !== undefined) ; else if (this.defaults.allowAbsoluteUrls !== undefined) {
- config.allowAbsoluteUrls = this.defaults.allowAbsoluteUrls;
- } else {
- config.allowAbsoluteUrls = true;
- }
-
- validator.assertOptions(config, {
- baseUrl: validators.spelling('baseURL'),
- withXsrfToken: validators.spelling('withXSRFToken')
- }, true);
-
  // Set config.method
  config.method = (config.method || this.defaults.method || 'get').toLowerCase();

@@ -23458,7 +23427,7 @@ class Axios {

  getUri(config) {
  config = mergeConfig(this.defaults, config);
- const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls);
+ const fullPath = buildFullPath(config.baseURL, config.url);
  return buildURL(fullPath, config.params, config.paramsSerializer);
  }
  }
@@ -23598,20 +23567,6 @@ class CancelToken {
  }
  }

- toAbortSignal() {
- const controller = new AbortController();
-
- const abort = (err) => {
- controller.abort(err);
- };
-
- this.subscribe(abort);
-
- controller.signal.unsubscribe = () => this.unsubscribe(abort);
-
- return controller.signal;
- }
-
  /**
  * Returns an object that contains a new `CancelToken` and a function that, when called,
  * cancels the `CancelToken`.
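The last hunk concerns `CancelToken.toAbortSignal()`, the bridge from the legacy cancel-token API to an `AbortSignal` (present in the newer bundle on main, absent in v4.8.2). A sketch of the idea using a minimal stand-in token type (`MiniCancelToken` is invented for the example and is not axios's `CancelToken`):

```ts
type CancelListener = (reason: unknown) => void;

// Minimal stand-in for a cancel token: something that can be subscribed to for a cancellation reason.
class MiniCancelToken {
  private listeners: CancelListener[] = [];

  subscribe(listener: CancelListener): void {
    this.listeners.push(listener);
  }

  cancel(reason: unknown): void {
    this.listeners.forEach(listener => listener(reason));
  }

  // Bridge: expose the token as an AbortSignal usable with fetch().
  toAbortSignal(): AbortSignal {
    const controller = new AbortController();
    this.subscribe(reason => controller.abort(reason));
    return controller.signal;
  }
}

const token = new MiniCancelToken();
const signal = token.toAbortSignal();
signal.addEventListener("abort", () => console.log("aborted:", signal.reason));
token.cancel(new Error("operation canceled"));
```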
@@ -1,2 +0,0 @@
- [tools]
- node = "20"
8994 package-lock.json generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
  {
  "name": "@kie/git-backporting",
- "version": "4.8.5",
+ "version": "4.8.2",
  "description": "Git backporting is a tool to execute automatic pull request git backporting.",
  "author": "",
  "license": "MIT",
@@ -55,7 +55,7 @@
  "@gitbeaker/rest": "^39.1.0",
  "@kie/mock-github": "^1.1.0",
  "@octokit/webhooks-types": "^6.8.0",
- "@release-it/conventional-changelog": "^10.0.0",
+ "@release-it/conventional-changelog": "^7.0.0",
  "@types/fs-extra": "^9.0.13",
  "@types/jest": "^29.2.4",
  "@types/node": "^18.11.17",
@@ -66,7 +66,7 @@
  "husky": "^8.0.2",
  "jest": "^29.0.0",
  "jest-sonar-reporter": "^2.0.0",
- "release-it": "^18.1.2",
+ "release-it": "^16.1.3",
  "semver": "^7.3.8",
  "ts-jest": "^29.0.0",
  "ts-node": "^10.8.1",
@@ -68,15 +68,6 @@ export default class GitCLIService {
  }

  this.logger.info(`Folder ${to} already exist. Won't clone`);
-
- // ensure the working tree is properly reset - no stale changes
- // from previous (failed) backport
- const ongoingCherryPick = await this.anyConflict(to);
- if (ongoingCherryPick) {
- this.logger.warn("Found previously failed cherry-pick, aborting it");
- await this.git(to).raw(["cherry-pick", "--abort"]);
- }
-
  // checkout to the proper branch
  this.logger.info(`Checking out branch ${branch}`);
  await this.git(to).checkout(branch);
@@ -140,21 +131,6 @@ export default class GitCLIService {
  }
  }

- /**
- * Check whether there are some conflicts in the current working directory
- * which means there is an ongoing cherry-pick that did not complete successfully
- * @param cwd repository in which the check should be performed
- * @return true if there is some conflict, false otherwise
- */
- async anyConflict(cwd: string): Promise<boolean> {
- const status = await this.git(cwd).status();
- if (status.conflicted.length > 0) {
- this.logger.debug(`Found conflicts in branch ${status.current}`);
- return true;
- }
- return false;
- }
-
  /**
  * Push a branch to a remote
  * @param cwd repository in which the push should be performed
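The two hunks above drop, relative to main, the guard that resets a previously failed cherry-pick before reusing an already-cloned folder. A stand-alone sketch of that guard with the simple-git calls the surrounding code already uses (`status()` and `raw()`); treat it as an illustration rather than the project's exact service code:

```ts
import simpleGit from "simple-git";

// Abort a cherry-pick left behind by a previous failed backport so the working tree is clean again.
async function resetStaleCherryPick(cwd: string): Promise<void> {
  const git = simpleGit(cwd);
  const status = await git.status();
  if (status.conflicted.length > 0) {
    console.warn(`Found previously failed cherry-pick in ${status.current}, aborting it`);
    await git.raw(["cherry-pick", "--abort"]);
  }
}

// Usage before checking out the target branch in an already-cloned folder:
// await resetStaleCherryPick("/path/to/bp");
```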
@@ -45,17 +45,17 @@ export const inferGitApiUrl = (prUrl: string, apiVersion = "v4"): string => {
  /**
  * Infer the value of the squash option
  * @param open true if the pull/merge request is still open
- * @param squash_commit undefined or null if the pull/merge request was merged, the sha of the squashed commit if it was squashed
+ * @param squash_commit undefined if the pull/merge request was merged, the sha of the squashed commit if it was squashed
  * @returns true if a single commit must be cherry-picked, false if all merged commits must be cherry-picked
  */
- export const inferSquash = (open: boolean, squash_commit: string | undefined | null): boolean => {
+ export const inferSquash = (open: boolean, squash_commit: string | undefined): boolean => {
  const logger = LoggerServiceFactory.getLogger();

  if (open) {
  logger.debug("cherry-pick all commits because they have not been merged (or squashed) in the base branch yet");
  return false;
  } else {
- if (squash_commit) {
+ if (squash_commit !== undefined) {
  logger.debug(`cherry-pick the squashed commit ${squash_commit}`);
  return true;
  } else {
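The inferSquash change above is about accepting `null` as well as `undefined` for the squash commit SHA, since GitLab reports `squash_commit_sha: null` for merged but non-squashed MRs. A simplified usage sketch without the logger (the MR payload shapes are made up for the example):

```ts
// Decide whether to cherry-pick a single (squashed) commit or all commits of the merged MR.
function inferSquashSketch(open: boolean, squashCommit: string | undefined | null): boolean {
  if (open) {
    return false; // nothing merged yet: cherry-pick every commit of the MR
  }
  return !!squashCommit; // a truthy SHA means the merge was squashed
}

// GitLab-style payloads: squash_commit_sha is null when the MR was merged without squashing.
interface MrLike { state: string; squash_commit_sha: string | null; }
const squashedMr: MrLike = { state: "merged", squash_commit_sha: "e4dd336a" };
const plainMergeMr: MrLike = { state: "merged", squash_commit_sha: null };

console.log(inferSquashSketch(squashedMr.state === "opened", squashedMr.squash_commit_sha));     // true
console.log(inferSquashSketch(plainMergeMr.state === "opened", plainMergeMr.squash_commit_sha)); // false
```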
@@ -48,9 +48,7 @@ export default class GitLabClient implements GitClient {
  // example: <host>/api/v4/projects/<namespace>%2Fbackporting-example/merge_requests/1
  async getPullRequest(namespace: string, repo: string, mrNumber: number, squash: boolean | undefined): Promise<GitPullRequest> {
  const projectId = this.getProjectId(namespace, repo);
- const url = `/projects/${projectId}/merge_requests/${mrNumber}`;
- this.logger.debug(`Fetching pull request ${url}`);
- const { data } = await this.client.get(`${url}`);
+ const { data } = await this.client.get(`/projects/${projectId}/merge_requests/${mrNumber}`);

  if (squash === undefined) {
  squash = inferSquash(data.state === "opened", data.squash_commit_sha);
@@ -171,7 +169,7 @@ export default class GitLabClient implements GitClient {
  const { namespace, project, id } = this.extractMergeRequestData(mrUrl);
  const projectId = this.getProjectId(namespace, project);

- const { data } = await this.client.post(`/projects/${projectId}/merge_requests/${id}/notes`, {
+ const { data } = await this.client.post(`/projects/${projectId}/issues/${id}/notes`, {
  body: comment,
  });

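The second hunk above is the endpoint fix for posting backport comments: merge request notes live under `/projects/:id/merge_requests/:iid/notes`, not under `/issues/`. A hedged sketch of the corrected call with a plain axios client (host, token and IDs are placeholders; the project's own client wraps this differently):

```ts
import axios from "axios";

// Post a comment (note) on a GitLab merge request through the REST v4 API.
async function commentOnMergeRequest(projectId: string, mrIid: number, comment: string): Promise<void> {
  const client = axios.create({
    baseURL: "https://gitlab.example.com/api/v4",                // placeholder host
    headers: { "PRIVATE-TOKEN": process.env.GITLAB_TOKEN ?? "" }, // placeholder auth
  });
  // Correct endpoint: merge_requests, not issues.
  const { data } = await client.post(
    `/projects/${encodeURIComponent(projectId)}/merge_requests/${mrIid}/notes`,
    { body: comment },
  );
  console.log(`Created note ${data.id}`);
}

// Example (placeholders): commentOnMergeRequest("superuser/backporting-example", 5, "Backport created");
```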
@@ -59,6 +59,5 @@ describe("check git utilities", () => {
  expect(inferSquash(true, undefined)).toStrictEqual(false);
  expect(inferSquash(false, "SHA")).toStrictEqual(true);
  expect(inferSquash(false, undefined)).toStrictEqual(false);
- expect(inferSquash(false, null)).toStrictEqual(false);
  });
  });
@@ -604,50 +604,6 @@ describe("cli runner", () => {
  );
  });

- test("merged MR with --auto-no-squash", async () => {
- addProcessArgs([
- "-tb",
- "target",
- "-pr",
- "https://my.gitlab.host.com/superuser/backporting-example/-/merge_requests/5",
- "--auto-no-squash",
- ]);
-
- await runner.execute();
-
- const cwd = process.cwd() + "/bp";
-
- expect(GitClientFactory.getOrCreate).toBeCalledTimes(1);
- expect(GitClientFactory.getOrCreate).toBeCalledWith(GitClientType.GITLAB, undefined, "https://my.gitlab.host.com/api/v4");
-
- expect(GitCLIService.prototype.clone).toBeCalledTimes(1);
- expect(GitCLIService.prototype.clone).toBeCalledWith("https://my.gitlab.host.com/superuser/backporting-example.git", cwd, "target");
-
- expect(GitCLIService.prototype.createLocalBranch).toBeCalledTimes(1);
- expect(GitCLIService.prototype.createLocalBranch).toBeCalledWith(cwd, "bp-target-e4dd336");
-
- expect(GitCLIService.prototype.cherryPick).toBeCalledTimes(1);
- expect(GitCLIService.prototype.cherryPick).toBeCalledWith(cwd, "e4dd336a4a20f394df6665994df382fb1d193a11", undefined, undefined, undefined);
-
- expect(GitCLIService.prototype.push).toBeCalledTimes(1);
- expect(GitCLIService.prototype.push).toBeCalledWith(cwd, "bp-target-e4dd336");
-
- expect(GitLabClient.prototype.createPullRequest).toBeCalledTimes(1);
- expect(GitLabClient.prototype.createPullRequest).toBeCalledWith({
- owner: "superuser",
- repo: "backporting-example",
- head: "bp-target-e4dd336",
- base: "target",
- title: "[target] Update test.txt",
- body: expect.stringContaining("**Backport:** https://my.gitlab.host.com/superuser/backporting-example/-/merge_requests/5"),
- reviewers: ["superuser"],
- assignees: [],
- labels: [],
- comments: [],
- }
- );
- });
-
  test("auth using GITLAB_TOKEN takes precedence over GIT_TOKEN env variable", async () => {
  process.env[AuthTokenId.GIT_TOKEN] = "mygittoken";
  process.env[AuthTokenId.GITLAB_TOKEN] = "mygitlabtoken";
@@ -519,46 +519,4 @@ describe("gha runner", () => {
  }
  );
  });
-
- test("merged MR with auto-no-squash", async () => {
- spyGetInput({
- "target-branch": "target",
- "pull-request": "https://my.gitlab.host.com/superuser/backporting-example/-/merge_requests/5",
- "auto-no-squash": "true",
- });
-
- await runner.execute();
-
- const cwd = process.cwd() + "/bp";
-
- expect(GitClientFactory.getOrCreate).toBeCalledTimes(1);
- expect(GitClientFactory.getOrCreate).toBeCalledWith(GitClientType.GITLAB, undefined, "https://my.gitlab.host.com/api/v4");
-
- expect(GitCLIService.prototype.clone).toBeCalledTimes(1);
- expect(GitCLIService.prototype.clone).toBeCalledWith("https://my.gitlab.host.com/superuser/backporting-example.git", cwd, "target");
-
- expect(GitCLIService.prototype.createLocalBranch).toBeCalledTimes(1);
- expect(GitCLIService.prototype.createLocalBranch).toBeCalledWith(cwd, "bp-target-e4dd336");
-
- expect(GitCLIService.prototype.cherryPick).toBeCalledTimes(1);
- expect(GitCLIService.prototype.cherryPick).toBeCalledWith(cwd, "e4dd336a4a20f394df6665994df382fb1d193a11", undefined, undefined, undefined);
-
- expect(GitCLIService.prototype.push).toBeCalledTimes(1);
- expect(GitCLIService.prototype.push).toBeCalledWith(cwd, "bp-target-e4dd336");
-
- expect(GitLabClient.prototype.createPullRequest).toBeCalledTimes(1);
- expect(GitLabClient.prototype.createPullRequest).toBeCalledWith({
- owner: "superuser",
- repo: "backporting-example",
- head: "bp-target-e4dd336",
- base: "target",
- title: "[target] Update test.txt",
- body: expect.stringContaining("**Backport:** https://my.gitlab.host.com/superuser/backporting-example/-/merge_requests/5"),
- reviewers: ["superuser"],
- assignees: [],
- labels: [],
- comments: [],
- }
- );
- });
  });
@@ -1,7 +1,7 @@
  import LoggerServiceFactory from "@bp/service/logger/logger-service-factory";
  import { Moctokit } from "@kie/mock-github";
  import { TARGET_OWNER, REPO, MERGED_PR_FIXTURE, OPEN_PR_FIXTURE, NOT_MERGED_PR_FIXTURE, NOT_FOUND_PR_NUMBER, MULT_COMMITS_PR_FIXTURE, MULT_COMMITS_PR_COMMITS, NEW_PR_URL, NEW_PR_NUMBER, GITHUB_GET_COMMIT } from "./github-data";
- import { CLOSED_NOT_MERGED_MR, MERGED_SQUASHED_MR, NESTED_NAMESPACE_MR, OPEN_MR, OPEN_PR_COMMITS, PROJECT_EXAMPLE, NESTED_PROJECT_EXAMPLE, SUPERUSER, MERGED_SQUASHED_MR_COMMITS, MERGED_NOT_SQUASHED_MR, MERGED_NOT_SQUASHED_MR_COMMITS } from "./gitlab-data";
+ import { CLOSED_NOT_MERGED_MR, MERGED_SQUASHED_MR, NESTED_NAMESPACE_MR, OPEN_MR, OPEN_PR_COMMITS, PROJECT_EXAMPLE, NESTED_PROJECT_EXAMPLE, SUPERUSER, MERGED_SQUASHED_MR_COMMITS } from "./gitlab-data";
  import { CB_TARGET_OWNER, CB_REPO, CB_MERGED_PR_FIXTURE, CB_OPEN_PR_FIXTURE, CB_NOT_MERGED_PR_FIXTURE, CB_NOT_FOUND_PR_NUMBER, CB_MULT_COMMITS_PR_FIXTURE, CB_MULT_COMMITS_PR_COMMITS, CB_NEW_PR_URL, CB_NEW_PR_NUMBER, CODEBERG_GET_COMMIT } from "./codeberg-data";

  // high number, for each test we are not expecting
@@ -25,8 +25,6 @@ export const getAxiosMocked = (url: string) => {
  data = CLOSED_NOT_MERGED_MR;
  } else if (url.endsWith("merge_requests/4")) {
  data = NESTED_NAMESPACE_MR;
- } else if (url.endsWith("merge_requests/5")) {
- data = MERGED_NOT_SQUASHED_MR;
  } else if (url.endsWith("projects/76316")) {
  data = PROJECT_EXAMPLE;
  } else if (url.endsWith("projects/1645")) {
@@ -37,8 +35,6 @@ export const getAxiosMocked = (url: string) => {
  data = MERGED_SQUASHED_MR_COMMITS;
  } else if (url.endsWith("merge_requests/2/commits")) {
  data = OPEN_PR_COMMITS;
- } else if (url.endsWith("merge_requests/5/commits")) {
- data = MERGED_NOT_SQUASHED_MR_COMMITS;
  }

  return {
@@ -755,29 +755,6 @@ export const OPEN_PR_COMMITS = [
  }
  ];

- export const MERGED_NOT_SQUASHED_MR_COMMITS = [
- {
- "id":"e4dd336a4a20f394df6665994df382fb1d193a11",
- "short_id":"e4dd336a",
- "created_at":"2023-06-29T09:59:10.000Z",
- "parent_ids":[
-
- ],
- "title":"Add new file",
- "message":"Add new file",
- "author_name":"Super User",
- "author_email":"superuser@email.com",
- "authored_date":"2023-06-29T09:59:10.000Z",
- "committer_name":"Super User",
- "committer_email":"superuser@email.com",
- "committed_date":"2023-06-29T09:59:10.000Z",
- "trailers":{
-
- },
- "web_url":"https://gitlab.com/superuser/backporting-example/-/commit/e4dd336a4a20f394df6665994df382fb1d193a11"
- },
- ];
-
  export const SUPERUSER = {
  "id":14041,
  "username":"superuser",
@@ -921,138 +898,3 @@ export const NESTED_NAMESPACE_MR = {
  "can_merge":true
  }
  };
-
- export const MERGED_NOT_SQUASHED_MR = {
- "id":807106,
- "iid":1,
- "project_id":76316,
- "title":"Update test.txt",
- "description":"This is the body",
- "state":"merged",
- "created_at":"2023-06-28T14:32:40.943Z",
- "updated_at":"2023-06-28T14:37:12.108Z",
- "merged_by":{
- "id":14041,
- "username":"superuser",
- "name":"Super User",
- "state":"active",
- "avatar_url":"https://my.gitlab.host.com/uploads/-/system/user/avatar/14041/avatar.png",
- "web_url":"https://my.gitlab.host.com/superuser"
- },
- "merge_user":{
- "id":14041,
- "username":"superuser",
- "name":"Super User",
- "state":"active",
- "avatar_url":"https://my.gitlab.host.com/uploads/-/system/user/avatar/14041/avatar.png",
- "web_url":"https://my.gitlab.host.com/superuser"
- },
- "merged_at":"2023-06-28T14:37:11.667Z",
- "closed_by":null,
- "closed_at":null,
- "target_branch":"main",
- "source_branch":"feature",
- "user_notes_count":0,
- "upvotes":0,
- "downvotes":0,
- "author":{
- "id":14041,
- "username":"superuser",
- "name":"Super User",
- "state":"active",
- "avatar_url":"https://my.gitlab.host.com/uploads/-/system/user/avatar/14041/avatar.png",
- "web_url":"https://my.gitlab.host.com/superuser"
- },
- "assignees":[
- {
- "id":14041,
- "username":"superuser",
- "name":"Super User",
- "state":"active",
- "avatar_url":"https://my.gitlab.host.com/uploads/-/system/user/avatar/14041/avatar.png",
- "web_url":"https://my.gitlab.host.com/superuser"
- }
- ],
- "assignee":{
- "id":14041,
- "username":"superuser",
- "name":"Super User",
- "state":"active",
- "avatar_url":"https://my.gitlab.host.com/uploads/-/system/user/avatar/14041/avatar.png",
- "web_url":"https://my.gitlab.host.com/superuser"
- },
- "reviewers":[
- {
- "id":1404188,
- "username":"superuser1",
- "name":"Super User",
- "state":"active",
- "avatar_url":"https://my.gitlab.host.com/uploads/-/system/user/avatar/14041/avatar.png",
- "web_url":"https://my.gitlab.host.com/superuser"
- },
- {
- "id":1404199,
- "username":"superuser2",
- "name":"Super User",
- "state":"active",
- "avatar_url":"https://my.gitlab.host.com/uploads/-/system/user/avatar/14041/avatar.png",
- "web_url":"https://my.gitlab.host.com/superuser"
- }
- ],
- "source_project_id":76316,
- "target_project_id":76316,
- "labels":[
- "backport-prod"
- ],
- "draft":false,
- "work_in_progress":false,
- "milestone":null,
- "merge_when_pipeline_succeeds":false,
- "merge_status":"can_be_merged",
- "detailed_merge_status":"not_open",
- "sha":"9e15674ebd48e05c6e428a1fa31dbb60a778d644",
- "merge_commit_sha":"4d369c3e9a8d1d5b7e56c892a8ab2a7666583ac3",
- "squash_commit_sha":null,
- "discussion_locked":null,
- "should_remove_source_branch":true,
- "force_remove_source_branch":true,
- "reference":"!5",
- "references":{
- "short":"!5",
- "relative":"!5",
- "full":"superuser/backporting-example!5"
- },
- "web_url":"https://my.gitlab.host.com/superuser/backporting-example/-/merge_requests/5",
- "time_stats":{
- "time_estimate":0,
- "total_time_spent":0,
- "human_time_estimate":null,
- "human_total_time_spent":null
- },
- "squash":false,
- "squash_on_merge":false,
- "task_completion_status":{
- "count":0,
- "completed_count":0
- },
- "has_conflicts":false,
- "blocking_discussions_resolved":true,
- "approvals_before_merge":null,
- "subscribed":true,
- "changes_count":"1",
- "latest_build_started_at":null,
- "latest_build_finished_at":null,
- "first_deployed_to_production_at":null,
- "pipeline":null,
- "head_pipeline":null,
- "diff_refs":{
- "base_sha":"2c553a0c4c133a51806badce5fa4842b7253cb3b",
- "head_sha":"9e15674ebd48e05c6e428a1fa31dbb60a778d644",
- "start_sha":"2c553a0c4c133a51806badce5fa4842b7253cb3b"
- },
- "merge_error":null,
- "first_contribution":false,
- "user":{
- "can_merge":true
- }
- };