Merge remote-tracking branch 'upstream/main'

* upstream/main: (33 commits)
  Fix virtual-env toolcache links
  Updated @actions/cache (#382)
  ci(workflow): add 'npm' cache for actions/setup-node in .github/workflows (#379)
  Cache hit output (#373)
  Add python-version to setup PyPy output (#365)
  Rework pipenv caching test (#375)
  Update README.md to fix setup-python version in example (#368)
  dist fix (#367)
  Cache on ghes (#363)
  Update dist
  Use `\n` instead of `os.EOL`
  Update dist
  Initialise pyproject.toml
  Build and format
  Remove console.log
  Remove unused file
  Reduce test matrix
  Parse values from poetry
  Release
  Add more tests
  ...
Andrew DiLosa 2022-04-19 16:05:04 -07:00
commit a200bffc82
29 changed files with 14395 additions and 2324 deletions


@@ -1148,6 +1148,11 @@ function assertDefined(name, value) {
return value;
}
exports.assertDefined = assertDefined;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}
exports.isGhes = isGhes;
//# sourceMappingURL=cacheUtils.js.map
/***/ }),
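
Aside: a minimal sketch (not part of this commit) of how the isGhes() helper added above behaves, using hypothetical server URLs; it only consults GITHUB_SERVER_URL.

// Illustration only: unset or github.com -> false; any other hostname
// (a GitHub Enterprise Server instance) -> true.
function isGhes() {
    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
    return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}

delete process.env['GITHUB_SERVER_URL'];
console.log(isGhes()); // false (defaults to https://github.com)

process.env['GITHUB_SERVER_URL'] = 'https://github.example-corp.internal'; // hypothetical GHES host
console.log(isGhes()); // true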
@@ -3728,10 +3733,7 @@ const options_1 = __webpack_require__(538);
const requestUtils_1 = __webpack_require__(899);
const versionSalt = '1.0';
function getCacheApiUrl(resource) {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl = (process.env['ACTIONS_CACHE_URL'] ||
process.env['ACTIONS_RUNTIME_URL'] ||
'').replace('pipelines', 'artifactcache');
const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
if (!baseUrl) {
throw new Error('Cache Service Url not found, unable to restore cache.');
}
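
For context, a small worked example (hypothetical values, not from this commit) of what the removed fallback used to derive versus what the new code requires:

// Old behaviour: when ACTIONS_CACHE_URL was unset, the cache URL was derived
// from ACTIONS_RUNTIME_URL by swapping 'pipelines' for 'artifactcache'.
const runtimeUrl = 'https://pipelines.actions.githubusercontent.com/SomeToken/';
console.log(runtimeUrl.replace('pipelines', 'artifactcache'));
// -> 'https://artifactcache.actions.githubusercontent.com/SomeToken/'

// New behaviour: ACTIONS_CACHE_URL must be provided by the runner; if it is
// missing, getCacheApiUrl throws 'Cache Service Url not found, unable to restore cache.'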
@@ -3809,18 +3811,18 @@ function downloadCache(archiveLocation, archivePath, options) {
exports.downloadCache = downloadCache;
// Reserve Cache
function reserveCache(key, paths, options) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
const reserveCacheRequest = {
key,
version
version,
cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
};
const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
}));
return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;
return response;
});
}
exports.reserveCache = reserveCache;
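
Because reserveCache now resolves to the full typed response rather than a bare cacheId, callers inspect statusCode, result, and error themselves. A simplified sketch in modern syntax of the pattern the saveCache changes below implement (identifiers other than reserveCache are made up):

// Assumes reserveCache from this module is in scope; values are illustrative.
async function reserveOrThrow(key, paths, options) {
    const response = await reserveCache(key, paths, options);
    const cacheId = response?.result?.cacheId;
    if (cacheId === undefined) {
        throw new Error(response?.error?.message ?? `Unable to reserve cache with key ${key}`);
    }
    return cacheId;
}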
@@ -5920,7 +5922,8 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
//
// If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
// on 64-bit systems), split the download into multiple segments
const maxSegmentSize = buffer.constants.MAX_LENGTH;
// ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly.
const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs.openSync(archivePath, 'w');
try {
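
A hedged worked example (not from the commit) of how the new 2 GB cap translates into download segments for a large archive; the exact value of buffer.constants.MAX_LENGTH varies by Node version and architecture.

const buffer = require('buffer');

// The clamp keeps each segment within the signed 32-bit range even on Node
// builds where MAX_LENGTH is larger than 2147483647.
const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH);

const contentLength = 5 * 1024 * 1024 * 1024; // e.g. a ~5 GB cache archive
const segmentCount = Math.ceil(contentLength / maxSegmentSize);
console.log(maxSegmentSize, segmentCount); // 2147483647 and 3 on 64-bit Node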
@@ -37180,6 +37183,25 @@ Object.defineProperty(exports, "__esModule", { value: true });
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
@@ -37189,14 +37211,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.State = void 0;
const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470));
var State;
@@ -37223,15 +37239,19 @@ class CacheDistributor {
core.saveState(State.CACHE_PATHS, cachePath);
core.saveState(State.STATE_CACHE_PRIMARY_KEY, primaryKey);
const matchedKey = yield cache.restoreCache(cachePath, primaryKey, restoreKey);
if (matchedKey) {
core.saveState(State.CACHE_MATCHED_KEY, matchedKey);
core.info(`Cache restored from key: ${matchedKey}`);
}
else {
core.info(`${this.packageManager} cache is not found`);
}
this.handleMatchResult(matchedKey, primaryKey);
});
}
handleMatchResult(matchedKey, primaryKey) {
if (matchedKey) {
core.saveState(State.CACHE_MATCHED_KEY, matchedKey);
core.info(`Cache restored from key: ${matchedKey}`);
}
else {
core.info(`${this.packageManager} cache is not found`);
}
core.setOutput('cache-hit', matchedKey === primaryKey);
}
}
exports.default = CacheDistributor;
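
The practical effect of the new cache-hit output, sketched with made-up keys: it is true only on an exact primary-key match, so a cache restored via a fallback restore-key still reports false.

// Illustration only: mirrors the matchedKey === primaryKey comparison in
// handleMatchResult above.
const primaryKey = 'setup-python-Linux-pip-1a2b3c';
console.log('exact hit   ->', 'setup-python-Linux-pip-1a2b3c' === primaryKey); // true
console.log('restore-key ->', 'setup-python-Linux-pip-9z8y7x' === primaryKey); // false
console.log('cache miss  ->', undefined === primaryKey);                       // false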
@@ -41438,6 +41458,15 @@ function checkKey(key) {
throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`);
}
}
/**
* isFeatureAvailable to check the presence of Actions cache service
*
* @returns boolean return true if Actions cache service feature is available, otherwise false
*/
function isFeatureAvailable() {
return !!process.env['ACTIONS_CACHE_URL'];
}
exports.isFeatureAvailable = isFeatureAvailable;
/**
* Restores cache from keys
*
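
A short usage sketch for the isFeatureAvailable export added above; the guard pattern and the restoreIfAvailable name are assumptions for illustration, not something this diff adds to setup-python itself.

// Hypothetical caller: skip cache operations when no Actions cache service
// is configured for the runner (isFeatureAvailable() only checks ACTIONS_CACHE_URL).
const cache = require('@actions/cache');
const core = require('@actions/core');

async function restoreIfAvailable(paths, primaryKey, restoreKeys) {
    if (!cache.isFeatureAvailable()) {
        core.info('Actions cache service is not available; skipping restore.');
        return undefined;
    }
    return cache.restoreCache(paths, primaryKey, restoreKeys);
}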
@@ -41504,18 +41533,12 @@ exports.restoreCache = restoreCache;
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
function saveCache(paths, key, options) {
var _a, _b, _c, _d, _e;
return __awaiter(this, void 0, void 0, function* () {
checkPaths(paths);
checkKey(key);
const compressionMethod = yield utils.getCompressionMethod();
core.debug('Reserving Cache');
const cacheId = yield cacheHttpClient.reserveCache(key, paths, {
compressionMethod
});
if (cacheId === -1) {
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);
}
core.debug(`Cache ID: ${cacheId}`);
let cacheId = null;
const cachePaths = yield utils.resolvePaths(paths);
core.debug('Cache Paths:');
core.debug(`${JSON.stringify(cachePaths)}`);
@@ -41530,9 +41553,24 @@ function saveCache(paths, key, options) {
const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
core.debug('Reserving Cache');
const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
compressionMethod,
cacheSize: archiveFileSize
});
if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId;
}
else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) {
throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`);
}
else {
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
}
@@ -45804,6 +45842,25 @@ module.exports = require("stream");
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
@@ -45813,17 +45870,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.run = void 0;
const core = __importStar(__webpack_require__(470));
const cache = __importStar(__webpack_require__(692));
const fs_1 = __importDefault(__webpack_require__(747));
@@ -50298,7 +50349,8 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry
return {
statusCode: error.statusCode,
result: null,
headers: {}
headers: {},
error
};
}
else {

dist/setup/index.js (vendored; 3906 lines changed): file diff suppressed because one or more lines are too long.