Caching on GHES (#308)

* initial changes
* review comments
* updated with correct message
* linting
* update version
* updated version

parent e886040dc2
commit dc1a9f2791

BIN .licenses/npm/@actions/cache.dep.yml (generated): binary file not shown.
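The change gates all cache restore/save work behind a new isCacheFeatureAvailable() helper. A minimal sketch of the intended call pattern follows (the restore helper and its signature are assumptions for illustration, not taken from this commit):

import * as core from '@actions/core';
import { isCacheFeatureAvailable } from './util';
import { restore } from './cache'; // assumed wrapper around @actions/cache restoreCache

async function maybeRestoreCache(packageManager: string): Promise<void> {
  // On github.com with an unreachable cache service this logs a warning and returns false;
  // on a GHES instance it throws, pointing at the ">= 3.5" requirement.
  if (!isCacheFeatureAvailable()) {
    core.info('Cache is not available on this runner, skipping restore');
    return;
  }
  await restore(packageManager);
}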
@@ -1,4 +1,9 @@
-import { isVersionSatisfies } from '../src/util';
+import * as cache from '@actions/cache';
+import * as core from '@actions/core';
+import { isVersionSatisfies, isCacheFeatureAvailable } from '../src/util';
+
+jest.mock('@actions/cache');
+jest.mock('@actions/core');
 
 describe('isVersionSatisfies', () => {
   it.each([
@@ -20,3 +25,38 @@ describe('isVersionSatisfies', () => {
     expect(actual).toBe(expected);
   });
 });
+
+describe('isCacheFeatureAvailable', () => {
+  it('isCacheFeatureAvailable disabled on GHES', () => {
+    jest.spyOn(cache, 'isFeatureAvailable').mockImplementation(() => false);
+    try {
+      process.env['GITHUB_SERVER_URL'] = 'http://example.com';
+      isCacheFeatureAvailable();
+    } catch (error) {
+      expect(error).toHaveProperty(
+        'message',
+        'Caching is only supported on GHES version >= 3.5. If you are on a version >= 3.5, please check with your GHES admin if the Actions cache service is enabled or not.'
+      );
+    } finally {
+      delete process.env['GITHUB_SERVER_URL'];
+    }
+  });
+
+  it('isCacheFeatureAvailable disabled on dotcom', () => {
+    jest.spyOn(cache, 'isFeatureAvailable').mockImplementation(() => false);
+    const infoMock = jest.spyOn(core, 'warning');
+    const message = 'The runner was not able to contact the cache service. Caching will be skipped';
+    try {
+      process.env['GITHUB_SERVER_URL'] = 'http://github.com';
+      expect(isCacheFeatureAvailable()).toBe(false);
+      expect(infoMock).toHaveBeenCalledWith(message);
+    } finally {
+      delete process.env['GITHUB_SERVER_URL'];
+    }
+  });
+
+  it('isCacheFeatureAvailable is enabled', () => {
+    jest.spyOn(cache, 'isFeatureAvailable').mockImplementation(() => true);
+    expect(isCacheFeatureAvailable()).toBe(true);
+  });
+});
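The three cases above map directly onto the new isGhes()/isCacheFeatureAvailable() pair added to the util module later in this diff: the server URL decides whether a missing cache service is an error (GHES) or only a warning (github.com). A minimal sketch of that decision, using the same hostname comparison the bundled code performs:

// Sketch of the GHES detection used by isCacheFeatureAvailable().
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
const onGhes = ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
// 'http://example.com' -> onGhes === true  -> error if the cache service is missing
// 'http://github.com'  -> onGhes === false -> core.warning(...) and caching is skipped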

dist/cleanup/index.js (vendored): 136 changes
@@ -3730,10 +3730,7 @@ const options_1 = __webpack_require__(538);
 const requestUtils_1 = __webpack_require__(899);
 const versionSalt = '1.0';
 function getCacheApiUrl(resource) {
-    // Ideally we just use ACTIONS_CACHE_URL
-    const baseUrl = (process.env['ACTIONS_CACHE_URL'] ||
-        process.env['ACTIONS_RUNTIME_URL'] ||
-        '').replace('pipelines', 'artifactcache');
+    const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
     if (!baseUrl) {
         throw new Error('Cache Service Url not found, unable to restore cache.');
     }
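In the updated @actions/cache bundled here, the cache endpoint comes only from ACTIONS_CACHE_URL; the old fallback that rewrote ACTIONS_RUNTIME_URL (replacing 'pipelines' with 'artifactcache') is gone. A small sketch of what that means on a runner that does not export the variable (illustrative, not part of the bundle):

// Mirrors the new lookup; purely illustrative.
const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
if (!baseUrl) {
  // Without the up-front feature check, every cache call on such a runner would fail here.
  throw new Error('Cache Service Url not found, unable to restore cache.');
}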
@@ -5932,7 +5929,35 @@ module.exports = {
 /* 193 */,
 /* 194 */,
 /* 195 */,
-/* 196 */,
+/* 196 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = exports.INPUT_JOB_STATUS = exports.INPUT_CACHE = exports.INPUT_DEFAULT_GPG_PASSPHRASE = exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = exports.INPUT_GPG_PASSPHRASE = exports.INPUT_GPG_PRIVATE_KEY = exports.INPUT_OVERWRITE_SETTINGS = exports.INPUT_SETTINGS_PATH = exports.INPUT_SERVER_PASSWORD = exports.INPUT_SERVER_USERNAME = exports.INPUT_SERVER_ID = exports.INPUT_CHECK_LATEST = exports.INPUT_JDK_FILE = exports.INPUT_DISTRIBUTION = exports.INPUT_JAVA_PACKAGE = exports.INPUT_ARCHITECTURE = exports.INPUT_JAVA_VERSION = exports.MACOS_JAVA_CONTENT_POSTFIX = void 0;
+exports.MACOS_JAVA_CONTENT_POSTFIX = 'Contents/Home';
+exports.INPUT_JAVA_VERSION = 'java-version';
+exports.INPUT_ARCHITECTURE = 'architecture';
+exports.INPUT_JAVA_PACKAGE = 'java-package';
+exports.INPUT_DISTRIBUTION = 'distribution';
+exports.INPUT_JDK_FILE = 'jdkFile';
+exports.INPUT_CHECK_LATEST = 'check-latest';
+exports.INPUT_SERVER_ID = 'server-id';
+exports.INPUT_SERVER_USERNAME = 'server-username';
+exports.INPUT_SERVER_PASSWORD = 'server-password';
+exports.INPUT_SETTINGS_PATH = 'settings-path';
+exports.INPUT_OVERWRITE_SETTINGS = 'overwrite-settings';
+exports.INPUT_GPG_PRIVATE_KEY = 'gpg-private-key';
+exports.INPUT_GPG_PASSPHRASE = 'gpg-passphrase';
+exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = undefined;
+exports.INPUT_DEFAULT_GPG_PASSPHRASE = 'GPG_PASSPHRASE';
+exports.INPUT_CACHE = 'cache';
+exports.INPUT_JOB_STATUS = 'job-status';
+exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = 'gpg-private-key-fingerprint';
+
+
+/***/ }),
 /* 197 */
 /***/ (function(__unusedmodule, exports, __webpack_require__) {
 
@@ -6092,7 +6117,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.run = void 0;
 const core = __importStar(__webpack_require__(470));
 const gpg = __importStar(__webpack_require__(884));
-const constants = __importStar(__webpack_require__(694));
+const constants = __importStar(__webpack_require__(196));
 const util_1 = __webpack_require__(322);
 const cache_1 = __webpack_require__(913);
 function removePrivateKeyFromKeychain() {
@@ -6889,7 +6914,8 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
             //
             // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
             // on 64-bit systems), split the download into multiple segments
-            const maxSegmentSize = buffer.constants.MAX_LENGTH;
+            // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly.
+            const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH);
             const downloadProgress = new DownloadProgress(contentLength);
             const fd = fs.openSync(archivePath, 'w');
             try {
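The segmented download now caps each chunk at 2147483647 bytes (2^31 - 1), because buffer operations past that index throw out-of-range errors even where buffer.constants.MAX_LENGTH is larger on 64-bit Node. A quick worked check (the 4 GiB MAX_LENGTH value is only an illustrative assumption for a modern 64-bit build):

// Illustrative values only; the real MAX_LENGTH depends on the Node build.
const assumedMaxLength = 4 * 1024 * 1024 * 1024;   // pretend a 64-bit build allows ~4 GiB
const maxSegmentSize = Math.min(2147483647, assumedMaxLength);
console.log(maxSegmentSize);                        // 2147483647, i.e. 2^31 - 1
console.log(Math.min(2147483647, 1073741823));      // 1073741823 when the platform limit is smaller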
@@ -9110,14 +9136,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isJobStatusSuccess = exports.getToolcachePath = exports.isVersionSatisfies = exports.getDownloadArchiveExtension = exports.extractJdkFile = exports.getVersionFromToolcachePath = exports.getBooleanInput = exports.getTempDir = void 0;
+exports.isCacheFeatureAvailable = exports.isGhes = exports.isJobStatusSuccess = exports.getToolcachePath = exports.isVersionSatisfies = exports.getDownloadArchiveExtension = exports.extractJdkFile = exports.getVersionFromToolcachePath = exports.getBooleanInput = exports.getTempDir = void 0;
 const os_1 = __importDefault(__webpack_require__(87));
 const path_1 = __importDefault(__webpack_require__(622));
 const fs = __importStar(__webpack_require__(747));
 const semver = __importStar(__webpack_require__(876));
+const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const tc = __importStar(__webpack_require__(533));
-const constants_1 = __webpack_require__(694);
+const constants_1 = __webpack_require__(196);
 function getTempDir() {
     let tempDirectory = process.env['RUNNER_TEMP'] || os_1.default.tmpdir();
     return tempDirectory;
@@ -9187,6 +9214,24 @@ function isJobStatusSuccess() {
     return jobStatus === 'success';
 }
 exports.isJobStatusSuccess = isJobStatusSuccess;
+function isGhes() {
+    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
+    return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
+}
+exports.isGhes = isGhes;
+function isCacheFeatureAvailable() {
+    if (!cache.isFeatureAvailable()) {
+        if (isGhes()) {
+            throw new Error('Caching is only supported on GHES version >= 3.5. If you are on a version >= 3.5, please check with your GHES admin if the Actions cache service is enabled or not.');
+        }
+        else {
+            core.warning('The runner was not able to contact the cache service. Caching will be skipped');
+        }
+        return false;
+    }
+    return true;
+}
+exports.isCacheFeatureAvailable = isCacheFeatureAvailable;
+
+
 /***/ }),
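The bundled JavaScript above corresponds to two small helpers added to the action's util module. A TypeScript rendering, reconstructed from the compiled output rather than copied from the source file:

import * as cache from '@actions/cache';
import * as core from '@actions/core';

export function isGhes(): boolean {
  // Any server URL other than github.com is treated as a GHES instance.
  const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
  return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}

export function isCacheFeatureAvailable(): boolean {
  if (!cache.isFeatureAvailable()) {
    if (isGhes()) {
      throw new Error(
        'Caching is only supported on GHES version >= 3.5. If you are on a version >= 3.5, please check with your GHES admin if the Actions cache service is enabled or not.'
      );
    } else {
      core.warning('The runner was not able to contact the cache service. Caching will be skipped');
    }
    return false;
  }
  return true;
}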
@@ -41953,7 +41998,7 @@ __exportStar(__webpack_require__(220), exports);
 __exportStar(__webpack_require__(932), exports);
 __exportStar(__webpack_require__(975), exports);
 __exportStar(__webpack_require__(207), exports);
-__exportStar(__webpack_require__(773), exports);
+__exportStar(__webpack_require__(694), exports);
 __exportStar(__webpack_require__(695), exports);
 var spancontext_utils_1 = __webpack_require__(629);
 Object.defineProperty(exports, "isSpanContextValid", { enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } });
@@ -53506,6 +53551,15 @@ function checkKey(key) {
         throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`);
     }
 }
+/**
+ * isFeatureAvailable to check the presence of Actions cache service
+ *
+ * @returns boolean return true if Actions cache service feature is available, otherwise false
+ */
+function isFeatureAvailable() {
+    return !!process.env['ACTIONS_CACHE_URL'];
+}
+exports.isFeatureAvailable = isFeatureAvailable;
 /**
  * Restores cache from keys
  *
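@actions/cache's new isFeatureAvailable() simply reports whether the runner exported ACTIONS_CACHE_URL, so callers can check it before attempting a restore instead of catching a failed request. A hedged usage sketch; the paths and key below are example values, not taken from this commit:

import * as cache from '@actions/cache';
import * as core from '@actions/core';

async function restoreGradleCache(): Promise<void> {
  if (!cache.isFeatureAvailable()) {
    core.warning('The runner was not able to contact the cache service. Caching will be skipped');
    return;
  }
  const paths = ['~/.gradle/caches', '~/.gradle/wrapper'];
  const key = `gradle-${process.platform}-example`;
  await cache.restoreCache(paths, key);
}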
@@ -53626,28 +53680,23 @@ exports.saveCache = saveCache;
 
 "use strict";
 
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = exports.INPUT_JOB_STATUS = exports.INPUT_CACHE = exports.INPUT_DEFAULT_GPG_PASSPHRASE = exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = exports.INPUT_GPG_PASSPHRASE = exports.INPUT_GPG_PRIVATE_KEY = exports.INPUT_OVERWRITE_SETTINGS = exports.INPUT_SETTINGS_PATH = exports.INPUT_SERVER_PASSWORD = exports.INPUT_SERVER_USERNAME = exports.INPUT_SERVER_ID = exports.INPUT_CHECK_LATEST = exports.INPUT_JDK_FILE = exports.INPUT_DISTRIBUTION = exports.INPUT_JAVA_PACKAGE = exports.INPUT_ARCHITECTURE = exports.INPUT_JAVA_VERSION = exports.MACOS_JAVA_CONTENT_POSTFIX = void 0;
-exports.MACOS_JAVA_CONTENT_POSTFIX = 'Contents/Home';
-exports.INPUT_JAVA_VERSION = 'java-version';
-exports.INPUT_ARCHITECTURE = 'architecture';
-exports.INPUT_JAVA_PACKAGE = 'java-package';
-exports.INPUT_DISTRIBUTION = 'distribution';
-exports.INPUT_JDK_FILE = 'jdkFile';
-exports.INPUT_CHECK_LATEST = 'check-latest';
-exports.INPUT_SERVER_ID = 'server-id';
-exports.INPUT_SERVER_USERNAME = 'server-username';
-exports.INPUT_SERVER_PASSWORD = 'server-password';
-exports.INPUT_SETTINGS_PATH = 'settings-path';
-exports.INPUT_OVERWRITE_SETTINGS = 'overwrite-settings';
-exports.INPUT_GPG_PRIVATE_KEY = 'gpg-private-key';
-exports.INPUT_GPG_PASSPHRASE = 'gpg-passphrase';
-exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = undefined;
-exports.INPUT_DEFAULT_GPG_PASSPHRASE = 'GPG_PASSPHRASE';
-exports.INPUT_CACHE = 'cache';
-exports.INPUT_JOB_STATUS = 'job-status';
-exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = 'gpg-private-key-fingerprint';
+//# sourceMappingURL=tracer_provider.js.map
 
 
 /***/ }),
 /* 695 */
@@ -55714,30 +55763,7 @@ module.exports = function(dst, src) {
 /* 770 */,
 /* 771 */,
 /* 772 */,
-/* 773 */
-/***/ (function(__unusedmodule, exports) {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=tracer_provider.js.map
-
-
-/***/ }),
+/* 773 */,
 /* 774 */,
 /* 775 */,
 /* 776 */,

dist/setup/index.js (vendored): 1302 changes
@@ -3773,20 +3773,7 @@ exports.tokenList_serializeSteps = tokenList_serializeSteps;
 /* 55 */,
 /* 56 */,
 /* 57 */,
-/* 58 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-// Unique ID creation requires a high quality random # generator. In node.js
-// this is pretty straight-forward - we use the crypto API.
-
-var crypto = __webpack_require__(373);
-
-module.exports = function nodeRNG() {
-  return crypto.randomBytes(16);
-};
-
-
-/***/ }),
+/* 58 */,
 /* 59 */
 /***/ (function(__unusedmodule, exports) {
 
@@ -4711,7 +4698,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.JavaBase = void 0;
-const tc = __importStar(__webpack_require__(139));
+const tc = __importStar(__webpack_require__(186));
 const core = __importStar(__webpack_require__(470));
 const fs = __importStar(__webpack_require__(747));
 const semver_1 = __importDefault(__webpack_require__(876));
@@ -9539,10 +9526,7 @@ const options_1 = __webpack_require__(538);
 const requestUtils_1 = __webpack_require__(899);
 const versionSalt = '1.0';
 function getCacheApiUrl(resource) {
-    // Ideally we just use ACTIONS_CACHE_URL
-    const baseUrl = (process.env['ACTIONS_CACHE_URL'] ||
-        process.env['ACTIONS_RUNTIME_URL'] ||
-        '').replace('pipelines', 'artifactcache');
+    const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
     if (!baseUrl) {
         throw new Error('Cache Service Url not found, unable to restore cache.');
     }
@@ -10539,615 +10523,17 @@ exports.pop = pop;
 /* 137 */,
 /* 138 */,
 /* 139 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Unique ID creation requires a high quality random # generator. In node.js
+// this is pretty straight-forward - we use the crypto API.
+
+var crypto = __webpack_require__(373);
+
+module.exports = function nodeRNG() {
+  return crypto.randomBytes(16);
+};
The rest of this hunk removes the remainder of the old tool-cache module body at id 139; the same module is re-added below as id 186.
var __importStar = (this && this.__importStar) || function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
|
||||||
result["default"] = mod;
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const core = __importStar(__webpack_require__(470));
|
|
||||||
const io = __importStar(__webpack_require__(1));
|
|
||||||
const fs = __importStar(__webpack_require__(747));
|
|
||||||
const mm = __importStar(__webpack_require__(31));
|
|
||||||
const os = __importStar(__webpack_require__(87));
|
|
||||||
const path = __importStar(__webpack_require__(622));
|
|
||||||
const httpm = __importStar(__webpack_require__(539));
|
|
||||||
const semver = __importStar(__webpack_require__(550));
|
|
||||||
const stream = __importStar(__webpack_require__(794));
|
|
||||||
const util = __importStar(__webpack_require__(669));
|
|
||||||
const v4_1 = __importDefault(__webpack_require__(494));
|
|
||||||
const exec_1 = __webpack_require__(986);
|
|
||||||
const assert_1 = __webpack_require__(357);
|
|
||||||
const retry_helper_1 = __webpack_require__(979);
|
|
||||||
class HTTPError extends Error {
|
|
||||||
constructor(httpStatusCode) {
|
|
||||||
super(`Unexpected HTTP response: ${httpStatusCode}`);
|
|
||||||
this.httpStatusCode = httpStatusCode;
|
|
||||||
Object.setPrototypeOf(this, new.target.prototype);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.HTTPError = HTTPError;
|
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
|
||||||
const IS_MAC = process.platform === 'darwin';
|
|
||||||
const userAgent = 'actions/tool-cache';
|
|
||||||
/**
|
|
||||||
* Download a tool from an url and stream it into a file
|
|
||||||
*
|
|
||||||
* @param url url of tool to download
|
|
||||||
* @param dest path to download tool
|
|
||||||
* @param auth authorization header
|
|
||||||
* @returns path to downloaded tool
|
|
||||||
*/
|
|
||||||
function downloadTool(url, dest, auth) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
dest = dest || path.join(_getTempDirectory(), v4_1.default());
|
|
||||||
yield io.mkdirP(path.dirname(dest));
|
|
||||||
core.debug(`Downloading ${url}`);
|
|
||||||
core.debug(`Destination ${dest}`);
|
|
||||||
const maxAttempts = 3;
|
|
||||||
const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
|
|
||||||
const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
|
|
||||||
const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
|
|
||||||
return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
|
|
||||||
return yield downloadToolAttempt(url, dest || '', auth);
|
|
||||||
}), (err) => {
|
|
||||||
if (err instanceof HTTPError && err.httpStatusCode) {
|
|
||||||
// Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
|
|
||||||
if (err.httpStatusCode < 500 &&
|
|
||||||
err.httpStatusCode !== 408 &&
|
|
||||||
err.httpStatusCode !== 429) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Otherwise retry
|
|
||||||
return true;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.downloadTool = downloadTool;
|
|
||||||
function downloadToolAttempt(url, dest, auth) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
if (fs.existsSync(dest)) {
|
|
||||||
throw new Error(`Destination file path ${dest} already exists`);
|
|
||||||
}
|
|
||||||
// Get the response headers
|
|
||||||
const http = new httpm.HttpClient(userAgent, [], {
|
|
||||||
allowRetries: false
|
|
||||||
});
|
|
||||||
let headers;
|
|
||||||
if (auth) {
|
|
||||||
core.debug('set auth');
|
|
||||||
headers = {
|
|
||||||
authorization: auth
|
|
||||||
};
|
|
||||||
}
|
|
||||||
const response = yield http.get(url, headers);
|
|
||||||
if (response.message.statusCode !== 200) {
|
|
||||||
const err = new HTTPError(response.message.statusCode);
|
|
||||||
core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
// Download the response body
|
|
||||||
const pipeline = util.promisify(stream.pipeline);
|
|
||||||
const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
|
|
||||||
const readStream = responseMessageFactory();
|
|
||||||
let succeeded = false;
|
|
||||||
try {
|
|
||||||
yield pipeline(readStream, fs.createWriteStream(dest));
|
|
||||||
core.debug('download complete');
|
|
||||||
succeeded = true;
|
|
||||||
return dest;
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
// Error, delete dest before retry
|
|
||||||
if (!succeeded) {
|
|
||||||
core.debug('download failed');
|
|
||||||
try {
|
|
||||||
yield io.rmRF(dest);
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
core.debug(`Failed to delete '${dest}'. ${err.message}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Extract a .7z file
|
|
||||||
*
|
|
||||||
* @param file path to the .7z file
|
|
||||||
* @param dest destination directory. Optional.
|
|
||||||
* @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
|
|
||||||
* problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
|
|
||||||
* gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
|
|
||||||
* bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
|
|
||||||
* interface, it is smaller than the full command line interface, and it does support long paths. At the
|
|
||||||
* time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
|
|
||||||
* Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
|
|
||||||
* to 7zr.exe can be pass to this function.
|
|
||||||
* @returns path to the destination directory
|
|
||||||
*/
|
|
||||||
function extract7z(file, dest, _7zPath) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
|
|
||||||
assert_1.ok(file, 'parameter "file" is required');
|
|
||||||
dest = yield _createExtractFolder(dest);
|
|
||||||
const originalCwd = process.cwd();
|
|
||||||
process.chdir(dest);
|
|
||||||
if (_7zPath) {
|
|
||||||
try {
|
|
||||||
const logLevel = core.isDebug() ? '-bb1' : '-bb0';
|
|
||||||
const args = [
|
|
||||||
'x',
|
|
||||||
logLevel,
|
|
||||||
'-bd',
|
|
||||||
'-sccUTF-8',
|
|
||||||
file
|
|
||||||
];
|
|
||||||
const options = {
|
|
||||||
silent: true
|
|
||||||
};
|
|
||||||
yield exec_1.exec(`"${_7zPath}"`, args, options);
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
process.chdir(originalCwd);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
const escapedScript = path
|
|
||||||
.join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
|
|
||||||
.replace(/'/g, "''")
|
|
||||||
.replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
|
|
||||||
const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
|
||||||
const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
|
||||||
const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
|
|
||||||
const args = [
|
|
||||||
'-NoLogo',
|
|
||||||
'-Sta',
|
|
||||||
'-NoProfile',
|
|
||||||
'-NonInteractive',
|
|
||||||
'-ExecutionPolicy',
|
|
||||||
'Unrestricted',
|
|
||||||
'-Command',
|
|
||||||
command
|
|
||||||
];
|
|
||||||
const options = {
|
|
||||||
silent: true
|
|
||||||
};
|
|
||||||
try {
|
|
||||||
const powershellPath = yield io.which('powershell', true);
|
|
||||||
yield exec_1.exec(`"${powershellPath}"`, args, options);
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
process.chdir(originalCwd);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return dest;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.extract7z = extract7z;
|
|
||||||
/**
|
|
||||||
* Extract a compressed tar archive
|
|
||||||
*
|
|
||||||
* @param file path to the tar
|
|
||||||
* @param dest destination directory. Optional.
|
|
||||||
* @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
|
|
||||||
* @returns path to the destination directory
|
|
||||||
*/
|
|
||||||
function extractTar(file, dest, flags = 'xz') {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
if (!file) {
|
|
||||||
throw new Error("parameter 'file' is required");
|
|
||||||
}
|
|
||||||
// Create dest
|
|
||||||
dest = yield _createExtractFolder(dest);
|
|
||||||
// Determine whether GNU tar
|
|
||||||
core.debug('Checking tar --version');
|
|
||||||
let versionOutput = '';
|
|
||||||
yield exec_1.exec('tar --version', [], {
|
|
||||||
ignoreReturnCode: true,
|
|
||||||
silent: true,
|
|
||||||
listeners: {
|
|
||||||
stdout: (data) => (versionOutput += data.toString()),
|
|
||||||
stderr: (data) => (versionOutput += data.toString())
|
|
||||||
}
|
|
||||||
});
|
|
||||||
core.debug(versionOutput.trim());
|
|
||||||
const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
|
|
||||||
// Initialize args
|
|
||||||
let args;
|
|
||||||
if (flags instanceof Array) {
|
|
||||||
args = flags;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
args = [flags];
|
|
||||||
}
|
|
||||||
if (core.isDebug() && !flags.includes('v')) {
|
|
||||||
args.push('-v');
|
|
||||||
}
|
|
||||||
let destArg = dest;
|
|
||||||
let fileArg = file;
|
|
||||||
if (IS_WINDOWS && isGnuTar) {
|
|
||||||
args.push('--force-local');
|
|
||||||
destArg = dest.replace(/\\/g, '/');
|
|
||||||
// Technically only the dest needs to have `/` but for aesthetic consistency
|
|
||||||
// convert slashes in the file arg too.
|
|
||||||
fileArg = file.replace(/\\/g, '/');
|
|
||||||
}
|
|
||||||
if (isGnuTar) {
|
|
||||||
// Suppress warnings when using GNU tar to extract archives created by BSD tar
|
|
||||||
args.push('--warning=no-unknown-keyword');
|
|
||||||
}
|
|
||||||
args.push('-C', destArg, '-f', fileArg);
|
|
||||||
yield exec_1.exec(`tar`, args);
|
|
||||||
return dest;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.extractTar = extractTar;
|
|
||||||
/**
|
|
||||||
* Extract a xar compatible archive
|
|
||||||
*
|
|
||||||
* @param file path to the archive
|
|
||||||
* @param dest destination directory. Optional.
|
|
||||||
* @param flags flags for the xar. Optional.
|
|
||||||
* @returns path to the destination directory
|
|
||||||
*/
|
|
||||||
function extractXar(file, dest, flags = []) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
assert_1.ok(IS_MAC, 'extractXar() not supported on current OS');
|
|
||||||
assert_1.ok(file, 'parameter "file" is required');
|
|
||||||
dest = yield _createExtractFolder(dest);
|
|
||||||
let args;
|
|
||||||
if (flags instanceof Array) {
|
|
||||||
args = flags;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
args = [flags];
|
|
||||||
}
|
|
||||||
args.push('-x', '-C', dest, '-f', file);
|
|
||||||
if (core.isDebug()) {
|
|
||||||
args.push('-v');
|
|
||||||
}
|
|
||||||
const xarPath = yield io.which('xar', true);
|
|
||||||
yield exec_1.exec(`"${xarPath}"`, _unique(args));
|
|
||||||
return dest;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.extractXar = extractXar;
|
|
||||||
/**
|
|
||||||
* Extract a zip
|
|
||||||
*
|
|
||||||
* @param file path to the zip
|
|
||||||
* @param dest destination directory. Optional.
|
|
||||||
* @returns path to the destination directory
|
|
||||||
*/
|
|
||||||
function extractZip(file, dest) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
if (!file) {
|
|
||||||
throw new Error("parameter 'file' is required");
|
|
||||||
}
|
|
||||||
dest = yield _createExtractFolder(dest);
|
|
||||||
if (IS_WINDOWS) {
|
|
||||||
yield extractZipWin(file, dest);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
yield extractZipNix(file, dest);
|
|
||||||
}
|
|
||||||
return dest;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.extractZip = extractZip;
|
|
||||||
function extractZipWin(file, dest) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
// build the powershell command
|
|
||||||
const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
|
|
||||||
const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
|
||||||
const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
|
|
||||||
// run powershell
|
|
||||||
const powershellPath = yield io.which('powershell', true);
|
|
||||||
const args = [
|
|
||||||
'-NoLogo',
|
|
||||||
'-Sta',
|
|
||||||
'-NoProfile',
|
|
||||||
'-NonInteractive',
|
|
||||||
'-ExecutionPolicy',
|
|
||||||
'Unrestricted',
|
|
||||||
'-Command',
|
|
||||||
command
|
|
||||||
];
|
|
||||||
yield exec_1.exec(`"${powershellPath}"`, args);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function extractZipNix(file, dest) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
const unzipPath = yield io.which('unzip', true);
|
|
||||||
const args = [file];
|
|
||||||
if (!core.isDebug()) {
|
|
||||||
args.unshift('-q');
|
|
||||||
}
|
|
||||||
yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest });
|
|
||||||
});
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Caches a directory and installs it into the tool cacheDir
|
|
||||||
*
|
|
||||||
* @param sourceDir the directory to cache into tools
|
|
||||||
* @param tool tool name
|
|
||||||
* @param version version of the tool. semver format
|
|
||||||
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
|
||||||
*/
|
|
||||||
function cacheDir(sourceDir, tool, version, arch) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
version = semver.clean(version) || version;
|
|
||||||
arch = arch || os.arch();
|
|
||||||
core.debug(`Caching tool ${tool} ${version} ${arch}`);
|
|
||||||
core.debug(`source dir: ${sourceDir}`);
|
|
||||||
if (!fs.statSync(sourceDir).isDirectory()) {
|
|
||||||
throw new Error('sourceDir is not a directory');
|
|
||||||
}
|
|
||||||
// Create the tool dir
|
|
||||||
const destPath = yield _createToolPath(tool, version, arch);
|
|
||||||
// copy each child item. do not move. move can fail on Windows
|
|
||||||
// due to anti-virus software having an open handle on a file.
|
|
||||||
for (const itemName of fs.readdirSync(sourceDir)) {
|
|
||||||
const s = path.join(sourceDir, itemName);
|
|
||||||
yield io.cp(s, destPath, { recursive: true });
|
|
||||||
}
|
|
||||||
// write .complete
|
|
||||||
_completeToolPath(tool, version, arch);
|
|
||||||
return destPath;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.cacheDir = cacheDir;
|
|
||||||
/**
|
|
||||||
* Caches a downloaded file (GUID) and installs it
|
|
||||||
* into the tool cache with a given targetName
|
|
||||||
*
|
|
||||||
* @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
|
|
||||||
* @param targetFile the name of the file name in the tools directory
|
|
||||||
* @param tool tool name
|
|
||||||
* @param version version of the tool. semver format
|
|
||||||
* @param arch architecture of the tool. Optional. Defaults to machine architecture
|
|
||||||
*/
|
|
||||||
function cacheFile(sourceFile, targetFile, tool, version, arch) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
version = semver.clean(version) || version;
|
|
||||||
arch = arch || os.arch();
|
|
||||||
core.debug(`Caching tool ${tool} ${version} ${arch}`);
|
|
||||||
core.debug(`source file: ${sourceFile}`);
|
|
||||||
if (!fs.statSync(sourceFile).isFile()) {
|
|
||||||
throw new Error('sourceFile is not a file');
|
|
||||||
}
|
|
||||||
// create the tool dir
|
|
||||||
const destFolder = yield _createToolPath(tool, version, arch);
|
|
||||||
// copy instead of move. move can fail on Windows due to
|
|
||||||
// anti-virus software having an open handle on a file.
|
|
||||||
const destPath = path.join(destFolder, targetFile);
|
|
||||||
core.debug(`destination file ${destPath}`);
|
|
||||||
yield io.cp(sourceFile, destPath);
|
|
||||||
// write .complete
|
|
||||||
_completeToolPath(tool, version, arch);
|
|
||||||
return destFolder;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.cacheFile = cacheFile;
|
|
||||||
/**
|
|
||||||
* Finds the path to a tool version in the local installed tool cache
|
|
||||||
*
|
|
||||||
* @param toolName name of the tool
|
|
||||||
* @param versionSpec version of the tool
|
|
||||||
* @param arch optional arch. defaults to arch of computer
|
|
||||||
*/
|
|
||||||
function find(toolName, versionSpec, arch) {
|
|
||||||
if (!toolName) {
|
|
||||||
throw new Error('toolName parameter is required');
|
|
||||||
}
|
|
||||||
if (!versionSpec) {
|
|
||||||
throw new Error('versionSpec parameter is required');
|
|
||||||
}
|
|
||||||
arch = arch || os.arch();
|
|
||||||
// attempt to resolve an explicit version
|
|
||||||
if (!_isExplicitVersion(versionSpec)) {
|
|
||||||
const localVersions = findAllVersions(toolName, arch);
|
|
||||||
const match = _evaluateVersions(localVersions, versionSpec);
|
|
||||||
versionSpec = match;
|
|
||||||
}
|
|
||||||
// check for the explicit version in the cache
|
|
||||||
let toolPath = '';
|
|
||||||
if (versionSpec) {
|
|
||||||
versionSpec = semver.clean(versionSpec) || '';
|
|
||||||
const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);
|
|
||||||
core.debug(`checking cache: ${cachePath}`);
|
|
||||||
if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
|
|
||||||
core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
|
|
||||||
toolPath = cachePath;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
core.debug('not found');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return toolPath;
|
|
||||||
}
|
|
||||||
exports.find = find;
|
|
||||||
/**
|
|
||||||
* Finds the paths to all versions of a tool that are installed in the local tool cache
|
|
||||||
*
|
|
||||||
* @param toolName name of the tool
|
|
||||||
* @param arch optional arch. defaults to arch of computer
|
|
||||||
*/
|
|
||||||
function findAllVersions(toolName, arch) {
|
|
||||||
const versions = [];
|
|
||||||
arch = arch || os.arch();
|
|
||||||
const toolPath = path.join(_getCacheDirectory(), toolName);
|
|
||||||
if (fs.existsSync(toolPath)) {
|
|
||||||
const children = fs.readdirSync(toolPath);
|
|
||||||
for (const child of children) {
|
|
||||||
if (_isExplicitVersion(child)) {
|
|
||||||
const fullPath = path.join(toolPath, child, arch || '');
|
|
||||||
if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
|
|
||||||
versions.push(child);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return versions;
|
|
||||||
}
|
|
||||||
exports.findAllVersions = findAllVersions;
|
|
||||||
function getManifestFromRepo(owner, repo, auth, branch = 'master') {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
let releases = [];
|
|
||||||
const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`;
|
|
||||||
const http = new httpm.HttpClient('tool-cache');
|
|
||||||
const headers = {};
|
|
||||||
if (auth) {
|
|
||||||
core.debug('set auth');
|
|
||||||
headers.authorization = auth;
|
|
||||||
}
|
|
||||||
const response = yield http.getJson(treeUrl, headers);
|
|
||||||
if (!response.result) {
|
|
||||||
return releases;
|
|
||||||
}
|
|
||||||
let manifestUrl = '';
|
|
||||||
for (const item of response.result.tree) {
|
|
||||||
if (item.path === 'versions-manifest.json') {
|
|
||||||
manifestUrl = item.url;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
headers['accept'] = 'application/vnd.github.VERSION.raw';
|
|
||||||
let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody();
|
|
||||||
if (versionsRaw) {
|
|
||||||
// shouldn't be needed but protects against invalid json saved with BOM
|
|
||||||
versionsRaw = versionsRaw.replace(/^\uFEFF/, '');
|
|
||||||
try {
|
|
||||||
releases = JSON.parse(versionsRaw);
|
|
||||||
}
|
|
||||||
catch (_a) {
|
|
||||||
core.debug('Invalid json');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return releases;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.getManifestFromRepo = getManifestFromRepo;
|
|
||||||
function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
// wrap the internal impl
|
|
||||||
const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter);
|
|
||||||
return match;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.findFromManifest = findFromManifest;
|
|
||||||
function _createExtractFolder(dest) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
if (!dest) {
|
|
||||||
// create a temp dir
|
|
||||||
dest = path.join(_getTempDirectory(), v4_1.default());
|
|
||||||
}
|
|
||||||
yield io.mkdirP(dest);
|
|
||||||
return dest;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function _createToolPath(tool, version, arch) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
|
|
||||||
core.debug(`destination ${folderPath}`);
|
|
||||||
const markerPath = `${folderPath}.complete`;
|
|
||||||
yield io.rmRF(folderPath);
|
|
||||||
yield io.rmRF(markerPath);
|
|
||||||
yield io.mkdirP(folderPath);
|
|
||||||
return folderPath;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function _completeToolPath(tool, version, arch) {
|
|
||||||
const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
|
|
||||||
const markerPath = `${folderPath}.complete`;
|
|
||||||
fs.writeFileSync(markerPath, '');
|
|
||||||
core.debug('finished caching tool');
|
|
||||||
}
|
|
||||||
function _isExplicitVersion(versionSpec) {
|
|
||||||
const c = semver.clean(versionSpec) || '';
|
|
||||||
core.debug(`isExplicit: ${c}`);
|
|
||||||
const valid = semver.valid(c) != null;
|
|
||||||
core.debug(`explicit? ${valid}`);
|
|
||||||
return valid;
|
|
||||||
}
|
|
||||||
function _evaluateVersions(versions, versionSpec) {
|
|
||||||
let version = '';
|
|
||||||
core.debug(`evaluating ${versions.length} versions`);
|
|
||||||
versions = versions.sort((a, b) => {
|
|
||||||
if (semver.gt(a, b)) {
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
return -1;
|
|
||||||
});
|
|
||||||
for (let i = versions.length - 1; i >= 0; i--) {
|
|
||||||
const potential = versions[i];
|
|
||||||
const satisfied = semver.satisfies(potential, versionSpec);
|
|
||||||
if (satisfied) {
|
|
||||||
version = potential;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (version) {
|
|
||||||
core.debug(`matched: ${version}`);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
core.debug('match not found');
|
|
||||||
}
|
|
||||||
return version;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets RUNNER_TOOL_CACHE
|
|
||||||
*/
|
|
||||||
function _getCacheDirectory() {
|
|
||||||
const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
|
|
||||||
assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');
|
|
||||||
return cacheDirectory;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets RUNNER_TEMP
|
|
||||||
*/
|
|
||||||
function _getTempDirectory() {
|
|
||||||
const tempDirectory = process.env['RUNNER_TEMP'] || '';
|
|
||||||
assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
|
|
||||||
return tempDirectory;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets a global variable
|
|
||||||
*/
|
|
||||||
function _getGlobal(key, defaultValue) {
|
|
||||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
||||||
const value = global[key];
|
|
||||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
|
||||||
return value !== undefined ? value : defaultValue;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns an array of unique values.
|
|
||||||
* @param values Values to make unique.
|
|
||||||
*/
|
|
||||||
function _unique(values) {
|
|
||||||
return Array.from(new Set(values));
|
|
||||||
}
|
|
||||||
//# sourceMappingURL=tool-cache.js.map
|
|
||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
/* 140 */,
|
/* 140 */,
|
||||||
@@ -11478,7 +10864,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.LocalDistribution = void 0;
-const tc = __importStar(__webpack_require__(139));
+const tc = __importStar(__webpack_require__(186));
 const core = __importStar(__webpack_require__(470));
 const fs_1 = __importDefault(__webpack_require__(747));
 const path_1 = __importDefault(__webpack_require__(622));
@@ -13639,7 +13025,618 @@ module.exports = {
 /* 183 */,
 /* 184 */,
 /* 185 */,
-/* 186 */,
+/* 186 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
The rest of this hunk adds the tool-cache module body (previously id 139) under its new id 186:
|
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||||
|
result["default"] = mod;
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const core = __importStar(__webpack_require__(470));
|
||||||
|
const io = __importStar(__webpack_require__(1));
|
||||||
|
const fs = __importStar(__webpack_require__(747));
|
||||||
|
const mm = __importStar(__webpack_require__(31));
|
||||||
|
const os = __importStar(__webpack_require__(87));
|
||||||
|
const path = __importStar(__webpack_require__(622));
|
||||||
|
const httpm = __importStar(__webpack_require__(539));
|
||||||
|
const semver = __importStar(__webpack_require__(550));
|
||||||
|
const stream = __importStar(__webpack_require__(794));
|
||||||
|
const util = __importStar(__webpack_require__(669));
|
||||||
|
const v4_1 = __importDefault(__webpack_require__(494));
|
||||||
|
const exec_1 = __webpack_require__(986);
|
||||||
|
const assert_1 = __webpack_require__(357);
|
||||||
|
const retry_helper_1 = __webpack_require__(979);
|
||||||
|
class HTTPError extends Error {
|
||||||
|
constructor(httpStatusCode) {
|
||||||
|
super(`Unexpected HTTP response: ${httpStatusCode}`);
|
||||||
|
this.httpStatusCode = httpStatusCode;
|
||||||
|
Object.setPrototypeOf(this, new.target.prototype);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.HTTPError = HTTPError;
|
||||||
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
|
const IS_MAC = process.platform === 'darwin';
|
||||||
|
const userAgent = 'actions/tool-cache';
|
||||||
|
/**
|
||||||
|
* Download a tool from an url and stream it into a file
|
||||||
|
*
|
||||||
|
* @param url url of tool to download
|
||||||
|
* @param dest path to download tool
|
||||||
|
* @param auth authorization header
|
||||||
|
* @returns path to downloaded tool
|
||||||
|
*/
|
||||||
|
function downloadTool(url, dest, auth) {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
dest = dest || path.join(_getTempDirectory(), v4_1.default());
|
||||||
|
yield io.mkdirP(path.dirname(dest));
|
||||||
|
core.debug(`Downloading ${url}`);
|
||||||
|
core.debug(`Destination ${dest}`);
|
||||||
|
const maxAttempts = 3;
|
||||||
|
const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
|
||||||
|
const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
|
||||||
|
const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds);
|
||||||
|
return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
|
||||||
|
return yield downloadToolAttempt(url, dest || '', auth);
|
||||||
|
}), (err) => {
|
||||||
|
if (err instanceof HTTPError && err.httpStatusCode) {
|
||||||
|
// Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests
|
||||||
|
if (err.httpStatusCode < 500 &&
|
||||||
|
err.httpStatusCode !== 408 &&
|
||||||
|
err.httpStatusCode !== 429) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Otherwise retry
|
||||||
|
return true;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
exports.downloadTool = downloadTool;
|
||||||
|
function downloadToolAttempt(url, dest, auth) {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
if (fs.existsSync(dest)) {
|
||||||
|
throw new Error(`Destination file path ${dest} already exists`);
|
||||||
|
}
|
||||||
|
// Get the response headers
|
||||||
|
const http = new httpm.HttpClient(userAgent, [], {
|
||||||
|
allowRetries: false
|
||||||
|
});
|
||||||
|
let headers;
|
||||||
|
if (auth) {
|
||||||
|
core.debug('set auth');
|
||||||
|
headers = {
|
||||||
|
authorization: auth
|
||||||
|
};
|
||||||
|
}
|
||||||
|
const response = yield http.get(url, headers);
|
||||||
|
if (response.message.statusCode !== 200) {
|
||||||
|
const err = new HTTPError(response.message.statusCode);
|
||||||
|
core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
// Download the response body
|
||||||
|
const pipeline = util.promisify(stream.pipeline);
|
||||||
|
const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message);
|
||||||
|
const readStream = responseMessageFactory();
|
||||||
|
let succeeded = false;
|
||||||
|
try {
|
||||||
|
yield pipeline(readStream, fs.createWriteStream(dest));
|
||||||
|
core.debug('download complete');
|
||||||
|
succeeded = true;
|
||||||
|
return dest;
|
||||||
|
}
|
||||||
|
finally {
|
||||||
|
// Error, delete dest before retry
|
||||||
|
if (!succeeded) {
|
||||||
|
core.debug('download failed');
|
||||||
|
try {
|
||||||
|
yield io.rmRF(dest);
|
||||||
|
}
|
||||||
|
catch (err) {
|
||||||
|
core.debug(`Failed to delete '${dest}'. ${err.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Extract a .7z file
|
||||||
|
*
|
||||||
|
* @param file path to the .7z file
|
||||||
|
* @param dest destination directory. Optional.
|
||||||
|
* @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
|
||||||
|
* problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
|
||||||
|
* gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
|
||||||
|
* bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
|
||||||
|
* interface, it is smaller than the full command line interface, and it does support long paths. At the
|
||||||
|
* time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
|
||||||
|
* Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
|
||||||
|
* to 7zr.exe can be pass to this function.
|
||||||
|
* @returns path to the destination directory
|
||||||
|
*/
|
||||||
|
function extract7z(file, dest, _7zPath) {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
|
||||||
|
assert_1.ok(file, 'parameter "file" is required');
|
||||||
|
dest = yield _createExtractFolder(dest);
|
||||||
|
const originalCwd = process.cwd();
|
||||||
|
process.chdir(dest);
|
||||||
|
if (_7zPath) {
|
||||||
|
try {
|
||||||
|
const logLevel = core.isDebug() ? '-bb1' : '-bb0';
|
||||||
|
const args = [
|
||||||
|
'x',
|
||||||
|
logLevel,
|
||||||
|
'-bd',
|
||||||
|
'-sccUTF-8',
|
||||||
|
file
|
||||||
|
];
|
||||||
|
const options = {
|
||||||
|
silent: true
|
||||||
|
};
|
||||||
|
yield exec_1.exec(`"${_7zPath}"`, args, options);
|
||||||
|
}
|
||||||
|
finally {
|
||||||
|
process.chdir(originalCwd);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
const escapedScript = path
|
||||||
|
.join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
|
||||||
|
.replace(/'/g, "''")
|
||||||
|
.replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
|
||||||
|
const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
||||||
|
const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
|
||||||
|
const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
|
||||||
|
const args = [
|
||||||
|
'-NoLogo',
|
||||||
|
'-Sta',
|
||||||
|
'-NoProfile',
|
||||||
|
'-NonInteractive',
|
||||||
|
'-ExecutionPolicy',
|
||||||
|
'Unrestricted',
|
||||||
|
'-Command',
|
||||||
|
command
|
||||||
|
];
|
||||||
|
const options = {
|
||||||
|
silent: true
|
||||||
|
};
|
||||||
|
try {
|
||||||
|
const powershellPath = yield io.which('powershell', true);
|
||||||
|
yield exec_1.exec(`"${powershellPath}"`, args, options);
|
||||||
|
}
|
||||||
|
finally {
|
||||||
|
process.chdir(originalCwd);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return dest;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
exports.extract7z = extract7z;
|
||||||
|
/**
 * Extract a compressed tar archive
 *
 * @param file path to the tar
 * @param dest destination directory. Optional.
 * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
 * @returns path to the destination directory
 */
function extractTar(file, dest, flags = 'xz') {
    return __awaiter(this, void 0, void 0, function* () {
        if (!file) {
            throw new Error("parameter 'file' is required");
        }
        // Create dest
        dest = yield _createExtractFolder(dest);
        // Determine whether GNU tar
        core.debug('Checking tar --version');
        let versionOutput = '';
        yield exec_1.exec('tar --version', [], {
            ignoreReturnCode: true,
            silent: true,
            listeners: {
                stdout: (data) => (versionOutput += data.toString()),
                stderr: (data) => (versionOutput += data.toString())
            }
        });
        core.debug(versionOutput.trim());
        const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
        // Initialize args
        let args;
        if (flags instanceof Array) {
            args = flags;
        }
        else {
            args = [flags];
        }
        if (core.isDebug() && !flags.includes('v')) {
            args.push('-v');
        }
        let destArg = dest;
        let fileArg = file;
        if (IS_WINDOWS && isGnuTar) {
            args.push('--force-local');
            destArg = dest.replace(/\\/g, '/');
            // Technically only the dest needs to have `/` but for aesthetic consistency
            // convert slashes in the file arg too.
            fileArg = file.replace(/\\/g, '/');
        }
        if (isGnuTar) {
            // Suppress warnings when using GNU tar to extract archives created by BSD tar
            args.push('--warning=no-unknown-keyword');
        }
        args.push('-C', destArg, '-f', fileArg);
        yield exec_1.exec(`tar`, args);
        return dest;
    });
}
exports.extractTar = extractTar;
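Editorial note: a short sketch of calling extractTar on a gzipped tarball; the extra flag in the array form is illustrative only.

import * as tc from '@actions/tool-cache';

async function extractJdkTarball(archivePath: string): Promise<string> {
  // the default flags 'xz' handle .tar.gz; pass an array to add flags,
  // e.g. dropping the top-level folder of the archive
  return tc.extractTar(archivePath, undefined, ['xz', '--strip-components=1']);
}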
/**
 * Extract a xar compatible archive
 *
 * @param file path to the archive
 * @param dest destination directory. Optional.
 * @param flags flags for the xar. Optional.
 * @returns path to the destination directory
 */
function extractXar(file, dest, flags = []) {
    return __awaiter(this, void 0, void 0, function* () {
        assert_1.ok(IS_MAC, 'extractXar() not supported on current OS');
        assert_1.ok(file, 'parameter "file" is required');
        dest = yield _createExtractFolder(dest);
        let args;
        if (flags instanceof Array) {
            args = flags;
        }
        else {
            args = [flags];
        }
        args.push('-x', '-C', dest, '-f', file);
        if (core.isDebug()) {
            args.push('-v');
        }
        const xarPath = yield io.which('xar', true);
        yield exec_1.exec(`"${xarPath}"`, _unique(args));
        return dest;
    });
}
exports.extractXar = extractXar;
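Editorial note: extractXar is the macOS-only counterpart (note the IS_MAC assert above); a minimal sketch with an illustrative .pkg path.

import * as tc from '@actions/tool-cache';

async function extractPkg(pkgPath: string): Promise<string> {
  // valid only on macOS runners; shells out to `xar -x -C <dest> -f <file>`
  return tc.extractXar(pkgPath);
}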
/**
 * Extract a zip
 *
 * @param file path to the zip
 * @param dest destination directory. Optional.
 * @returns path to the destination directory
 */
function extractZip(file, dest) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!file) {
            throw new Error("parameter 'file' is required");
        }
        dest = yield _createExtractFolder(dest);
        if (IS_WINDOWS) {
            yield extractZipWin(file, dest);
        }
        else {
            yield extractZipNix(file, dest);
        }
        return dest;
    });
}
exports.extractZip = extractZip;
function extractZipWin(file, dest) {
    return __awaiter(this, void 0, void 0, function* () {
        // build the powershell command
        const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
        const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
        const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
        // run powershell
        const powershellPath = yield io.which('powershell', true);
        const args = [
            '-NoLogo',
            '-Sta',
            '-NoProfile',
            '-NonInteractive',
            '-ExecutionPolicy',
            'Unrestricted',
            '-Command',
            command
        ];
        yield exec_1.exec(`"${powershellPath}"`, args);
    });
}
function extractZipNix(file, dest) {
    return __awaiter(this, void 0, void 0, function* () {
        const unzipPath = yield io.which('unzip', true);
        const args = [file];
        if (!core.isDebug()) {
            args.unshift('-q');
        }
        yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest });
    });
}
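Editorial note: a minimal sketch of downloadTool followed by extractZip; the URL is illustrative.

import * as tc from '@actions/tool-cache';

async function downloadAndExtractZip(): Promise<string> {
  const archivePath = await tc.downloadTool('https://example.com/jdk-windows-x64.zip');
  // on Windows this shells out to PowerShell's ZipFile::ExtractToDirectory, elsewhere to `unzip`
  return tc.extractZip(archivePath);
}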
/**
 * Caches a directory and installs it into the tool cacheDir
 *
 * @param sourceDir the directory to cache into tools
 * @param tool tool name
 * @param version version of the tool. semver format
 * @param arch architecture of the tool. Optional. Defaults to machine architecture
 */
function cacheDir(sourceDir, tool, version, arch) {
    return __awaiter(this, void 0, void 0, function* () {
        version = semver.clean(version) || version;
        arch = arch || os.arch();
        core.debug(`Caching tool ${tool} ${version} ${arch}`);
        core.debug(`source dir: ${sourceDir}`);
        if (!fs.statSync(sourceDir).isDirectory()) {
            throw new Error('sourceDir is not a directory');
        }
        // Create the tool dir
        const destPath = yield _createToolPath(tool, version, arch);
        // copy each child item. do not move. move can fail on Windows
        // due to anti-virus software having an open handle on a file.
        for (const itemName of fs.readdirSync(sourceDir)) {
            const s = path.join(sourceDir, itemName);
            yield io.cp(s, destPath, { recursive: true });
        }
        // write .complete
        _completeToolPath(tool, version, arch);
        return destPath;
    });
}
exports.cacheDir = cacheDir;
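Editorial note: a sketch of caching an extracted directory into the runner tool cache and exposing it on PATH; the tool name and version are illustrative, not this action's exact naming.

import * as core from '@actions/core';
import * as tc from '@actions/tool-cache';

async function installExtractedJdk(extractedDir: string): Promise<string> {
  // copies (never moves) extractedDir into $RUNNER_TOOL_CACHE/<tool>/<version>/<arch>
  // and writes the .complete marker so find() treats the entry as valid
  const toolPath = await tc.cacheDir(extractedDir, 'Java_Temurin_jdk', '17.0.2', 'x64');
  core.addPath(toolPath);
  return toolPath;
}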
/**
 * Caches a downloaded file (GUID) and installs it
 * into the tool cache with a given targetName
 *
 * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
 * @param targetFile the name of the file name in the tools directory
 * @param tool tool name
 * @param version version of the tool. semver format
 * @param arch architecture of the tool. Optional. Defaults to machine architecture
 */
function cacheFile(sourceFile, targetFile, tool, version, arch) {
    return __awaiter(this, void 0, void 0, function* () {
        version = semver.clean(version) || version;
        arch = arch || os.arch();
        core.debug(`Caching tool ${tool} ${version} ${arch}`);
        core.debug(`source file: ${sourceFile}`);
        if (!fs.statSync(sourceFile).isFile()) {
            throw new Error('sourceFile is not a file');
        }
        // create the tool dir
        const destFolder = yield _createToolPath(tool, version, arch);
        // copy instead of move. move can fail on Windows due to
        // anti-virus software having an open handle on a file.
        const destPath = path.join(destFolder, targetFile);
        core.debug(`destination file ${destPath}`);
        yield io.cp(sourceFile, destPath);
        // write .complete
        _completeToolPath(tool, version, arch);
        return destFolder;
    });
}
exports.cacheFile = cacheFile;
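Editorial note: cacheFile is the single-file counterpart; a sketch assuming a downloaded standalone jar, with illustrative names and URL.

import * as tc from '@actions/tool-cache';

async function cacheStandaloneJar(): Promise<string> {
  // downloadTool returns a GUID-named temp file; cacheFile stores it under the requested file name
  const downloadPath = await tc.downloadTool('https://example.com/checkstyle.jar');
  return tc.cacheFile(downloadPath, 'checkstyle.jar', 'checkstyle', '10.3.0');
}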
/**
 * Finds the path to a tool version in the local installed tool cache
 *
 * @param toolName name of the tool
 * @param versionSpec version of the tool
 * @param arch optional arch. defaults to arch of computer
 */
function find(toolName, versionSpec, arch) {
    if (!toolName) {
        throw new Error('toolName parameter is required');
    }
    if (!versionSpec) {
        throw new Error('versionSpec parameter is required');
    }
    arch = arch || os.arch();
    // attempt to resolve an explicit version
    if (!_isExplicitVersion(versionSpec)) {
        const localVersions = findAllVersions(toolName, arch);
        const match = _evaluateVersions(localVersions, versionSpec);
        versionSpec = match;
    }
    // check for the explicit version in the cache
    let toolPath = '';
    if (versionSpec) {
        versionSpec = semver.clean(versionSpec) || '';
        const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch);
        core.debug(`checking cache: ${cachePath}`);
        if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
            core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
            toolPath = cachePath;
        }
        else {
            core.debug('not found');
        }
    }
    return toolPath;
}
exports.find = find;
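Editorial note: a sketch of looking up a previously cached tool; a semver range is resolved against findAllVersions() as shown above, and an empty string means a cache miss. The tool name is illustrative.

import * as tc from '@actions/tool-cache';

function resolveCachedJdk(versionSpec: string): string | undefined {
  // e.g. versionSpec '11.x' matches the newest cached 11.* entry for the given arch
  const toolPath = tc.find('Java_Temurin_jdk', versionSpec, 'x64');
  return toolPath !== '' ? toolPath : undefined;
}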
/**
 * Finds the paths to all versions of a tool that are installed in the local tool cache
 *
 * @param toolName name of the tool
 * @param arch optional arch. defaults to arch of computer
 */
function findAllVersions(toolName, arch) {
    const versions = [];
    arch = arch || os.arch();
    const toolPath = path.join(_getCacheDirectory(), toolName);
    if (fs.existsSync(toolPath)) {
        const children = fs.readdirSync(toolPath);
        for (const child of children) {
            if (_isExplicitVersion(child)) {
                const fullPath = path.join(toolPath, child, arch || '');
                if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
                    versions.push(child);
                }
            }
        }
    }
    return versions;
}
exports.findAllVersions = findAllVersions;
function getManifestFromRepo(owner, repo, auth, branch = 'master') {
    return __awaiter(this, void 0, void 0, function* () {
        let releases = [];
        const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`;
        const http = new httpm.HttpClient('tool-cache');
        const headers = {};
        if (auth) {
            core.debug('set auth');
            headers.authorization = auth;
        }
        const response = yield http.getJson(treeUrl, headers);
        if (!response.result) {
            return releases;
        }
        let manifestUrl = '';
        for (const item of response.result.tree) {
            if (item.path === 'versions-manifest.json') {
                manifestUrl = item.url;
                break;
            }
        }
        headers['accept'] = 'application/vnd.github.VERSION.raw';
        let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody();
        if (versionsRaw) {
            // shouldn't be needed but protects against invalid json saved with BOM
            versionsRaw = versionsRaw.replace(/^\uFEFF/, '');
            try {
                releases = JSON.parse(versionsRaw);
            }
            catch (_a) {
                core.debug('Invalid json');
            }
        }
        return releases;
    });
}
exports.getManifestFromRepo = getManifestFromRepo;
function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) {
    return __awaiter(this, void 0, void 0, function* () {
        // wrap the internal impl
        const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter);
        return match;
    });
}
exports.findFromManifest = findFromManifest;
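Editorial note: a sketch of the manifest helpers above, assuming a repository that publishes a versions-manifest.json on its default branch; the owner, repo, and branch names are illustrative.

import * as tc from '@actions/tool-cache';

async function pickRelease(auth: string): Promise<string | undefined> {
  // fetches and parses versions-manifest.json from the given repo and branch
  const manifest = await tc.getManifestFromRepo('example-org', 'example-versions', auth, 'main');
  // resolve a version spec against the manifest for the current architecture
  const release = await tc.findFromManifest('17.x', true, manifest);
  return release?.version;
}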
function _createExtractFolder(dest) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!dest) {
            // create a temp dir
            dest = path.join(_getTempDirectory(), v4_1.default());
        }
        yield io.mkdirP(dest);
        return dest;
    });
}
function _createToolPath(tool, version, arch) {
    return __awaiter(this, void 0, void 0, function* () {
        const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
        core.debug(`destination ${folderPath}`);
        const markerPath = `${folderPath}.complete`;
        yield io.rmRF(folderPath);
        yield io.rmRF(markerPath);
        yield io.mkdirP(folderPath);
        return folderPath;
    });
}
function _completeToolPath(tool, version, arch) {
    const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || '');
    const markerPath = `${folderPath}.complete`;
    fs.writeFileSync(markerPath, '');
    core.debug('finished caching tool');
}
function _isExplicitVersion(versionSpec) {
    const c = semver.clean(versionSpec) || '';
    core.debug(`isExplicit: ${c}`);
    const valid = semver.valid(c) != null;
    core.debug(`explicit? ${valid}`);
    return valid;
}
function _evaluateVersions(versions, versionSpec) {
    let version = '';
    core.debug(`evaluating ${versions.length} versions`);
    versions = versions.sort((a, b) => {
        if (semver.gt(a, b)) {
            return 1;
        }
        return -1;
    });
    for (let i = versions.length - 1; i >= 0; i--) {
        const potential = versions[i];
        const satisfied = semver.satisfies(potential, versionSpec);
        if (satisfied) {
            version = potential;
            break;
        }
    }
    if (version) {
        core.debug(`matched: ${version}`);
    }
    else {
        core.debug('match not found');
    }
    return version;
}
/**
 * Gets RUNNER_TOOL_CACHE
 */
function _getCacheDirectory() {
    const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
    assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined');
    return cacheDirectory;
}
/**
 * Gets RUNNER_TEMP
 */
function _getTempDirectory() {
    const tempDirectory = process.env['RUNNER_TEMP'] || '';
    assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined');
    return tempDirectory;
}
/**
 * Gets a global variable
 */
function _getGlobal(key, defaultValue) {
    /* eslint-disable @typescript-eslint/no-explicit-any */
    const value = global[key];
    /* eslint-enable @typescript-eslint/no-explicit-any */
    return value !== undefined ? value : defaultValue;
}
/**
 * Returns an array of unique values.
 * @param values Values to make unique.
 */
function _unique(values) {
    return Array.from(new Set(values));
}
//# sourceMappingURL=tool-cache.js.map

/***/ }),
/* 187 */,
/* 188 */,
/* 189 */,
@ -13861,7 +13858,7 @@ const base_installer_1 = __webpack_require__(83);
const semver_1 = __importDefault(__webpack_require__(876));
const util_1 = __webpack_require__(322);
const core = __importStar(__webpack_require__(470));
const tc = __importStar(__webpack_require__(139));
const tc = __importStar(__webpack_require__(186));
const fs_1 = __importDefault(__webpack_require__(747));
const path_1 = __importDefault(__webpack_require__(622));
class MicrosoftDistributions extends base_installer_1.JavaBase {
@ -15207,7 +15204,8 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
//
// If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
// on 64-bit systems), split the download into multiple segments
const maxSegmentSize = buffer.constants.MAX_LENGTH;
// ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly.
const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs.openSync(archivePath, 'w');
try {
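Editorial note on the cap introduced above: 2147483647 is 2^31 - 1 bytes (about 2 GiB); per the comment added in the diff, larger values produce out-of-range errors, so the segment size is clamped with Math.min. A minimal illustration:

import * as buffer from 'buffer';

// same clamping as the patched line: never request a segment larger than 2^31 - 1 bytes,
// even where buffer.constants.MAX_LENGTH is larger on newer 64-bit Node versions
const TWO_GB = 2147483647; // 2 ** 31 - 1
const maxSegmentSize = Math.min(TWO_GB, buffer.constants.MAX_LENGTH);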
@ -18764,13 +18762,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isJobStatusSuccess = exports.getToolcachePath = exports.isVersionSatisfies = exports.getDownloadArchiveExtension = exports.extractJdkFile = exports.getVersionFromToolcachePath = exports.getBooleanInput = exports.getTempDir = void 0;
exports.isCacheFeatureAvailable = exports.isGhes = exports.isJobStatusSuccess = exports.getToolcachePath = exports.isVersionSatisfies = exports.getDownloadArchiveExtension = exports.extractJdkFile = exports.getVersionFromToolcachePath = exports.getBooleanInput = exports.getTempDir = void 0;
const os_1 = __importDefault(__webpack_require__(87));
const path_1 = __importDefault(__webpack_require__(622));
const fs = __importStar(__webpack_require__(747));
const semver = __importStar(__webpack_require__(876));
const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470));
const tc = __importStar(__webpack_require__(139));
const tc = __importStar(__webpack_require__(186));
const constants_1 = __webpack_require__(211);
function getTempDir() {
let tempDirectory = process.env['RUNNER_TEMP'] || os_1.default.tmpdir();
@ -18841,6 +18840,24 @@ function isJobStatusSuccess() {
return jobStatus === 'success';
}
exports.isJobStatusSuccess = isJobStatusSuccess;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}
exports.isGhes = isGhes;
function isCacheFeatureAvailable() {
if (!cache.isFeatureAvailable()) {
if (isGhes()) {
throw new Error('Caching is only supported on GHES version >= 3.5. If you are on a version >= 3.5, please check with your GHES admin if the Actions cache service is enabled or not.');
}
else {
core.warning('The runner was not able to contact the cache service. Caching will be skipped');
}
return false;
}
return true;
}
exports.isCacheFeatureAvailable = isCacheFeatureAvailable;


/***/ }),
@ -28720,7 +28737,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.TemurinDistribution = exports.TemurinImplementation = void 0;
const core = __importStar(__webpack_require__(470));
const tc = __importStar(__webpack_require__(139));
const tc = __importStar(__webpack_require__(186));
const fs_1 = __importDefault(__webpack_require__(747));
const path_1 = __importDefault(__webpack_require__(622));
const semver_1 = __importDefault(__webpack_require__(876));
@ -37603,7 +37620,7 @@ exports.document_adopt = document_adopt;
/* 494 */
/***/ (function(module, __unusedexports, __webpack_require__) {

var rng = __webpack_require__(58);
var rng = __webpack_require__(139);
var bytesToUuid = __webpack_require__(722);

function v4(options, buf, offset) {
@ -38471,7 +38488,7 @@ const base_installer_1 = __webpack_require__(83);
const semver_1 = __importDefault(__webpack_require__(876));
const util_1 = __webpack_require__(322);
const core = __importStar(__webpack_require__(470));
const tc = __importStar(__webpack_require__(139));
const tc = __importStar(__webpack_require__(186));
const fs_1 = __importDefault(__webpack_require__(747));
const path_1 = __importDefault(__webpack_require__(622));
const supportedPlatform = `'linux', 'linux-musl', 'macos', 'solaris', 'windows'`;
@ -44851,7 +44868,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.AdoptDistribution = exports.AdoptImplementation = void 0;
const core = __importStar(__webpack_require__(470));
const tc = __importStar(__webpack_require__(139));
const tc = __importStar(__webpack_require__(186));
const fs_1 = __importDefault(__webpack_require__(747));
const path_1 = __importDefault(__webpack_require__(622));
const semver_1 = __importDefault(__webpack_require__(876));
@ -53946,6 +53963,15 @@ function checkKey(key) {
throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`);
}
}
/**
 * isFeatureAvailable to check the presence of Actions cache service
 *
 * @returns boolean return true if Actions cache service feature is available, otherwise false
 */
function isFeatureAvailable() {
return !!process.env['ACTIONS_CACHE_URL'];
}
exports.isFeatureAvailable = isFeatureAvailable;
/**
 * Restores cache from keys
 *
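Editorial note: the new @actions/cache isFeatureAvailable() shown above is just a check that ACTIONS_CACHE_URL is set; a sketch of how a caller might guard a cache save with it (the wrapper function name is illustrative).

import * as cache from '@actions/cache';

async function saveIfAvailable(paths: string[], key: string): Promise<void> {
  if (!cache.isFeatureAvailable()) {
    return; // no cache service URL in the environment, e.g. a GHES instance without the Actions cache service
  }
  await cache.saveCache(paths, key);
}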
@ -60015,7 +60041,7 @@ function run() {
const matchersPath = path.join(__dirname, '..', '..', '.github');
core.info(`##[add-matcher]${path.join(matchersPath, 'java.json')}`);
yield auth.configureAuthentication();
if (cache) {
if (cache && util_1.isCacheFeatureAvailable()) {
yield cache_1.restore(cache);
}
}
@ -88268,7 +88294,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.ZuluDistribution = void 0;
const core = __importStar(__webpack_require__(470));
const tc = __importStar(__webpack_require__(139));
const tc = __importStar(__webpack_require__(186));
const path_1 = __importDefault(__webpack_require__(622));
const fs_1 = __importDefault(__webpack_require__(747));
const semver_1 = __importDefault(__webpack_require__(876));
@ -99020,7 +99046,7 @@ exports.checkBypass = checkBypass;
/* 953 */
/***/ (function(module, __unusedexports, __webpack_require__) {

var rng = __webpack_require__(58);
var rng = __webpack_require__(139);
var bytesToUuid = __webpack_require__(722);

// **`v1()` - Generate time-based UUID**

26
package-lock.json
generated
@ -1,15 +1,15 @@
{
"name": "setup-java",
"version": "2.0.0",
"version": "3.1.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "setup-java",
"version": "2.0.0",
"version": "3.1.0",
"license": "MIT",
"dependencies": {
"@actions/cache": "^1.0.8",
"@actions/cache": "^2.0.0",
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.4",
"@actions/glob": "^0.2.0",
@ -32,17 +32,17 @@
}
},
"node_modules/@actions/cache": {
"version": "1.0.8",
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.8.tgz",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.0.tgz",
"integrity": "sha512-GWNNB67w93HGJRQXlsV56YqrdAuDoP3esK/mo5mzU8WoDCVjtQgJGsTdkYUX7brswtT7xnI30bWNo1WLKQ8FZQ==",
"integrity": "sha512-d7n8ul6HjWX6oDrNEPoqn8ZvqyyDhp9Uek6WOxALyxGVsXU+8+ND+viD3UfrXVWfs/GQiqI5Eq4cOozZj0yRFQ==",
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^1.0.9",
"@actions/io": "^1.0.1",
"@azure/ms-rest-js": "^2.0.7",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.1.2",
"@azure/storage-blob": "^12.8.0",
"semver": "^6.1.0",
"uuid": "^3.3.3"
}
@ -6662,17 +6662,17 @@
},
"dependencies": {
"@actions/cache": {
"version": "1.0.8",
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.8.tgz",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.0.tgz",
"integrity": "sha512-GWNNB67w93HGJRQXlsV56YqrdAuDoP3esK/mo5mzU8WoDCVjtQgJGsTdkYUX7brswtT7xnI30bWNo1WLKQ8FZQ==",
"integrity": "sha512-d7n8ul6HjWX6oDrNEPoqn8ZvqyyDhp9Uek6WOxALyxGVsXU+8+ND+viD3UfrXVWfs/GQiqI5Eq4cOozZj0yRFQ==",
"requires": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^1.0.9",
"@actions/io": "^1.0.1",
"@azure/ms-rest-js": "^2.0.7",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.1.2",
"@azure/storage-blob": "^12.8.0",
"semver": "^6.1.0",
"uuid": "^3.3.3"
},
@ -1,6 +1,6 @@
{
"name": "setup-java",
"version": "2.0.0",
"version": "3.1.0",
"private": true,
"description": "setup java action",
"main": "dist/setup/index.js",
@ -24,7 +24,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^1.0.8",
"@actions/cache": "^2.0.0",
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.4",
"@actions/glob": "^0.2.0",
@ -1,6 +1,6 @@
import * as core from '@actions/core';
import * as auth from './auth';
import { getBooleanInput } from './util';
import { getBooleanInput, isCacheFeatureAvailable } from './util';
import * as constants from './constants';
import { restore } from './cache';
import * as path from 'path';
@ -42,7 +42,7 @@ async function run() {
core.info(`##[add-matcher]${path.join(matchersPath, 'java.json')}`);

await auth.configureAuthentication();
if (cache) {
if (cache && isCacheFeatureAvailable()) {
await restore(cache);
}
} catch (error) {

22
src/util.ts
@ -2,6 +2,7 @@ import os from 'os';
import path from 'path';
import * as fs from 'fs';
import * as semver from 'semver';
import * as cache from '@actions/cache';
import * as core from '@actions/core';

import * as tc from '@actions/tool-cache';
@ -77,3 +78,24 @@ export function isJobStatusSuccess() {

return jobStatus === 'success';
}

export function isGhes(): boolean {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}

export function isCacheFeatureAvailable(): boolean {
if (!cache.isFeatureAvailable()) {
if (isGhes()) {
throw new Error(
'Caching is only supported on GHES version >= 3.5. If you are on a version >= 3.5, please check with your GHES admin if the Actions cache service is enabled or not.'
);
} else {
core.warning('The runner was not able to contact the cache service. Caching will be skipped');
}

return false;
}

return true;
}
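Editorial note: a minimal sketch (not part of this commit) of reusing the same guard in the post/cleanup step, assuming the existing save() helper exported from src/cache.ts.

import { isCacheFeatureAvailable } from './util';
import { save } from './cache';

async function runCleanup(cacheOption: string | undefined): Promise<void> {
  // mirrors the setup-step guard: skip when the cache service is unreachable,
  // or fail loudly on GHES versions older than 3.5
  if (cacheOption && isCacheFeatureAvailable()) {
    await save(cacheOption);
  }
}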