Detect cached folders from multiple directories (#735)
* Add project-dir
* Fix find lock file
* Remove package-dir input
* format & resolve conflicts
* Add unit tests
* build dist
* Apply change request fixes
* handle non-dir cache-dependency-path
* bump cache version
* run checks
* Handle globs in cacheDependencyPath
* refactor, introduce `cacheDependencyPathToProjectsDirectories`; it is necessary for the next PR related to yarn optimization
* Change requests
* Apply fixes
* review fixes
* add e2e
* Add unique
* review updates
* review updates, second stage
* Review fixes 3
* improve e2e tests
parent 698d50532e
commit 8170e22e8f
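In short, the change teaches the cache logic to resolve `cache-dependency-path` (which may hold several newline-separated glob patterns) into the set of sub-project directories that contain a matching lock file, and to ask the package manager in each of those directories where its cache folder lives. The following is a minimal sketch of that flow, based on the tests in this commit rather than on the action's actual implementation; `resolveYarnCacheDirs` is an illustrative name and `getCommandOutput` stands in for the utility the tests mock.

import * as glob from '@actions/glob';
import * as path from 'path';

// Illustrative stand-in for the utility mocked in the tests below:
// run `command` inside `cwd` and return its trimmed stdout.
type GetCommandOutput = (command: string, cwd: string) => Promise<string>;

// Expand the newline-separated globs to lock files, reduce them to unique
// project directories, then ask yarn in each directory for its cache folder
// (the yarn major version decides which command to run).
async function resolveYarnCacheDirs(
  cacheDependencyPath: string,
  getCommandOutput: GetCommandOutput
): Promise<string[]> {
  const globber = await glob.create(cacheDependencyPath);
  const lockFiles = await globber.glob();
  const projectDirs = Array.from(
    new Set(lockFiles.map(file => path.dirname(file)))
  );

  const cacheDirs = await Promise.all(
    projectDirs.map(async dir => {
      const version = await getCommandOutput('yarn --version', dir);
      const cacheCommand = version.startsWith('1.')
        ? 'yarn cache dir'
        : 'yarn config get cacheFolder';
      return getCommandOutput(cacheCommand, dir);
    })
  );
  return Array.from(new Set(cacheDirs)); // duplicates collapse to one entry
}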
27  .github/workflows/e2e-cache.yml  vendored
@@ -134,3 +134,30 @@ jobs:
      - name: Verify node and yarn
        run: __tests__/verify-node.sh "${{ matrix.node-version }}"
        shell: bash

  yarn-subprojects:
    name: Test yarn subprojects
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-subprojects.sh

      # expect
      # - no errors
      # - log
      #   ##[debug]Cache Paths:
      #   ##[debug]["sub2/.yarn/cache","sub3/.yarn/cache","../../../.cache/yarn/v6"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            **/*.lock
            yarn.lock
@@ -1,9 +1,10 @@
import os from 'os';
import * as fs from 'fs';
import fs from 'fs';
import * as path from 'path';
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as auth from '../src/authutil';
import * as cacheUtils from '../src/cache-utils';

let rcFile: string;

@@ -32,13 +32,13 @@ describe('cache-restore', () => {

  function findCacheFolder(command: string) {
    switch (command) {
      case utils.supportedPackageManagers.npm.getCacheFolderCommand:
      case 'npm config get cache':
        return npmCachePath;
      case utils.supportedPackageManagers.pnpm.getCacheFolderCommand:
      case 'pnpm store path --silent':
        return pnpmCachePath;
      case utils.supportedPackageManagers.yarn1.getCacheFolderCommand:
      case 'yarn cache dir':
        return yarn1CachePath;
      case utils.supportedPackageManagers.yarn2.getCacheFolderCommand:
      case 'yarn config get cacheFolder':
        return yarn2CachePath;
      default:
        return 'packge/not/found';
@@ -108,7 +108,7 @@ describe('cache-restore', () => {
  it.each([['npm7'], ['npm6'], ['pnpm6'], ['yarn1'], ['yarn2'], ['random']])(
    'Throw an error because %s is not supported',
    async packageManager => {
      await expect(restoreCache(packageManager)).rejects.toThrow(
      await expect(restoreCache(packageManager, '')).rejects.toThrow(
        `Caching for '${packageManager}' is not supported`
      );
    }
@@ -132,7 +132,7 @@ describe('cache-restore', () => {
        }
      });

      await restoreCache(packageManager);
      await restoreCache(packageManager, '');
      expect(hashFilesSpy).toHaveBeenCalled();
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache restored from key: node-cache-${platform}-${packageManager}-${fileHash}`
@@ -163,7 +163,7 @@ describe('cache-restore', () => {
      });

      restoreCacheSpy.mockImplementationOnce(() => undefined);
      await restoreCache(packageManager);
      await restoreCache(packageManager, '');
      expect(hashFilesSpy).toHaveBeenCalled();
      expect(infoSpy).toHaveBeenCalledWith(
        `${packageManager} cache is not found`
@@ -107,18 +107,20 @@ describe('run', () => {
  describe('Validate unchanged cache is not saved', () => {
    it('should not save cache for yarn1', async () => {
      inputs['cache'] = 'yarn';
      getStateSpy.mockImplementation(() => yarnFileHash);
      getCommandOutputSpy
        .mockImplementationOnce(() => '1.2.3')
        .mockImplementationOnce(() => `${commonPath}/yarn1`);
      getStateSpy.mockImplementation(key =>
        key === State.CachePrimaryKey || key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
      expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`);
      expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3');
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
      );
@@ -127,18 +129,20 @@ describe('run', () => {

    it('should not save cache for yarn2', async () => {
      inputs['cache'] = 'yarn';
      getStateSpy.mockImplementation(() => yarnFileHash);
      getCommandOutputSpy
        .mockImplementationOnce(() => '2.2.3')
        .mockImplementationOnce(() => `${commonPath}/yarn2`);
      getStateSpy.mockImplementation(key =>
        key === State.CachePrimaryKey || key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
      expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`);
      expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3');
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
      );
@@ -147,35 +151,40 @@ describe('run', () => {

    it('should not save cache for npm', async () => {
      inputs['cache'] = 'npm';
      getStateSpy.mockImplementation(() => npmFileHash);
      getStateSpy.mockImplementation(key =>
        key === State.CachePrimaryKey || key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
      );
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(setFailedSpy).not.toHaveBeenCalled();
    });

    it('should not save cache for pnpm', async () => {
      inputs['cache'] = 'pnpm';
      getStateSpy.mockImplementation(() => pnpmFileHash);
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`);
      getStateSpy.mockImplementation(key =>
        key === State.CachePrimaryKey || key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`);
      expect(infoSpy).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.`
      );
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(setFailedSpy).not.toHaveBeenCalled();
    });
  });
@@ -183,24 +192,22 @@ describe('run', () => {
  describe('action saves the cache', () => {
    it('saves cache from yarn 1', async () => {
      inputs['cache'] = 'yarn';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return yarnFileHash;
        } else {
          return npmFileHash;
        }
      });
      getCommandOutputSpy
        .mockImplementationOnce(() => '1.2.3')
        .mockImplementationOnce(() => `${commonPath}/yarn1`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePrimaryKey
          ? npmFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
      expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`);
      expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3');
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
      );
@@ -213,24 +220,22 @@ describe('run', () => {

    it('saves cache from yarn 2', async () => {
      inputs['cache'] = 'yarn';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return yarnFileHash;
        } else {
          return npmFileHash;
        }
      });
      getCommandOutputSpy
        .mockImplementationOnce(() => '2.2.3')
        .mockImplementationOnce(() => `${commonPath}/yarn2`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CacheMatchedKey
          ? yarnFileHash
          : key === State.CachePrimaryKey
          ? npmFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
      expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`);
      expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3');
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
      );
@@ -243,21 +248,22 @@ describe('run', () => {

    it('saves cache from npm', async () => {
      inputs['cache'] = 'npm';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return npmFileHash;
        } else {
          return yarnFileHash;
        }
      });
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CacheMatchedKey
          ? npmFileHash
          : key === State.CachePrimaryKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
      );
@@ -270,21 +276,22 @@ describe('run', () => {

    it('saves cache from pnpm', async () => {
      inputs['cache'] = 'pnpm';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return pnpmFileHash;
        } else {
          return npmFileHash;
        }
      });
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CacheMatchedKey
          ? pnpmFileHash
          : key === State.CachePrimaryKey
          ? npmFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );

      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`);
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.`
      );
@@ -297,14 +304,15 @@ describe('run', () => {

    it('save with -1 cacheId , should not fail workflow', async () => {
      inputs['cache'] = 'npm';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return npmFileHash;
        } else {
          return yarnFileHash;
        }
      });
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CacheMatchedKey
          ? npmFileHash
          : key === State.CachePrimaryKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );
      saveCacheSpy.mockImplementation(() => {
        return -1;
      });
@@ -312,9 +320,9 @@ describe('run', () => {
      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
      );
@@ -327,14 +335,15 @@ describe('run', () => {

    it('saves with error from toolkit, should fail workflow', async () => {
      inputs['cache'] = 'npm';
      getStateSpy.mockImplementation((name: string) => {
        if (name === State.CacheMatchedKey) {
          return npmFileHash;
        } else {
          return yarnFileHash;
        }
      });
      getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
      getStateSpy.mockImplementation((key: string) =>
        key === State.CacheMatchedKey
          ? npmFileHash
          : key === State.CachePrimaryKey
          ? yarnFileHash
          : key === State.CachePaths
          ? '["/foo/bar"]'
          : 'not expected'
      );
      saveCacheSpy.mockImplementation(() => {
        throw new cache.ValidationError('Validation failed');
      });
@@ -342,9 +351,9 @@ describe('run', () => {
      await run();

      expect(getInputSpy).toHaveBeenCalled();
      expect(getStateSpy).toHaveBeenCalledTimes(2);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
      expect(getStateSpy).toHaveBeenCalledTimes(3);
      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
      expect(debugSpy).toHaveBeenCalledTimes(0);
      expect(infoSpy).not.toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
      );

@@ -2,7 +2,17 @@ import * as core from '@actions/core';
import * as cache from '@actions/cache';
import path from 'path';
import * as utils from '../src/cache-utils';
import {PackageManagerInfo, isCacheFeatureAvailable} from '../src/cache-utils';
import {
  PackageManagerInfo,
  isCacheFeatureAvailable,
  supportedPackageManagers,
  getCommandOutput
} from '../src/cache-utils';
import fs from 'fs';
import * as cacheUtils from '../src/cache-utils';
import * as glob from '@actions/glob';
import {Globber} from '@actions/glob';
import {MockGlobber} from './mock/glob-mock';

describe('cache-utils', () => {
  const versionYarn1 = '1.2.3';
@@ -30,7 +40,7 @@ describe('cache-utils', () => {
  it.each<[string, PackageManagerInfo | null]>([
    ['npm', utils.supportedPackageManagers.npm],
    ['pnpm', utils.supportedPackageManagers.pnpm],
    ['yarn', utils.supportedPackageManagers.yarn1],
    ['yarn', utils.supportedPackageManagers.yarn],
    ['yarn1', null],
    ['yarn2', null],
    ['npm7', null]
@@ -72,4 +82,261 @@ describe('cache-utils', () => {
    jest.resetAllMocks();
    jest.clearAllMocks();
  });

  describe('getCacheDirectoriesPaths', () => {
    let existsSpy: jest.SpyInstance;
    let lstatSpy: jest.SpyInstance;
    let globCreateSpy: jest.SpyInstance;

    beforeEach(() => {
      existsSpy = jest.spyOn(fs, 'existsSync');
      existsSpy.mockImplementation(() => true);

      lstatSpy = jest.spyOn(fs, 'lstatSync');
      lstatSpy.mockImplementation(arg => ({
        isDirectory: () => true
      }));

      globCreateSpy = jest.spyOn(glob, 'create');

      globCreateSpy.mockImplementation(
        (pattern: string): Promise<Globber> =>
          MockGlobber.create(['/foo', '/bar'])
      );
    });

    afterEach(() => {
      existsSpy.mockRestore();
      lstatSpy.mockRestore();
      globCreateSpy.mockRestore();
    });

    it.each([
      [supportedPackageManagers.npm, ''],
      [supportedPackageManagers.npm, '/dir/file.lock'],
      [supportedPackageManagers.npm, '/**/file.lock'],
      [supportedPackageManagers.pnpm, ''],
      [supportedPackageManagers.pnpm, '/dir/file.lock'],
      [supportedPackageManagers.pnpm, '/**/file.lock']
    ])(
      'getCacheDirectoriesPaths should return one dir for non yarn',
      async (packageManagerInfo, cacheDependency) => {
        getCommandOutputSpy.mockImplementation(() => 'foo');

        const dirs = await cacheUtils.getCacheDirectories(
          packageManagerInfo,
          cacheDependency
        );
        expect(dirs).toEqual(['foo']);
        // to do not call for a version
        // call once for get cache folder
        expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
      }
    );

    it('getCacheDirectoriesPaths should return one dir for yarn without cacheDependency', async () => {
      getCommandOutputSpy.mockImplementation(() => 'foo');

      const dirs = await cacheUtils.getCacheDirectories(
        supportedPackageManagers.yarn,
        ''
      );
      expect(dirs).toEqual(['foo']);
    });

    it.each([
      [supportedPackageManagers.npm, ''],
      [supportedPackageManagers.npm, '/dir/file.lock'],
      [supportedPackageManagers.npm, '/**/file.lock'],
      [supportedPackageManagers.pnpm, ''],
      [supportedPackageManagers.pnpm, '/dir/file.lock'],
      [supportedPackageManagers.pnpm, '/**/file.lock'],
      [supportedPackageManagers.yarn, ''],
      [supportedPackageManagers.yarn, '/dir/file.lock'],
      [supportedPackageManagers.yarn, '/**/file.lock']
    ])(
      'getCacheDirectoriesPaths should throw for getCommandOutput returning empty',
      async (packageManagerInfo, cacheDependency) => {
        getCommandOutputSpy.mockImplementation((command: string) =>
          // return empty string to indicate getCacheFolderPath failed
          // --version still works
          command.includes('version') ? '1.' : ''
        );

        await expect(
          cacheUtils.getCacheDirectories(packageManagerInfo, cacheDependency)
        ).rejects.toThrow(); //'Could not get cache folder path for /dir');
      }
    );

    it.each([
      [supportedPackageManagers.yarn, '/dir/file.lock'],
      [supportedPackageManagers.yarn, '/**/file.lock']
    ])(
      'getCacheDirectoriesPaths should nothrow in case of having not directories',
      async (packageManagerInfo, cacheDependency) => {
        lstatSpy.mockImplementation(arg => ({
          isDirectory: () => false
        }));

        await cacheUtils.getCacheDirectories(
          packageManagerInfo,
          cacheDependency
        );
        expect(warningSpy).toHaveBeenCalledTimes(1);
        expect(warningSpy).toHaveBeenCalledWith(
          `No existing directories found containing cache-dependency-path="${cacheDependency}"`
        );
      }
    );

    it.each(['1.1.1', '2.2.2'])(
      'getCacheDirectoriesPaths yarn v%s should return one dir without cacheDependency',
      async version => {
        getCommandOutputSpy.mockImplementationOnce(() => version);
        getCommandOutputSpy.mockImplementationOnce(() => `foo${version}`);

        const dirs = await cacheUtils.getCacheDirectories(
          supportedPackageManagers.yarn,
          ''
        );
        expect(dirs).toEqual([`foo${version}`]);
      }
    );

    it.each(['1.1.1', '2.2.2'])(
      'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency',
      async version => {
        let dirNo = 1;
        getCommandOutputSpy.mockImplementation((command: string) =>
          command.includes('version') ? version : `file_${version}_${dirNo++}`
        );
        globCreateSpy.mockImplementation(
          (pattern: string): Promise<Globber> =>
            MockGlobber.create(['/tmp/dir1/file', '/tmp/dir2/file'])
        );

        const dirs = await cacheUtils.getCacheDirectories(
          supportedPackageManagers.yarn,
          '/tmp/**/file'
        );
        expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]);
      }
    );

    it.each(['1.1.1', '2.2.2'])(
      'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency expanding to duplicates',
      async version => {
        let dirNo = 1;
        getCommandOutputSpy.mockImplementation((command: string) =>
          command.includes('version') ? version : `file_${version}_${dirNo++}`
        );
        globCreateSpy.mockImplementation(
          (pattern: string): Promise<Globber> =>
            MockGlobber.create([
              '/tmp/dir1/file',
              '/tmp/dir2/file',
              '/tmp/dir1/file'
            ])
        );

        const dirs = await cacheUtils.getCacheDirectories(
          supportedPackageManagers.yarn,
          '/tmp/**/file'
        );
        expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]);
      }
    );

    it.each(['1.1.1', '2.2.2'])(
      'getCacheDirectoriesPaths yarn v%s should return 2 uniq dirs despite duplicate cache directories',
      async version => {
        let dirNo = 1;
        getCommandOutputSpy.mockImplementation((command: string) =>
          command.includes('version')
            ? version
            : `file_${version}_${dirNo++ % 2}`
        );
        globCreateSpy.mockImplementation(
          (pattern: string): Promise<Globber> =>
            MockGlobber.create([
              '/tmp/dir1/file',
              '/tmp/dir2/file',
              '/tmp/dir3/file'
            ])
        );

        const dirs = await cacheUtils.getCacheDirectories(
          supportedPackageManagers.yarn,
          '/tmp/**/file'
        );
        expect(dirs).toEqual([`file_${version}_1`, `file_${version}_0`]);
        expect(getCommandOutputSpy).toHaveBeenCalledTimes(6);
        expect(getCommandOutputSpy).toHaveBeenCalledWith(
          'yarn --version',
          '/tmp/dir1'
        );
        expect(getCommandOutputSpy).toHaveBeenCalledWith(
          'yarn --version',
          '/tmp/dir2'
        );
        expect(getCommandOutputSpy).toHaveBeenCalledWith(
          'yarn --version',
          '/tmp/dir3'
        );
        expect(getCommandOutputSpy).toHaveBeenCalledWith(
          version.startsWith('1.')
            ? 'yarn cache dir'
            : 'yarn config get cacheFolder',
          '/tmp/dir1'
        );
        expect(getCommandOutputSpy).toHaveBeenCalledWith(
          version.startsWith('1.')
            ? 'yarn cache dir'
            : 'yarn config get cacheFolder',
          '/tmp/dir2'
        );
        expect(getCommandOutputSpy).toHaveBeenCalledWith(
          version.startsWith('1.')
            ? 'yarn cache dir'
            : 'yarn config get cacheFolder',
          '/tmp/dir3'
        );
      }
    );

    it.each(['1.1.1', '2.2.2'])(
      'getCacheDirectoriesPaths yarn v%s should return 4 dirs with multiple globs',
      async version => {
        // simulate wrong indents
        const cacheDependencyPath = `/tmp/dir1/file
            /tmp/dir2/file
                /tmp/**/file
        `;
        globCreateSpy.mockImplementation(
          (pattern: string): Promise<Globber> =>
            MockGlobber.create([
              '/tmp/dir1/file',
              '/tmp/dir2/file',
              '/tmp/dir3/file',
              '/tmp/dir4/file'
            ])
        );
        let dirNo = 1;
        getCommandOutputSpy.mockImplementation((command: string) =>
          command.includes('version') ? version : `file_${version}_${dirNo++}`
        );
        const dirs = await cacheUtils.getCacheDirectories(
          supportedPackageManagers.yarn,
          cacheDependencyPath
        );
        expect(dirs).toEqual([
          `file_${version}_1`,
          `file_${version}_2`,
          `file_${version}_3`,
          `file_${version}_4`
        ]);
      }
    );
  });
});

18  __tests__/mock/glob-mock.test.ts  Normal file
@@ -0,0 +1,18 @@
import {MockGlobber} from './glob-mock';

describe('mocked globber tests', () => {
  it('globber should return generator', async () => {
    const globber = new MockGlobber(['aaa', 'bbb', 'ccc']);
    const generator = globber.globGenerator();
    const result: string[] = [];
    for await (const itemPath of generator) {
      result.push(itemPath);
    }
    expect(result).toEqual(['aaa', 'bbb', 'ccc']);
  });
  it('globber should return glob', async () => {
    const globber = new MockGlobber(['aaa', 'bbb', 'ccc']);
    const result: string[] = await globber.glob();
    expect(result).toEqual(['aaa', 'bbb', 'ccc']);
  });
});
29  __tests__/mock/glob-mock.ts  Normal file
@@ -0,0 +1,29 @@
import {Globber} from '@actions/glob';

export class MockGlobber implements Globber {
  private readonly expected: string[];
  constructor(expected: string[]) {
    this.expected = expected;
  }
  getSearchPaths(): string[] {
    return this.expected.slice();
  }

  async glob(): Promise<string[]> {
    const result: string[] = [];
    for await (const itemPath of this.globGenerator()) {
      result.push(itemPath);
    }
    return result;
  }

  async *globGenerator(): AsyncGenerator<string, void> {
    for (const e of this.expected) {
      yield e;
    }
  }

  static async create(expected: string[]): Promise<MockGlobber> {
    return new MockGlobber(expected);
  }
}
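For reference, a spec routes the code under test through this mock by stubbing `glob.create`, as the cache-utils specs above do. A minimal sketch of that wiring, assuming the spec sits next to the mock and using arbitrary example lock-file paths:

import * as glob from '@actions/glob';
import {Globber} from '@actions/glob';
import {MockGlobber} from './glob-mock';

// Every glob.create() call made by the code under test now resolves to a
// fixed list of paths instead of scanning the real file system.
const globCreateSpy = jest
  .spyOn(glob, 'create')
  .mockImplementation(
    (pattern: string): Promise<Globber> =>
      MockGlobber.create(['/tmp/dir1/yarn.lock', '/tmp/dir2/yarn.lock'])
  );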
48  __tests__/prepare-subprojects.sh  Executable file
@@ -0,0 +1,48 @@
#!/bin/sh -e
export YARN_ENABLE_IMMUTABLE_INSTALLS=false
rm package.json
rm package-lock.json
echo "create yarn2 project in the sub2"
mkdir sub2
cd sub2
cat <<EOT >package.json
{
  "name": "subproject",
  "dependencies": {
    "random": "^3.0.6",
    "uuid": "^9.0.0"
  }
}
EOT
yarn set version 2.4.3
yarn install

echo "create yarn3 project in the sub3"
cd ..
mkdir sub3
cd sub3
cat <<EOT >package.json
{
  "name": "subproject",
  "dependencies": {
    "random": "^3.0.6",
    "uuid": "^9.0.0"
  }
}
EOT
yarn set version 3.5.1
yarn install

echo "create yarn1 project in the root"
cd ..
cat <<EOT >package.json
{
  "name": "subproject",
  "dependencies": {
    "random": "^3.0.6",
    "uuid": "^9.0.0"
  }
}
EOT
yarn set version 1.22.19
yarn install
1436  dist/cache-save/index.js  vendored
@@ -6480,6 +6480,1193 @@ class ExecState extends events.EventEmitter {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 8090:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.hashFiles = exports.create = void 0;
|
||||
const internal_globber_1 = __nccwpck_require__(8298);
|
||||
const internal_hash_files_1 = __nccwpck_require__(2448);
|
||||
/**
|
||||
* Constructs a globber
|
||||
*
|
||||
* @param patterns Patterns separated by newlines
|
||||
* @param options Glob options
|
||||
*/
|
||||
function create(patterns, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return yield internal_globber_1.DefaultGlobber.create(patterns, options);
|
||||
});
|
||||
}
|
||||
exports.create = create;
|
||||
/**
|
||||
* Computes the sha256 hash of a glob
|
||||
*
|
||||
* @param patterns Patterns separated by newlines
|
||||
* @param options Glob options
|
||||
*/
|
||||
function hashFiles(patterns, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let followSymbolicLinks = true;
|
||||
if (options && typeof options.followSymbolicLinks === 'boolean') {
|
||||
followSymbolicLinks = options.followSymbolicLinks;
|
||||
}
|
||||
const globber = yield create(patterns, { followSymbolicLinks });
|
||||
return internal_hash_files_1.hashFiles(globber);
|
||||
});
|
||||
}
|
||||
exports.hashFiles = hashFiles;
|
||||
//# sourceMappingURL=glob.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 1026:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.getOptions = void 0;
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
/**
|
||||
* Returns a copy with defaults filled in.
|
||||
*/
|
||||
function getOptions(copy) {
|
||||
const result = {
|
||||
followSymbolicLinks: true,
|
||||
implicitDescendants: true,
|
||||
matchDirectories: true,
|
||||
omitBrokenSymbolicLinks: true
|
||||
};
|
||||
if (copy) {
|
||||
if (typeof copy.followSymbolicLinks === 'boolean') {
|
||||
result.followSymbolicLinks = copy.followSymbolicLinks;
|
||||
core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
|
||||
}
|
||||
if (typeof copy.implicitDescendants === 'boolean') {
|
||||
result.implicitDescendants = copy.implicitDescendants;
|
||||
core.debug(`implicitDescendants '${result.implicitDescendants}'`);
|
||||
}
|
||||
if (typeof copy.matchDirectories === 'boolean') {
|
||||
result.matchDirectories = copy.matchDirectories;
|
||||
core.debug(`matchDirectories '${result.matchDirectories}'`);
|
||||
}
|
||||
if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
|
||||
result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
|
||||
core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
exports.getOptions = getOptions;
|
||||
//# sourceMappingURL=internal-glob-options-helper.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 8298:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
|
||||
var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var g = generator.apply(thisArg, _arguments || []), i, q = [];
|
||||
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
|
||||
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
|
||||
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
|
||||
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
|
||||
function fulfill(value) { resume("next", value); }
|
||||
function reject(value) { resume("throw", value); }
|
||||
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.DefaultGlobber = void 0;
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
const globOptionsHelper = __importStar(__nccwpck_require__(1026));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const patternHelper = __importStar(__nccwpck_require__(9005));
|
||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||
const internal_pattern_1 = __nccwpck_require__(4536);
|
||||
const internal_search_state_1 = __nccwpck_require__(9117);
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
class DefaultGlobber {
|
||||
constructor(options) {
|
||||
this.patterns = [];
|
||||
this.searchPaths = [];
|
||||
this.options = globOptionsHelper.getOptions(options);
|
||||
}
|
||||
getSearchPaths() {
|
||||
// Return a copy
|
||||
return this.searchPaths.slice();
|
||||
}
|
||||
glob() {
|
||||
var e_1, _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const result = [];
|
||||
try {
|
||||
for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
|
||||
const itemPath = _c.value;
|
||||
result.push(itemPath);
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
globGenerator() {
|
||||
return __asyncGenerator(this, arguments, function* globGenerator_1() {
|
||||
// Fill in defaults options
|
||||
const options = globOptionsHelper.getOptions(this.options);
|
||||
// Implicit descendants?
|
||||
const patterns = [];
|
||||
for (const pattern of this.patterns) {
|
||||
patterns.push(pattern);
|
||||
if (options.implicitDescendants &&
|
||||
(pattern.trailingSeparator ||
|
||||
pattern.segments[pattern.segments.length - 1] !== '**')) {
|
||||
patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));
|
||||
}
|
||||
}
|
||||
// Push the search paths
|
||||
const stack = [];
|
||||
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
|
||||
core.debug(`Search path '${searchPath}'`);
|
||||
// Exists?
|
||||
try {
|
||||
// Intentionally using lstat. Detection for broken symlink
|
||||
// will be performed later (if following symlinks).
|
||||
yield __await(fs.promises.lstat(searchPath));
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
continue;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
|
||||
}
|
||||
// Search
|
||||
const traversalChain = []; // used to detect cycles
|
||||
while (stack.length) {
|
||||
// Pop
|
||||
const item = stack.pop();
|
||||
// Match?
|
||||
const match = patternHelper.match(patterns, item.path);
|
||||
const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
|
||||
if (!match && !partialMatch) {
|
||||
continue;
|
||||
}
|
||||
// Stat
|
||||
const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
|
||||
// Broken symlink, or symlink cycle detected, or no longer exists
|
||||
);
|
||||
// Broken symlink, or symlink cycle detected, or no longer exists
|
||||
if (!stats) {
|
||||
continue;
|
||||
}
|
||||
// Directory
|
||||
if (stats.isDirectory()) {
|
||||
// Matched
|
||||
if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) {
|
||||
yield yield __await(item.path);
|
||||
}
|
||||
// Descend?
|
||||
else if (!partialMatch) {
|
||||
continue;
|
||||
}
|
||||
// Push the child items in reverse
|
||||
const childLevel = item.level + 1;
|
||||
const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
|
||||
stack.push(...childItems.reverse());
|
||||
}
|
||||
// File
|
||||
else if (match & internal_match_kind_1.MatchKind.File) {
|
||||
yield yield __await(item.path);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Constructs a DefaultGlobber
|
||||
*/
|
||||
static create(patterns, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const result = new DefaultGlobber(options);
|
||||
if (IS_WINDOWS) {
|
||||
patterns = patterns.replace(/\r\n/g, '\n');
|
||||
patterns = patterns.replace(/\r/g, '\n');
|
||||
}
|
||||
const lines = patterns.split('\n').map(x => x.trim());
|
||||
for (const line of lines) {
|
||||
// Empty or comment
|
||||
if (!line || line.startsWith('#')) {
|
||||
continue;
|
||||
}
|
||||
// Pattern
|
||||
else {
|
||||
result.patterns.push(new internal_pattern_1.Pattern(line));
|
||||
}
|
||||
}
|
||||
result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
|
||||
return result;
|
||||
});
|
||||
}
|
||||
static stat(item, options, traversalChain) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// Note:
|
||||
// `stat` returns info about the target of a symlink (or symlink chain)
|
||||
// `lstat` returns info about a symlink itself
|
||||
let stats;
|
||||
if (options.followSymbolicLinks) {
|
||||
try {
|
||||
// Use `stat` (following symlinks)
|
||||
stats = yield fs.promises.stat(item.path);
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
if (options.omitBrokenSymbolicLinks) {
|
||||
core.debug(`Broken symlink '${item.path}'`);
|
||||
return undefined;
|
||||
}
|
||||
throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Use `lstat` (not following symlinks)
|
||||
stats = yield fs.promises.lstat(item.path);
|
||||
}
|
||||
// Note, isDirectory() returns false for the lstat of a symlink
|
||||
if (stats.isDirectory() && options.followSymbolicLinks) {
|
||||
// Get the realpath
|
||||
const realPath = yield fs.promises.realpath(item.path);
|
||||
// Fixup the traversal chain to match the item level
|
||||
while (traversalChain.length >= item.level) {
|
||||
traversalChain.pop();
|
||||
}
|
||||
// Test for a cycle
|
||||
if (traversalChain.some((x) => x === realPath)) {
|
||||
core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
|
||||
return undefined;
|
||||
}
|
||||
// Update the traversal chain
|
||||
traversalChain.push(realPath);
|
||||
}
|
||||
return stats;
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.DefaultGlobber = DefaultGlobber;
|
||||
//# sourceMappingURL=internal-globber.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2448:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.hashFiles = void 0;
|
||||
const crypto = __importStar(__nccwpck_require__(6113));
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
const stream = __importStar(__nccwpck_require__(2781));
|
||||
const util = __importStar(__nccwpck_require__(3837));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
function hashFiles(globber) {
|
||||
var e_1, _a;
|
||||
var _b;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let hasMatch = false;
|
||||
const githubWorkspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
|
||||
const result = crypto.createHash('sha256');
|
||||
let count = 0;
|
||||
try {
|
||||
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
|
||||
const file = _d.value;
|
||||
core.debug(file);
|
||||
if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
|
||||
core.debug(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
|
||||
continue;
|
||||
}
|
||||
if (fs.statSync(file).isDirectory()) {
|
||||
core.debug(`Skip directory '${file}'.`);
|
||||
continue;
|
||||
}
|
||||
const hash = crypto.createHash('sha256');
|
||||
const pipeline = util.promisify(stream.pipeline);
|
||||
yield pipeline(fs.createReadStream(file), hash);
|
||||
result.write(hash.digest());
|
||||
count++;
|
||||
if (!hasMatch) {
|
||||
hasMatch = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
result.end();
|
||||
if (hasMatch) {
|
||||
core.debug(`Found ${count} files to hash.`);
|
||||
return result.digest('hex');
|
||||
}
|
||||
else {
|
||||
core.debug(`No matches found for glob`);
|
||||
return '';
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.hashFiles = hashFiles;
|
||||
//# sourceMappingURL=internal-hash-files.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 1063:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.MatchKind = void 0;
|
||||
/**
|
||||
* Indicates whether a pattern matches a path
|
||||
*/
|
||||
var MatchKind;
|
||||
(function (MatchKind) {
|
||||
/** Not matched */
|
||||
MatchKind[MatchKind["None"] = 0] = "None";
|
||||
/** Matched if the path is a directory */
|
||||
MatchKind[MatchKind["Directory"] = 1] = "Directory";
|
||||
/** Matched if the path is a regular file */
|
||||
MatchKind[MatchKind["File"] = 2] = "File";
|
||||
/** Matched */
|
||||
MatchKind[MatchKind["All"] = 3] = "All";
|
||||
})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
|
||||
//# sourceMappingURL=internal-match-kind.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 1849:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
|
||||
*
|
||||
* For example, on Linux/macOS:
|
||||
* - `/ => /`
|
||||
* - `/hello => /`
|
||||
*
|
||||
* For example, on Windows:
|
||||
* - `C:\ => C:\`
|
||||
* - `C:\hello => C:\`
|
||||
* - `C: => C:`
|
||||
* - `C:hello => C:`
|
||||
* - `\ => \`
|
||||
* - `\hello => \`
|
||||
* - `\\hello => \\hello`
|
||||
* - `\\hello\world => \\hello\world`
|
||||
*/
|
||||
function dirname(p) {
|
||||
// Normalize slashes and trim unnecessary trailing slash
|
||||
p = safeTrimTrailingSeparator(p);
|
||||
// Windows UNC root, e.g. \\hello or \\hello\world
|
||||
if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) {
|
||||
return p;
|
||||
}
|
||||
// Get dirname
|
||||
let result = path.dirname(p);
|
||||
// Trim trailing slash for Windows UNC root, e.g. \\hello\world\
|
||||
if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) {
|
||||
result = safeTrimTrailingSeparator(result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
exports.dirname = dirname;
|
||||
/**
|
||||
* Roots the path if not already rooted. On Windows, relative roots like `\`
|
||||
* or `C:` are expanded based on the current working directory.
|
||||
*/
|
||||
function ensureAbsoluteRoot(root, itemPath) {
|
||||
assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
|
||||
assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||
// Already rooted
|
||||
if (hasAbsoluteRoot(itemPath)) {
|
||||
return itemPath;
|
||||
}
|
||||
// Windows
|
||||
if (IS_WINDOWS) {
|
||||
// Check for itemPath like C: or C:foo
|
||||
if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
|
||||
let cwd = process.cwd();
|
||||
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||
// Drive letter matches cwd? Expand to cwd
|
||||
if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
|
||||
// Drive only, e.g. C:
|
||||
if (itemPath.length === 2) {
|
||||
// Preserve specified drive letter case (upper or lower)
|
||||
return `${itemPath[0]}:\\${cwd.substr(3)}`;
|
||||
}
|
||||
// Drive + path, e.g. C:foo
|
||||
else {
|
||||
if (!cwd.endsWith('\\')) {
|
||||
cwd += '\\';
|
||||
}
|
||||
// Preserve specified drive letter case (upper or lower)
|
||||
return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`;
|
||||
}
|
||||
}
|
||||
// Different drive
|
||||
else {
|
||||
return `${itemPath[0]}:\\${itemPath.substr(2)}`;
|
||||
}
|
||||
}
|
||||
// Check for itemPath like \ or \foo
|
||||
else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
|
||||
const cwd = process.cwd();
|
||||
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||
return `${cwd[0]}:\\${itemPath.substr(1)}`;
|
||||
}
|
||||
}
|
||||
assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
|
||||
// Otherwise ensure root ends with a separator
|
||||
if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
|
||||
// Intentionally empty
|
||||
}
|
||||
else {
|
||||
// Append separator
|
||||
root += path.sep;
|
||||
}
|
||||
return root + itemPath;
|
||||
}
|
||||
exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
|
||||
/**
|
||||
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
|
||||
* `\\hello\share` and `C:\hello` (and using alternate separator).
|
||||
*/
|
||||
function hasAbsoluteRoot(itemPath) {
|
||||
assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||
// Normalize separators
|
||||
itemPath = normalizeSeparators(itemPath);
|
||||
// Windows
|
||||
if (IS_WINDOWS) {
|
||||
// E.g. \\hello\share or C:\hello
|
||||
return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath);
|
||||
}
|
||||
// E.g. /hello
|
||||
return itemPath.startsWith('/');
|
||||
}
|
||||
exports.hasAbsoluteRoot = hasAbsoluteRoot;
|
||||
/**
|
||||
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
|
||||
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
|
||||
*/
|
||||
function hasRoot(itemPath) {
|
||||
assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
|
||||
// Normalize separators
|
||||
itemPath = normalizeSeparators(itemPath);
|
||||
// Windows
|
||||
if (IS_WINDOWS) {
|
||||
// E.g. \ or \hello or \\hello
|
||||
// E.g. C: or C:\hello
|
||||
return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath);
|
||||
}
|
||||
// E.g. /hello
|
||||
return itemPath.startsWith('/');
|
||||
}
|
||||
exports.hasRoot = hasRoot;
|
||||
/**
|
||||
* Removes redundant slashes and converts `/` to `\` on Windows
|
||||
*/
|
||||
function normalizeSeparators(p) {
|
||||
p = p || '';
|
||||
// Windows
|
||||
if (IS_WINDOWS) {
|
||||
// Convert slashes on Windows
|
||||
p = p.replace(/\//g, '\\');
|
||||
// Remove redundant slashes
|
||||
const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello
|
||||
return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC
|
||||
}
|
||||
// Remove redundant slashes
|
||||
return p.replace(/\/\/+/g, '/');
|
||||
}
|
||||
exports.normalizeSeparators = normalizeSeparators;
|
||||
/**
|
||||
* Normalizes the path separators and trims the trailing separator (when safe).
|
||||
* For example, `/foo/ => /foo` but `/ => /`
|
||||
*/
|
||||
function safeTrimTrailingSeparator(p) {
|
||||
// Short-circuit if empty
|
||||
if (!p) {
|
||||
return '';
|
||||
}
|
||||
// Normalize separators
|
||||
p = normalizeSeparators(p);
|
||||
// No trailing slash
|
||||
if (!p.endsWith(path.sep)) {
|
||||
return p;
|
||||
}
|
||||
// Check '/' on Linux/macOS and '\' on Windows
|
||||
if (p === path.sep) {
|
||||
return p;
|
||||
}
|
||||
// On Windows check if drive root. E.g. C:\
|
||||
if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) {
|
||||
return p;
|
||||
}
|
||||
// Otherwise trim trailing slash
|
||||
return p.substr(0, p.length - 1);
|
||||
}
|
||||
exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
|
||||
//# sourceMappingURL=internal-path-helper.js.map
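// Editor's note (illustrative sketch, not part of the vendored bundle above): the path
// helpers normalize separators and root relative paths. The POSIX-only approximation below
// is an assumption meant to clarify the semantics; the real code also handles Windows drive
// roots (C:, C:foo) and UNC paths (\\server\share).
import * as path from 'path';

function normalizeSeparatorsPosix(p: string): string {
  // collapse redundant '/'; the Windows branch above additionally converts '/' to '\'
  return (p || '').replace(/\/\/+/g, '/');
}

function safeTrimTrailingSeparatorPosix(p: string): string {
  if (!p) return '';
  p = normalizeSeparatorsPosix(p);
  // keep the root '/' itself, otherwise drop a single trailing separator
  return p !== path.sep && p.endsWith(path.sep) ? p.slice(0, -1) : p;
}

// normalizeSeparatorsPosix('/foo//bar/')      -> '/foo/bar/'
// safeTrimTrailingSeparatorPosix('/foo/bar/') -> '/foo/bar'
// safeTrimTrailingSeparatorPosix('/')         -> '/'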
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 6836:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.Path = void 0;
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Helper class for parsing paths into segments
|
||||
*/
|
||||
class Path {
|
||||
/**
|
||||
* Constructs a Path
|
||||
* @param itemPath Path or array of segments
|
||||
*/
|
||||
constructor(itemPath) {
|
||||
this.segments = [];
|
||||
// String
|
||||
if (typeof itemPath === 'string') {
|
||||
assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
|
||||
// Normalize slashes and trim unnecessary trailing slash
|
||||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||||
// Not rooted
|
||||
if (!pathHelper.hasRoot(itemPath)) {
|
||||
this.segments = itemPath.split(path.sep);
|
||||
}
|
||||
// Rooted
|
||||
else {
|
||||
// Add all segments, while not at the root
|
||||
let remaining = itemPath;
|
||||
let dir = pathHelper.dirname(remaining);
|
||||
while (dir !== remaining) {
|
||||
// Add the segment
|
||||
const basename = path.basename(remaining);
|
||||
this.segments.unshift(basename);
|
||||
// Truncate the last segment
|
||||
remaining = dir;
|
||||
dir = pathHelper.dirname(remaining);
|
||||
}
|
||||
// Remainder is the root
|
||||
this.segments.unshift(remaining);
|
||||
}
|
||||
}
|
||||
// Array
|
||||
else {
|
||||
// Must not be empty
|
||||
assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
|
||||
// Each segment
|
||||
for (let i = 0; i < itemPath.length; i++) {
|
||||
let segment = itemPath[i];
|
||||
// Must not be empty
|
||||
assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
|
||||
// Normalize slashes
|
||||
segment = pathHelper.normalizeSeparators(itemPath[i]);
|
||||
// Root segment
|
||||
if (i === 0 && pathHelper.hasRoot(segment)) {
|
||||
segment = pathHelper.safeTrimTrailingSeparator(segment);
|
||||
assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
|
||||
this.segments.push(segment);
|
||||
}
|
||||
// All other segments
|
||||
else {
|
||||
// Must not contain slash
|
||||
assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
|
||||
this.segments.push(segment);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Converts the path to its string representation
|
||||
*/
|
||||
toString() {
|
||||
// First segment
|
||||
let result = this.segments[0];
|
||||
// All others
|
||||
let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
|
||||
for (let i = 1; i < this.segments.length; i++) {
|
||||
if (skipSlash) {
|
||||
skipSlash = false;
|
||||
}
|
||||
else {
|
||||
result += path.sep;
|
||||
}
|
||||
result += this.segments[i];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
exports.Path = Path;
|
||||
//# sourceMappingURL=internal-path.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 9005:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
|
||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Given an array of patterns, returns an array of paths to search.
|
||||
* Duplicates and paths under other included paths are filtered out.
|
||||
*/
|
||||
function getSearchPaths(patterns) {
|
||||
// Ignore negate patterns
|
||||
patterns = patterns.filter(x => !x.negate);
|
||||
// Create a map of all search paths
|
||||
const searchPathMap = {};
|
||||
for (const pattern of patterns) {
|
||||
const key = IS_WINDOWS
|
||||
? pattern.searchPath.toUpperCase()
|
||||
: pattern.searchPath;
|
||||
searchPathMap[key] = 'candidate';
|
||||
}
|
||||
const result = [];
|
||||
for (const pattern of patterns) {
|
||||
// Check if already included
|
||||
const key = IS_WINDOWS
|
||||
? pattern.searchPath.toUpperCase()
|
||||
: pattern.searchPath;
|
||||
if (searchPathMap[key] === 'included') {
|
||||
continue;
|
||||
}
|
||||
// Check for an ancestor search path
|
||||
let foundAncestor = false;
|
||||
let tempKey = key;
|
||||
let parent = pathHelper.dirname(tempKey);
|
||||
while (parent !== tempKey) {
|
||||
if (searchPathMap[parent]) {
|
||||
foundAncestor = true;
|
||||
break;
|
||||
}
|
||||
tempKey = parent;
|
||||
parent = pathHelper.dirname(tempKey);
|
||||
}
|
||||
// Include the search pattern in the result
|
||||
if (!foundAncestor) {
|
||||
result.push(pattern.searchPath);
|
||||
searchPathMap[key] = 'included';
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
exports.getSearchPaths = getSearchPaths;
|
||||
/**
|
||||
* Matches the patterns against the path
|
||||
*/
|
||||
function match(patterns, itemPath) {
|
||||
let result = internal_match_kind_1.MatchKind.None;
|
||||
for (const pattern of patterns) {
|
||||
if (pattern.negate) {
|
||||
result &= ~pattern.match(itemPath);
|
||||
}
|
||||
else {
|
||||
result |= pattern.match(itemPath);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
exports.match = match;
|
||||
/**
|
||||
* Checks whether to descend further into the directory
|
||||
*/
|
||||
function partialMatch(patterns, itemPath) {
|
||||
return patterns.some(x => !x.negate && x.partialMatch(itemPath));
|
||||
}
|
||||
exports.partialMatch = partialMatch;
|
||||
//# sourceMappingURL=internal-pattern-helper.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4536:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.Pattern = void 0;
|
||||
const os = __importStar(__nccwpck_require__(2037));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||
const minimatch_1 = __nccwpck_require__(3973);
|
||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||
const internal_path_1 = __nccwpck_require__(6836);
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
class Pattern {
|
||||
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
|
||||
/**
|
||||
* Indicates whether matches should be excluded from the result set
|
||||
*/
|
||||
this.negate = false;
|
||||
// Pattern overload
|
||||
let pattern;
|
||||
if (typeof patternOrNegate === 'string') {
|
||||
pattern = patternOrNegate.trim();
|
||||
}
|
||||
// Segments overload
|
||||
else {
|
||||
// Convert to pattern
|
||||
segments = segments || [];
|
||||
assert_1.default(segments.length, `Parameter 'segments' must not be empty`);
|
||||
const root = Pattern.getLiteral(segments[0]);
|
||||
assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
|
||||
pattern = new internal_path_1.Path(segments).toString().trim();
|
||||
if (patternOrNegate) {
|
||||
pattern = `!${pattern}`;
|
||||
}
|
||||
}
|
||||
// Negate
|
||||
while (pattern.startsWith('!')) {
|
||||
this.negate = !this.negate;
|
||||
pattern = pattern.substr(1).trim();
|
||||
}
|
||||
// Normalize slashes and ensures absolute root
|
||||
pattern = Pattern.fixupPattern(pattern, homedir);
|
||||
// Segments
|
||||
this.segments = new internal_path_1.Path(pattern).segments;
|
||||
// Trailing slash indicates the pattern should only match directories, not regular files
|
||||
this.trailingSeparator = pathHelper
|
||||
.normalizeSeparators(pattern)
|
||||
.endsWith(path.sep);
|
||||
pattern = pathHelper.safeTrimTrailingSeparator(pattern);
|
||||
// Search path (literal path prior to the first glob segment)
|
||||
let foundGlob = false;
|
||||
const searchSegments = this.segments
|
||||
.map(x => Pattern.getLiteral(x))
|
||||
.filter(x => !foundGlob && !(foundGlob = x === ''));
|
||||
this.searchPath = new internal_path_1.Path(searchSegments).toString();
|
||||
// Root RegExp (required when determining partial match)
|
||||
this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
|
||||
this.isImplicitPattern = isImplicitPattern;
|
||||
// Create minimatch
|
||||
const minimatchOptions = {
|
||||
dot: true,
|
||||
nobrace: true,
|
||||
nocase: IS_WINDOWS,
|
||||
nocomment: true,
|
||||
noext: true,
|
||||
nonegate: true
|
||||
};
|
||||
pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern;
|
||||
this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);
|
||||
}
|
||||
/**
|
||||
* Matches the pattern against the specified path
|
||||
*/
|
||||
match(itemPath) {
|
||||
// Last segment is globstar?
|
||||
if (this.segments[this.segments.length - 1] === '**') {
|
||||
// Normalize slashes
|
||||
itemPath = pathHelper.normalizeSeparators(itemPath);
|
||||
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
|
||||
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
|
||||
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
|
||||
if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {
|
||||
// Note, this is safe because the constructor ensures the pattern has an absolute root.
|
||||
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
|
||||
itemPath = `${itemPath}${path.sep}`;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Normalize slashes and trim unnecessary trailing slash
|
||||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||||
}
|
||||
// Match
|
||||
if (this.minimatch.match(itemPath)) {
|
||||
return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;
|
||||
}
|
||||
return internal_match_kind_1.MatchKind.None;
|
||||
}
|
||||
/**
|
||||
* Indicates whether the pattern may match descendants of the specified path
|
||||
*/
|
||||
partialMatch(itemPath) {
|
||||
// Normalize slashes and trim unnecessary trailing slash
|
||||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||||
// matchOne does not handle root path correctly
|
||||
if (pathHelper.dirname(itemPath) === itemPath) {
|
||||
return this.rootRegExp.test(itemPath);
|
||||
}
|
||||
return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true);
|
||||
}
|
||||
/**
|
||||
* Escapes glob patterns within a path
|
||||
*/
|
||||
static globEscape(s) {
|
||||
return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS
|
||||
.replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment
|
||||
.replace(/\?/g, '[?]') // escape '?'
|
||||
.replace(/\*/g, '[*]'); // escape '*'
|
||||
}
|
||||
/**
|
||||
* Normalizes slashes and ensures absolute root
|
||||
*/
|
||||
static fixupPattern(pattern, homedir) {
|
||||
// Empty
|
||||
assert_1.default(pattern, 'pattern cannot be empty');
|
||||
// Must not contain `.` segment, unless first segment
|
||||
// Must not contain `..` segment
|
||||
const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
|
||||
assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
|
||||
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
|
||||
assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
|
||||
// Normalize slashes
|
||||
pattern = pathHelper.normalizeSeparators(pattern);
|
||||
// Replace leading `.` segment
|
||||
if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {
|
||||
pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);
|
||||
}
|
||||
// Replace leading `~` segment
|
||||
else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
|
||||
homedir = homedir || os.homedir();
|
||||
assert_1.default(homedir, 'Unable to determine HOME directory');
|
||||
assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
|
||||
pattern = Pattern.globEscape(homedir) + pattern.substr(1);
|
||||
}
|
||||
// Replace relative drive root, e.g. pattern is C: or C:foo
|
||||
else if (IS_WINDOWS &&
|
||||
(pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) {
|
||||
let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2));
|
||||
if (pattern.length > 2 && !root.endsWith('\\')) {
|
||||
root += '\\';
|
||||
}
|
||||
pattern = Pattern.globEscape(root) + pattern.substr(2);
|
||||
}
|
||||
// Replace relative root, e.g. pattern is \ or \foo
|
||||
else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) {
|
||||
let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\');
|
||||
if (!root.endsWith('\\')) {
|
||||
root += '\\';
|
||||
}
|
||||
pattern = Pattern.globEscape(root) + pattern.substr(1);
|
||||
}
|
||||
// Otherwise ensure absolute root
|
||||
else {
|
||||
pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);
|
||||
}
|
||||
return pathHelper.normalizeSeparators(pattern);
|
||||
}
|
||||
/**
|
||||
* Attempts to unescape a pattern segment to create a literal path segment.
|
||||
* Otherwise returns empty string.
|
||||
*/
|
||||
static getLiteral(segment) {
|
||||
let literal = '';
|
||||
for (let i = 0; i < segment.length; i++) {
|
||||
const c = segment[i];
|
||||
// Escape
|
||||
if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) {
|
||||
literal += segment[++i];
|
||||
continue;
|
||||
}
|
||||
// Wildcard
|
||||
else if (c === '*' || c === '?') {
|
||||
return '';
|
||||
}
|
||||
// Character set
|
||||
else if (c === '[' && i + 1 < segment.length) {
|
||||
let set = '';
|
||||
let closed = -1;
|
||||
for (let i2 = i + 1; i2 < segment.length; i2++) {
|
||||
const c2 = segment[i2];
|
||||
// Escape
|
||||
if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) {
|
||||
set += segment[++i2];
|
||||
continue;
|
||||
}
|
||||
// Closed
|
||||
else if (c2 === ']') {
|
||||
closed = i2;
|
||||
break;
|
||||
}
|
||||
// Otherwise
|
||||
else {
|
||||
set += c2;
|
||||
}
|
||||
}
|
||||
// Closed?
|
||||
if (closed >= 0) {
|
||||
// Cannot convert
|
||||
if (set.length > 1) {
|
||||
return '';
|
||||
}
|
||||
// Convert to literal
|
||||
if (set) {
|
||||
literal += set;
|
||||
i = closed;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// Otherwise fall thru
|
||||
}
|
||||
// Append
|
||||
literal += c;
|
||||
}
|
||||
return literal;
|
||||
}
|
||||
/**
|
||||
* Escapes regexp special characters
|
||||
* https://javascript.info/regexp-escaping
|
||||
*/
|
||||
static regExpEscape(s) {
|
||||
return s.replace(/[[\\^$.|?*+()]/g, '\\$&');
|
||||
}
|
||||
}
|
||||
exports.Pattern = Pattern;
|
||||
//# sourceMappingURL=internal-pattern.js.map
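// Editor's note (illustrative sketch, not part of the vendored bundle above): the Pattern
// class splits a glob into a literal searchPath (the segments before the first glob segment)
// and a minimatch matcher. The import path and the concrete values below are assumptions for
// illustration, not output captured from this build.
import {Pattern} from '@actions/glob/lib/internal-pattern';

const pattern = new Pattern('/repo/**/yarn.lock');
// pattern.searchPath        -> '/repo'  (literal prefix used as the directory-walk root)
// pattern.trailingSeparator -> false    (a trailing '/' would restrict matches to directories)
// pattern.match('/repo/pkg-a/yarn.lock') matches; pattern.match('/repo/pkg-a') does not,
// because the pattern does not end with a globstar.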
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 9117:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.SearchState = void 0;
|
||||
class SearchState {
|
||||
constructor(path, level) {
|
||||
this.path = path;
|
||||
this.level = level;
|
||||
}
|
||||
}
|
||||
exports.SearchState = SearchState;
|
||||
//# sourceMappingURL=internal-search-state.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 1962:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
@ -59155,14 +60342,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.run = void 0;
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const cache = __importStar(__nccwpck_require__(7799));
|
||||
const fs_1 = __importDefault(__nccwpck_require__(7147));
|
||||
const constants_1 = __nccwpck_require__(9042);
|
||||
const cache_utils_1 = __nccwpck_require__(1678);
|
||||
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
|
||||
@ -59187,20 +60370,23 @@ exports.run = run;
|
||||
const cachePackages = (packageManager) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const state = core.getState(constants_1.State.CacheMatchedKey);
|
||||
const primaryKey = core.getState(constants_1.State.CachePrimaryKey);
|
||||
const cachePaths = JSON.parse(core.getState(constants_1.State.CachePaths) || '[]');
|
||||
const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager);
|
||||
if (!packageManagerInfo) {
|
||||
core.debug(`Caching for '${packageManager}' is not supported`);
|
||||
return;
|
||||
}
|
||||
const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager);
|
||||
if (!fs_1.default.existsSync(cachePath)) {
|
||||
throw new Error(`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`);
|
||||
if (cachePaths.length === 0) {
|
||||
// TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?)
|
||||
// export declare function getInput(name: string, options?: InputOptions): string;
|
||||
const cacheDependencyPath = core.getInput('cache-dependency-path') || '';
|
||||
throw new Error(`Cache folder paths are not retrieved for ${packageManager} with cache-dependency-path = ${cacheDependencyPath}`);
|
||||
}
|
||||
if (primaryKey === state) {
|
||||
core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
|
||||
return;
|
||||
}
|
||||
const cacheId = yield cache.saveCache([cachePath], primaryKey);
|
||||
const cacheId = yield cache.saveCache(cachePaths, primaryKey);
|
||||
if (cacheId == -1) {
|
||||
return;
|
||||
}
|
||||
@ -59244,31 +60430,47 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.isCacheFeatureAvailable = exports.isGhes = exports.getCacheDirectoryPath = exports.getPackageManagerInfo = exports.getCommandOutput = exports.supportedPackageManagers = void 0;
|
||||
exports.isCacheFeatureAvailable = exports.isGhes = exports.getCacheDirectories = exports.getPackageManagerInfo = exports.getCommandOutputNotEmpty = exports.getCommandOutput = exports.supportedPackageManagers = void 0;
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const exec = __importStar(__nccwpck_require__(1514));
|
||||
const cache = __importStar(__nccwpck_require__(7799));
|
||||
const glob = __importStar(__nccwpck_require__(8090));
|
||||
const path_1 = __importDefault(__nccwpck_require__(1017));
|
||||
const fs_1 = __importDefault(__nccwpck_require__(7147));
|
||||
const util_1 = __nccwpck_require__(2629);
|
||||
exports.supportedPackageManagers = {
|
||||
npm: {
|
||||
name: 'npm',
|
||||
lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'],
|
||||
getCacheFolderCommand: 'npm config get cache'
|
||||
getCacheFolderPath: () => exports.getCommandOutputNotEmpty('npm config get cache', 'Could not get npm cache folder path')
|
||||
},
|
||||
pnpm: {
|
||||
name: 'pnpm',
|
||||
lockFilePatterns: ['pnpm-lock.yaml'],
|
||||
getCacheFolderCommand: 'pnpm store path --silent'
|
||||
getCacheFolderPath: () => exports.getCommandOutputNotEmpty('pnpm store path --silent', 'Could not get pnpm cache folder path')
|
||||
},
|
||||
yarn1: {
|
||||
yarn: {
|
||||
name: 'yarn',
|
||||
lockFilePatterns: ['yarn.lock'],
|
||||
getCacheFolderCommand: 'yarn cache dir'
|
||||
},
|
||||
yarn2: {
|
||||
lockFilePatterns: ['yarn.lock'],
|
||||
getCacheFolderCommand: 'yarn config get cacheFolder'
|
||||
getCacheFolderPath: (projectDir) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const yarnVersion = yield exports.getCommandOutputNotEmpty(`yarn --version`, 'Could not retrieve version of yarn', projectDir);
|
||||
core.debug(`Consumed yarn version is ${yarnVersion} (working dir: "${projectDir || ''}")`);
|
||||
const stdOut = yarnVersion.startsWith('1.')
|
||||
? yield exports.getCommandOutput('yarn cache dir', projectDir)
|
||||
: yield exports.getCommandOutput('yarn config get cacheFolder', projectDir);
|
||||
if (!stdOut) {
|
||||
throw new Error(`Could not get yarn cache folder path for ${projectDir}`);
|
||||
}
|
||||
return stdOut;
|
||||
})
|
||||
}
|
||||
};
|
||||
const getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true });
|
||||
const getCommandOutput = (toolCommand, cwd) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, Object.assign({ ignoreReturnCode: true }, (cwd && { cwd })));
|
||||
if (exitCode) {
|
||||
stderr = !stderr.trim()
|
||||
? `The '${toolCommand}' command failed with exit code: ${exitCode}`
|
||||
@ -59278,13 +60480,14 @@ const getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, func
|
||||
return stdout.trim();
|
||||
});
|
||||
exports.getCommandOutput = getCommandOutput;
|
||||
const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`);
|
||||
const getCommandOutputNotEmpty = (toolCommand, error, cwd) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const stdOut = yield exports.getCommandOutput(toolCommand, cwd);
|
||||
if (!stdOut) {
|
||||
throw new Error(`Could not retrieve version of ${packageManager}`);
|
||||
throw new Error(error);
|
||||
}
|
||||
return stdOut;
|
||||
});
|
||||
exports.getCommandOutputNotEmpty = getCommandOutputNotEmpty;
|
||||
const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
if (packageManager === 'npm') {
|
||||
return exports.supportedPackageManagers.npm;
|
||||
@ -59293,29 +60496,75 @@ const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void
|
||||
return exports.supportedPackageManagers.pnpm;
|
||||
}
|
||||
else if (packageManager === 'yarn') {
|
||||
const yarnVersion = yield getPackageManagerVersion('yarn', '--version');
|
||||
core.debug(`Consumed yarn version is ${yarnVersion}`);
|
||||
if (yarnVersion.startsWith('1.')) {
|
||||
return exports.supportedPackageManagers.yarn1;
|
||||
}
|
||||
else {
|
||||
return exports.supportedPackageManagers.yarn2;
|
||||
}
|
||||
return exports.supportedPackageManagers.yarn;
|
||||
}
|
||||
else {
|
||||
return null;
|
||||
}
|
||||
});
|
||||
exports.getPackageManagerInfo = getPackageManagerInfo;
|
||||
const getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand);
|
||||
if (!stdOut) {
|
||||
throw new Error(`Could not get cache folder path for ${packageManager}`);
|
||||
}
|
||||
core.debug(`${packageManager} path is ${stdOut}`);
|
||||
return stdOut.trim();
|
||||
/**
|
||||
* Expands (converts) the string input `cache-dependency-path` to a list of directories that
|
||||
* may be project roots
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return list of existing directories that may be project roots
|
||||
*/
|
||||
const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const globber = yield glob.create(cacheDependencyPath);
|
||||
const cacheDependenciesPaths = yield globber.glob();
|
||||
const existingDirectories = cacheDependenciesPaths
|
||||
.map(path_1.default.dirname)
|
||||
.filter(util_1.unique())
|
||||
.filter(directory => fs_1.default.lstatSync(directory).isDirectory());
|
||||
if (!existingDirectories.length)
|
||||
core.warning(`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`);
|
||||
return existingDirectories;
|
||||
});
|
||||
exports.getCacheDirectoryPath = getCacheDirectoryPath;
|
||||
/**
|
||||
* Finds the cache directories configured for the repo if cache-dependency-path is not empty
|
||||
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return list of unique cache folder paths, one per matched project directory
|
||||
*/
|
||||
const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const projectDirectories = yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath);
|
||||
const cacheFoldersPaths = yield Promise.all(projectDirectories.map((projectDirectory) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const cacheFolderPath = packageManagerInfo.getCacheFolderPath(projectDirectory);
|
||||
core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`);
|
||||
return cacheFolderPath;
|
||||
})));
|
||||
// de-duplicate so the same directory is not cached twice
|
||||
return cacheFoldersPaths.filter(util_1.unique());
|
||||
});
|
||||
/**
|
||||
* Finds the cache directories configured for the repo ignoring cache-dependency-path
|
||||
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
|
||||
* @return list containing the single cache folder path of the root project
|
||||
*/
|
||||
const getCacheDirectoriesForRootProject = (packageManagerInfo) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath();
|
||||
core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`);
|
||||
return [cacheFolderPath];
|
||||
});
|
||||
/**
|
||||
* A function to find the cache directories configured for the repo
|
||||
* currently only yarn with a non-empty cacheDependencyPath is handled per project; other package managers fall back to the root project
|
||||
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return list of cache directories to pass to the cache service
|
||||
*/
|
||||
const getCacheDirectories = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
// For yarn, if cacheDependencyPath is set, look up the cache folder in each project
|
||||
// folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488
|
||||
if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) {
|
||||
return getCacheDirectoriesFromCacheDependencyPath(packageManagerInfo, cacheDependencyPath);
|
||||
}
|
||||
return getCacheDirectoriesForRootProject(packageManagerInfo);
|
||||
});
|
||||
exports.getCacheDirectories = getCacheDirectories;
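// Editor's note (illustrative sketch, not part of the bundle above): expected use of the new
// getCacheDirectories for yarn with a multiline cache-dependency-path. The directory names and
// resulting cache folders are hypothetical; real values come from running `yarn cache dir` /
// `yarn config get cacheFolder` inside each matched project directory. Import path refers to
// the TypeScript source (src/cache-utils.ts).
import {getCacheDirectories, supportedPackageManagers} from './cache-utils';

async function resolveYarnCacheDirs(): Promise<string[]> {
  // multiline input exactly as returned by core.getInput('cache-dependency-path')
  const cacheDependencyPath = ['**/yarn.lock', 'yarn.lock'].join('\n');
  const dirs = await getCacheDirectories(
    supportedPackageManagers.yarn,
    cacheDependencyPath
  );
  // e.g. ['pkg-a/.yarn/cache', 'pkg-b/.yarn/cache', '/home/runner/.cache/yarn/v6']
  return dirs;
}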
|
||||
function isGhes() {
|
||||
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
|
||||
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
|
||||
@ -59353,6 +60602,7 @@ var State;
|
||||
(function (State) {
|
||||
State["CachePrimaryKey"] = "CACHE_KEY";
|
||||
State["CacheMatchedKey"] = "CACHE_RESULT";
|
||||
State["CachePaths"] = "CACHE_PATHS";
|
||||
})(State = exports.State || (exports.State = {}));
|
||||
var Outputs;
|
||||
(function (Outputs) {
|
||||
@ -59360,6 +60610,116 @@ var Outputs;
|
||||
})(Outputs = exports.Outputs || (exports.Outputs = {}));
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2629:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.unique = exports.printEnvDetailsAndSetOutput = exports.parseNodeVersionFile = void 0;
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const exec = __importStar(__nccwpck_require__(1514));
|
||||
function parseNodeVersionFile(contents) {
|
||||
var _a, _b, _c;
|
||||
let nodeVersion;
|
||||
// Try parsing the file as an NPM `package.json` file.
|
||||
try {
|
||||
nodeVersion = (_a = JSON.parse(contents).volta) === null || _a === void 0 ? void 0 : _a.node;
|
||||
if (!nodeVersion)
|
||||
nodeVersion = (_b = JSON.parse(contents).engines) === null || _b === void 0 ? void 0 : _b.node;
|
||||
}
|
||||
catch (_d) {
|
||||
core.info('Node version file is not JSON file');
|
||||
}
|
||||
if (!nodeVersion) {
|
||||
const found = contents.match(/^(?:nodejs\s+)?v?(?<version>[^\s]+)$/m);
|
||||
nodeVersion = (_c = found === null || found === void 0 ? void 0 : found.groups) === null || _c === void 0 ? void 0 : _c.version;
|
||||
}
|
||||
// In the case of an unknown format,
|
||||
// return as is and evaluate the version separately.
|
||||
if (!nodeVersion)
|
||||
nodeVersion = contents.trim();
|
||||
return nodeVersion;
|
||||
}
|
||||
exports.parseNodeVersionFile = parseNodeVersionFile;
|
||||
function printEnvDetailsAndSetOutput() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
core.startGroup('Environment details');
|
||||
const promises = ['node', 'npm', 'yarn'].map((tool) => __awaiter(this, void 0, void 0, function* () {
|
||||
const output = yield getToolVersion(tool, ['--version']);
|
||||
return { tool, output };
|
||||
}));
|
||||
const tools = yield Promise.all(promises);
|
||||
tools.forEach(({ tool, output }) => {
|
||||
if (tool === 'node') {
|
||||
core.setOutput(`${tool}-version`, output);
|
||||
}
|
||||
core.info(`${tool}: ${output}`);
|
||||
});
|
||||
core.endGroup();
|
||||
});
|
||||
}
|
||||
exports.printEnvDetailsAndSetOutput = printEnvDetailsAndSetOutput;
|
||||
function getToolVersion(tool, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
const { stdout, stderr, exitCode } = yield exec.getExecOutput(tool, options, {
|
||||
ignoreReturnCode: true,
|
||||
silent: true
|
||||
});
|
||||
if (exitCode > 0) {
|
||||
core.info(`[warning]${stderr}`);
|
||||
return '';
|
||||
}
|
||||
return stdout.trim();
|
||||
}
|
||||
catch (err) {
|
||||
return '';
|
||||
}
|
||||
});
|
||||
}
|
||||
const unique = () => {
|
||||
const encountered = new Set();
|
||||
return (value) => {
|
||||
if (encountered.has(value))
|
||||
return false;
|
||||
encountered.add(value);
|
||||
return true;
|
||||
};
|
||||
};
|
||||
exports.unique = unique;
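// Editor's note (illustrative sketch, not part of the bundle above): unique() returns a
// stateful predicate for Array.prototype.filter; cache-utils uses it to de-duplicate project
// directories and cache folder paths. Sample data is hypothetical. Import path refers to the
// TypeScript source (src/util.ts).
import {unique} from './util';

const dirs = ['pkg-a', 'pkg-b', 'pkg-a', '.'];
const deduped = dirs.filter(unique()); // -> ['pkg-a', 'pkg-b', '.']
// Note: each unique() call creates a fresh Set, so build a new predicate per filter pass.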
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2877:
|
||||
|
||||
141
dist/setup/index.js
vendored
@ -71144,7 +71144,8 @@ const restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0,
|
||||
throw new Error(`Caching for '${packageManager}' is not supported`);
|
||||
}
|
||||
const platform = process.env.RUNNER_OS;
|
||||
const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager);
|
||||
const cachePaths = yield cache_utils_1.getCacheDirectories(packageManagerInfo, cacheDependencyPath);
|
||||
core.saveState(constants_1.State.CachePaths, cachePaths);
|
||||
const lockFilePath = cacheDependencyPath
|
||||
? cacheDependencyPath
|
||||
: findLockFile(packageManagerInfo);
|
||||
@ -71155,7 +71156,7 @@ const restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0,
|
||||
const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`;
|
||||
core.debug(`primary key is ${primaryKey}`);
|
||||
core.saveState(constants_1.State.CachePrimaryKey, primaryKey);
|
||||
const cacheKey = yield cache.restoreCache([cachePath], primaryKey);
|
||||
const cacheKey = yield cache.restoreCache(cachePaths, primaryKey);
|
||||
core.setOutput('cache-hit', Boolean(cacheKey));
|
||||
if (!cacheKey) {
|
||||
core.info(`${packageManager} cache is not found`);
|
||||
@ -71212,31 +71213,47 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.isCacheFeatureAvailable = exports.isGhes = exports.getCacheDirectoryPath = exports.getPackageManagerInfo = exports.getCommandOutput = exports.supportedPackageManagers = void 0;
|
||||
exports.isCacheFeatureAvailable = exports.isGhes = exports.getCacheDirectories = exports.getPackageManagerInfo = exports.getCommandOutputNotEmpty = exports.getCommandOutput = exports.supportedPackageManagers = void 0;
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const exec = __importStar(__nccwpck_require__(1514));
|
||||
const cache = __importStar(__nccwpck_require__(7799));
|
||||
const glob = __importStar(__nccwpck_require__(8090));
|
||||
const path_1 = __importDefault(__nccwpck_require__(1017));
|
||||
const fs_1 = __importDefault(__nccwpck_require__(7147));
|
||||
const util_1 = __nccwpck_require__(2629);
|
||||
exports.supportedPackageManagers = {
|
||||
npm: {
|
||||
name: 'npm',
|
||||
lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'],
|
||||
getCacheFolderCommand: 'npm config get cache'
|
||||
getCacheFolderPath: () => exports.getCommandOutputNotEmpty('npm config get cache', 'Could not get npm cache folder path')
|
||||
},
|
||||
pnpm: {
|
||||
name: 'pnpm',
|
||||
lockFilePatterns: ['pnpm-lock.yaml'],
|
||||
getCacheFolderCommand: 'pnpm store path --silent'
|
||||
getCacheFolderPath: () => exports.getCommandOutputNotEmpty('pnpm store path --silent', 'Could not get pnpm cache folder path')
|
||||
},
|
||||
yarn1: {
|
||||
yarn: {
|
||||
name: 'yarn',
|
||||
lockFilePatterns: ['yarn.lock'],
|
||||
getCacheFolderCommand: 'yarn cache dir'
|
||||
},
|
||||
yarn2: {
|
||||
lockFilePatterns: ['yarn.lock'],
|
||||
getCacheFolderCommand: 'yarn config get cacheFolder'
|
||||
getCacheFolderPath: (projectDir) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const yarnVersion = yield exports.getCommandOutputNotEmpty(`yarn --version`, 'Could not retrieve version of yarn', projectDir);
|
||||
core.debug(`Consumed yarn version is ${yarnVersion} (working dir: "${projectDir || ''}")`);
|
||||
const stdOut = yarnVersion.startsWith('1.')
|
||||
? yield exports.getCommandOutput('yarn cache dir', projectDir)
|
||||
: yield exports.getCommandOutput('yarn config get cacheFolder', projectDir);
|
||||
if (!stdOut) {
|
||||
throw new Error(`Could not get yarn cache folder path for ${projectDir}`);
|
||||
}
|
||||
return stdOut;
|
||||
})
|
||||
}
|
||||
};
|
||||
const getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true });
|
||||
const getCommandOutput = (toolCommand, cwd) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, Object.assign({ ignoreReturnCode: true }, (cwd && { cwd })));
|
||||
if (exitCode) {
|
||||
stderr = !stderr.trim()
|
||||
? `The '${toolCommand}' command failed with exit code: ${exitCode}`
|
||||
@ -71246,13 +71263,14 @@ const getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, func
|
||||
return stdout.trim();
|
||||
});
|
||||
exports.getCommandOutput = getCommandOutput;
|
||||
const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`);
|
||||
const getCommandOutputNotEmpty = (toolCommand, error, cwd) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const stdOut = yield exports.getCommandOutput(toolCommand, cwd);
|
||||
if (!stdOut) {
|
||||
throw new Error(`Could not retrieve version of ${packageManager}`);
|
||||
throw new Error(error);
|
||||
}
|
||||
return stdOut;
|
||||
});
|
||||
exports.getCommandOutputNotEmpty = getCommandOutputNotEmpty;
|
||||
const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
if (packageManager === 'npm') {
|
||||
return exports.supportedPackageManagers.npm;
|
||||
@ -71261,29 +71279,75 @@ const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void
|
||||
return exports.supportedPackageManagers.pnpm;
|
||||
}
|
||||
else if (packageManager === 'yarn') {
|
||||
const yarnVersion = yield getPackageManagerVersion('yarn', '--version');
|
||||
core.debug(`Consumed yarn version is ${yarnVersion}`);
|
||||
if (yarnVersion.startsWith('1.')) {
|
||||
return exports.supportedPackageManagers.yarn1;
|
||||
}
|
||||
else {
|
||||
return exports.supportedPackageManagers.yarn2;
|
||||
}
|
||||
return exports.supportedPackageManagers.yarn;
|
||||
}
|
||||
else {
|
||||
return null;
|
||||
}
|
||||
});
|
||||
exports.getPackageManagerInfo = getPackageManagerInfo;
|
||||
const getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand);
|
||||
if (!stdOut) {
|
||||
throw new Error(`Could not get cache folder path for ${packageManager}`);
|
||||
}
|
||||
core.debug(`${packageManager} path is ${stdOut}`);
|
||||
return stdOut.trim();
|
||||
/**
|
||||
* Expands (converts) the string input `cache-dependency-path` to a list of directories that
|
||||
* may be project roots
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return list of existing directories that may be project roots
|
||||
*/
|
||||
const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const globber = yield glob.create(cacheDependencyPath);
|
||||
const cacheDependenciesPaths = yield globber.glob();
|
||||
const existingDirectories = cacheDependenciesPaths
|
||||
.map(path_1.default.dirname)
|
||||
.filter(util_1.unique())
|
||||
.filter(directory => fs_1.default.lstatSync(directory).isDirectory());
|
||||
if (!existingDirectories.length)
|
||||
core.warning(`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`);
|
||||
return existingDirectories;
|
||||
});
|
||||
exports.getCacheDirectoryPath = getCacheDirectoryPath;
|
||||
/**
|
||||
* Finds the cache directories configured for the repo if cache-dependency-path is not empty
|
||||
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return list of unique cache folder paths, one per matched project directory
|
||||
*/
|
||||
const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const projectDirectories = yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath);
|
||||
const cacheFoldersPaths = yield Promise.all(projectDirectories.map((projectDirectory) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const cacheFolderPath = packageManagerInfo.getCacheFolderPath(projectDirectory);
|
||||
core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`);
|
||||
return cacheFolderPath;
|
||||
})));
|
||||
// de-duplicate so the same directory is not cached twice
|
||||
return cacheFoldersPaths.filter(util_1.unique());
|
||||
});
|
||||
/**
|
||||
* Finds the cache directories configured for the repo ignoring cache-dependency-path
|
||||
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
|
||||
* @return list containing the single cache folder path of the root project
|
||||
*/
|
||||
const getCacheDirectoriesForRootProject = (packageManagerInfo) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath();
|
||||
core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`);
|
||||
return [cacheFolderPath];
|
||||
});
|
||||
/**
|
||||
* A function to find the cache directories configured for the repo
|
||||
* currently only yarn with a non-empty cacheDependencyPath is handled per project; other package managers fall back to the root project
|
||||
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
|
||||
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
|
||||
* expected to be the result of `core.getInput('cache-dependency-path')`
|
||||
* @return list of cache directories to pass to the cache service
|
||||
*/
|
||||
const getCacheDirectories = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
// For yarn, if cacheDependencyPath is set, look up the cache folder in each project
|
||||
// folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488
|
||||
if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) {
|
||||
return getCacheDirectoriesFromCacheDependencyPath(packageManagerInfo, cacheDependencyPath);
|
||||
}
|
||||
return getCacheDirectoriesForRootProject(packageManagerInfo);
|
||||
});
|
||||
exports.getCacheDirectories = getCacheDirectories;
|
||||
function isGhes() {
|
||||
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
|
||||
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
|
||||
@ -71321,6 +71385,7 @@ var State;
|
||||
(function (State) {
|
||||
State["CachePrimaryKey"] = "CACHE_KEY";
|
||||
State["CacheMatchedKey"] = "CACHE_RESULT";
|
||||
State["CachePaths"] = "CACHE_PATHS";
|
||||
})(State = exports.State || (exports.State = {}));
|
||||
var Outputs;
|
||||
(function (Outputs) {
|
||||
@ -72159,7 +72224,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.printEnvDetailsAndSetOutput = exports.parseNodeVersionFile = void 0;
|
||||
exports.unique = exports.printEnvDetailsAndSetOutput = exports.parseNodeVersionFile = void 0;
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const exec = __importStar(__nccwpck_require__(1514));
|
||||
function parseNodeVersionFile(contents) {
|
||||
@ -72221,6 +72286,16 @@ function getToolVersion(tool, options) {
|
||||
}
|
||||
});
|
||||
}
|
||||
const unique = () => {
|
||||
const encountered = new Set();
|
||||
return (value) => {
|
||||
if (encountered.has(value))
|
||||
return false;
|
||||
encountered.add(value);
|
||||
return true;
|
||||
};
|
||||
};
|
||||
exports.unique = unique;
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
@ -6,14 +6,14 @@ import fs from 'fs';
|
||||
|
||||
import {State} from './constants';
|
||||
import {
|
||||
getCacheDirectoryPath,
|
||||
getCacheDirectories,
|
||||
getPackageManagerInfo,
|
||||
PackageManagerInfo
|
||||
} from './cache-utils';
|
||||
|
||||
export const restoreCache = async (
|
||||
packageManager: string,
|
||||
cacheDependencyPath?: string
|
||||
cacheDependencyPath: string
|
||||
) => {
|
||||
const packageManagerInfo = await getPackageManagerInfo(packageManager);
|
||||
if (!packageManagerInfo) {
|
||||
@ -21,10 +21,11 @@ export const restoreCache = async (
|
||||
}
|
||||
const platform = process.env.RUNNER_OS;
|
||||
|
||||
const cachePath = await getCacheDirectoryPath(
|
||||
const cachePaths = await getCacheDirectories(
|
||||
packageManagerInfo,
|
||||
packageManager
|
||||
cacheDependencyPath
|
||||
);
|
||||
core.saveState(State.CachePaths, cachePaths);
|
||||
const lockFilePath = cacheDependencyPath
|
||||
? cacheDependencyPath
|
||||
: findLockFile(packageManagerInfo);
|
||||
@ -41,7 +42,7 @@ export const restoreCache = async (
|
||||
|
||||
core.saveState(State.CachePrimaryKey, primaryKey);
|
||||
|
||||
const cacheKey = await cache.restoreCache([cachePath], primaryKey);
|
||||
const cacheKey = await cache.restoreCache(cachePaths, primaryKey);
|
||||
core.setOutput('cache-hit', Boolean(cacheKey));
|
||||
|
||||
if (!cacheKey) {
|
||||
@ -56,6 +57,7 @@ export const restoreCache = async (
|
||||
const findLockFile = (packageManager: PackageManagerInfo) => {
|
||||
const lockFiles = packageManager.lockFilePatterns;
|
||||
const workspace = process.env.GITHUB_WORKSPACE!;
|
||||
|
||||
const rootContent = fs.readdirSync(workspace);
|
||||
|
||||
const lockFile = lockFiles.find(item => rootContent.includes(item));
|
||||
|
||||
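// Editor's note (illustrative sketch, not part of the diff): after this change restoreCache
// always receives the cache-dependency-path input (possibly an empty string) instead of an
// optional parameter. The call site below is an assumption about how the action's main entry
// wires it up, not a copy of src/main.ts.
import * as core from '@actions/core';
import {restoreCache} from './cache-restore';

async function setupCacheStep(): Promise<void> {
  const cache = core.getInput('cache');
  const cacheDependencyPath = core.getInput('cache-dependency-path');
  if (cache) {
    await restoreCache(cache, cacheDependencyPath);
  }
}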
@ -1,8 +1,7 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as cache from '@actions/cache';
|
||||
import fs from 'fs';
|
||||
import {State} from './constants';
|
||||
import {getCacheDirectoryPath, getPackageManagerInfo} from './cache-utils';
|
||||
import {getPackageManagerInfo} from './cache-utils';
|
||||
|
||||
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
|
||||
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
|
||||
@ -24,6 +23,7 @@ export async function run() {
|
||||
const cachePackages = async (packageManager: string) => {
|
||||
const state = core.getState(State.CacheMatchedKey);
|
||||
const primaryKey = core.getState(State.CachePrimaryKey);
|
||||
const cachePaths = JSON.parse(core.getState(State.CachePaths) || '[]');
|
||||
|
||||
const packageManagerInfo = await getPackageManagerInfo(packageManager);
|
||||
if (!packageManagerInfo) {
|
||||
@ -31,14 +31,12 @@ const cachePackages = async (packageManager: string) => {
|
||||
return;
|
||||
}
|
||||
|
||||
const cachePath = await getCacheDirectoryPath(
|
||||
packageManagerInfo,
|
||||
packageManager
|
||||
);
|
||||
|
||||
if (!fs.existsSync(cachePath)) {
|
||||
if (cachePaths.length === 0) {
|
||||
// TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?)
|
||||
// export declare function getInput(name: string, options?: InputOptions): string;
|
||||
const cacheDependencyPath = core.getInput('cache-dependency-path') || '';
|
||||
throw new Error(
|
||||
`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`
|
||||
`Cache folder paths are not retrieved for ${packageManager} with cache-dependency-path = ${cacheDependencyPath}`
|
||||
);
|
||||
}
|
||||
|
||||
@ -49,7 +47,7 @@ const cachePackages = async (packageManager: string) => {
|
||||
return;
|
||||
}
|
||||
|
||||
const cacheId = await cache.saveCache([cachePath], primaryKey);
|
||||
const cacheId = await cache.saveCache(cachePaths, primaryKey);
|
||||
if (cacheId == -1) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -1,40 +1,79 @@
|
||||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as cache from '@actions/cache';
|
||||
|
||||
type SupportedPackageManagers = {
|
||||
[prop: string]: PackageManagerInfo;
|
||||
};
|
||||
import * as glob from '@actions/glob';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import {unique} from './util';
|
||||
|
||||
export interface PackageManagerInfo {
|
||||
name: string;
|
||||
lockFilePatterns: Array<string>;
|
||||
getCacheFolderCommand: string;
|
||||
getCacheFolderPath: (projectDir?: string) => Promise<string>;
|
||||
}
|
||||
|
||||
interface SupportedPackageManagers {
|
||||
npm: PackageManagerInfo;
|
||||
pnpm: PackageManagerInfo;
|
||||
yarn: PackageManagerInfo;
|
||||
}
|
||||
export const supportedPackageManagers: SupportedPackageManagers = {
|
||||
npm: {
|
||||
name: 'npm',
|
||||
lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'],
|
||||
getCacheFolderCommand: 'npm config get cache'
|
||||
getCacheFolderPath: () =>
|
||||
getCommandOutputNotEmpty(
|
||||
'npm config get cache',
|
||||
'Could not get npm cache folder path'
|
||||
)
|
||||
},
|
||||
pnpm: {
|
||||
name: 'pnpm',
|
||||
lockFilePatterns: ['pnpm-lock.yaml'],
|
||||
getCacheFolderCommand: 'pnpm store path --silent'
|
||||
getCacheFolderPath: () =>
|
||||
getCommandOutputNotEmpty(
|
||||
'pnpm store path --silent',
|
||||
'Could not get pnpm cache folder path'
|
||||
)
|
||||
},
|
||||
yarn1: {
|
||||
yarn: {
|
||||
name: 'yarn',
|
||||
lockFilePatterns: ['yarn.lock'],
|
||||
getCacheFolderCommand: 'yarn cache dir'
|
||||
},
|
||||
yarn2: {
|
||||
lockFilePatterns: ['yarn.lock'],
|
||||
getCacheFolderCommand: 'yarn config get cacheFolder'
|
||||
getCacheFolderPath: async projectDir => {
|
||||
const yarnVersion = await getCommandOutputNotEmpty(
|
||||
`yarn --version`,
|
||||
'Could not retrieve version of yarn',
|
||||
projectDir
|
||||
);
|
||||
|
||||
core.debug(
|
||||
`Consumed yarn version is ${yarnVersion} (working dir: "${
|
||||
projectDir || ''
|
||||
}")`
|
||||
);
|
||||
|
||||
const stdOut = yarnVersion.startsWith('1.')
|
||||
? await getCommandOutput('yarn cache dir', projectDir)
|
||||
: await getCommandOutput('yarn config get cacheFolder', projectDir);
|
||||
|
||||
if (!stdOut) {
|
||||
throw new Error(
|
||||
`Could not get yarn cache folder path for ${projectDir}`
|
||||
);
|
||||
}
|
||||
return stdOut;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
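The yarn entry above branches on the yarn major version and runs the lookup from the sub-project's directory. A minimal standalone sketch of the same idea, assuming only @actions/exec; the helper name resolveYarnCacheFolder is illustrative:

import * as exec from '@actions/exec';

// Sketch of the version-dependent lookup: yarn 1.x exposes the folder via
// 'yarn cache dir', yarn berry (2+) via 'yarn config get cacheFolder';
// projectDir is optional, as in the diff above.
async function resolveYarnCacheFolder(projectDir?: string): Promise<string> {
  const run = async (command: string): Promise<string> => {
    const {stdout} = await exec.getExecOutput(command, undefined, {
      ignoreReturnCode: true,
      ...(projectDir && {cwd: projectDir})
    });
    return stdout.trim();
  };

  const yarnVersion = await run('yarn --version');
  return yarnVersion.startsWith('1.')
    ? run('yarn cache dir')
    : run('yarn config get cacheFolder');
}
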
export const getCommandOutput = async (toolCommand: string) => {
export const getCommandOutput = async (
toolCommand: string,
cwd?: string
): Promise<string> => {
let {stdout, stderr, exitCode} = await exec.getExecOutput(
toolCommand,
undefined,
{ignoreReturnCode: true}
{ignoreReturnCode: true, ...(cwd && {cwd})}
);

if (exitCode) {
@ -47,16 +86,15 @@ export const getCommandOutput = async (toolCommand: string) => {
return stdout.trim();
};

const getPackageManagerVersion = async (
packageManager: string,
command: string
) => {
const stdOut = await getCommandOutput(`${packageManager} ${command}`);

export const getCommandOutputNotEmpty = async (
toolCommand: string,
error: string,
cwd?: string
): Promise<string> => {
const stdOut = await getCommandOutput(toolCommand, cwd);
if (!stdOut) {
throw new Error(`Could not retrieve version of ${packageManager}`);
throw new Error(error);
}

return stdOut;
};

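A short usage sketch of the two helpers above with the new optional cwd argument; the 'sub-project' directory is a made-up example:

import {getCommandOutput, getCommandOutputNotEmpty} from './cache-utils';

async function demo(): Promise<void> {
  // no cwd: the command runs in the workflow's working directory
  const npmCache = await getCommandOutputNotEmpty(
    'npm config get cache',
    'Could not get npm cache folder path'
  );

  // with cwd: the same kind of lookup, resolved from a sub-project folder
  const yarnCache = await getCommandOutput('yarn cache dir', 'sub-project');

  console.log({npmCache, yarnCache});
}
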
@ -66,35 +104,102 @@ export const getPackageManagerInfo = async (packageManager: string) => {
} else if (packageManager === 'pnpm') {
return supportedPackageManagers.pnpm;
} else if (packageManager === 'yarn') {
const yarnVersion = await getPackageManagerVersion('yarn', '--version');

core.debug(`Consumed yarn version is ${yarnVersion}`);

if (yarnVersion.startsWith('1.')) {
return supportedPackageManagers.yarn1;
} else {
return supportedPackageManagers.yarn2;
}
return supportedPackageManagers.yarn;
} else {
return null;
}
};

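With the yarn1/yarn2 split gone, getPackageManagerInfo resolves 'yarn' to a single entry and defers the version check to getCacheFolderPath. Illustrative usage only; the 'bower' input is just an example of an unsupported value:

import {getPackageManagerInfo, supportedPackageManagers} from './cache-utils';

async function demo(): Promise<void> {
  const yarnInfo = await getPackageManagerInfo('yarn');
  console.log(yarnInfo === supportedPackageManagers.yarn); // true for yarn 1 and yarn berry alike

  const unknown = await getPackageManagerInfo('bower');
  console.log(unknown); // null, so callers report "Caching for 'bower' is not supported"
}
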
export const getCacheDirectoryPath = async (
/**
* Expands (converts) the string input `cache-dependency-path` to a list of directories that
* may be project roots
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return list of existing directories that contain the matched dependency files
*/
const getProjectDirectoriesFromCacheDependencyPath = async (
cacheDependencyPath: string
): Promise<string[]> => {
const globber = await glob.create(cacheDependencyPath);
const cacheDependenciesPaths = await globber.glob();

const existingDirectories: string[] = cacheDependenciesPaths
.map(path.dirname)
.filter(unique())
.filter(directory => fs.lstatSync(directory).isDirectory());

if (!existingDirectories.length)
core.warning(
`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`
);

return existingDirectories;
};

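The directory expansion above can be exercised on its own. A hedged, self-contained sketch of the same steps (glob, dirname, dedupe, keep existing directories); the lock-file locations in the trailing comment are hypothetical:

import * as glob from '@actions/glob';
import path from 'path';
import fs from 'fs';

async function projectDirectoriesFor(patterns: string): Promise<string[]> {
  // @actions/glob accepts newline-separated patterns, mirroring the
  // multiline cache-dependency-path input
  const globber = await glob.create(patterns);
  const matches = await globber.glob(); // absolute paths of matched files

  return Array.from(new Set(matches.map(path.dirname))).filter(directory =>
    fs.lstatSync(directory).isDirectory()
  );
}

// e.g. with lock files at ./yarn.lock and ./sub-project/yarn.lock (hypothetical),
// projectDirectoriesFor('**/yarn.lock\nyarn.lock') yields one entry per project directory
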
/**
* Finds the cache directories configured for the repo if cache-dependency-path is not empty
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return list of cache folder paths, one per matched project directory (deduplicated)
*/
const getCacheDirectoriesFromCacheDependencyPath = async (
packageManagerInfo: PackageManagerInfo,
packageManager
) => {
const stdOut = await getCommandOutput(
packageManagerInfo.getCacheFolderCommand
cacheDependencyPath: string
): Promise<string[]> => {
const projectDirectories = await getProjectDirectoriesFromCacheDependencyPath(
cacheDependencyPath
);
const cacheFoldersPaths = await Promise.all(
projectDirectories.map(async projectDirectory => {
const cacheFolderPath =
await packageManagerInfo.getCacheFolderPath(projectDirectory);
core.debug(
`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`
);
return cacheFolderPath;
})
);
// unique() so that the same cache directory is not saved twice
return cacheFoldersPaths.filter(unique());
};

if (!stdOut) {
throw new Error(`Could not get cache folder path for ${packageManager}`);
/**
* Finds the cache directories configured for the repo ignoring cache-dependency-path
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
* @return list with the single cache folder resolved for the root project
*/
const getCacheDirectoriesForRootProject = async (
packageManagerInfo: PackageManagerInfo
): Promise<string[]> => {
const cacheFolderPath = await packageManagerInfo.getCacheFolderPath();
core.debug(
`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`
);
return [cacheFolderPath];
};

/**
* Finds the cache directories configured for the repo.
* Currently only yarn with a non-empty cacheDependencyPath gets per-project resolution;
* every other case falls back to the root project's cache folder.
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return list of cache directories to pass to @actions/cache
*/
export const getCacheDirectories = async (
packageManagerInfo: PackageManagerInfo,
cacheDependencyPath: string
): Promise<string[]> => {
// For yarn, if cacheDependencyPath is set, ask for the cache folder in each project
// folder matched by cacheDependencyPath https://github.com/actions/setup-node/issues/488
if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) {
return getCacheDirectoriesFromCacheDependencyPath(
packageManagerInfo,
cacheDependencyPath
);
}

core.debug(`${packageManager} path is ${stdOut}`);

return stdOut.trim();
return getCacheDirectoriesForRootProject(packageManagerInfo);
};

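A sketch of how the restore and save steps are expected to consume getCacheDirectories; only the imported helpers come from this diff, the wrapper name resolveCachePaths is illustrative:

import * as core from '@actions/core';
import {getPackageManagerInfo, getCacheDirectories} from './cache-utils';

async function resolveCachePaths(packageManager: string): Promise<string[]> {
  const packageManagerInfo = await getPackageManagerInfo(packageManager);
  if (!packageManagerInfo) {
    throw new Error(`Caching for '${packageManager}' is not supported`);
  }

  // npm/pnpm (and yarn without cache-dependency-path) fall back to the single
  // root cache folder; yarn with a cache-dependency-path gets one folder per project
  const cacheDependencyPath = core.getInput('cache-dependency-path') || '';
  return getCacheDirectories(packageManagerInfo, cacheDependencyPath);
}
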
export function isGhes(): boolean {

@ -6,7 +6,8 @@ export enum LockType {

export enum State {
CachePrimaryKey = 'CACHE_KEY',
CacheMatchedKey = 'CACHE_RESULT'
CacheMatchedKey = 'CACHE_RESULT',
CachePaths = 'CACHE_PATHS'
}

export enum Outputs {

@ -61,3 +61,12 @@ async function getToolVersion(tool: string, options: string[]) {
return '';
}
}

export const unique = () => {
const encountered = new Set();
return (value: unknown): boolean => {
if (encountered.has(value)) return false;
encountered.add(value);
return true;
};
};

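unique() returns a stateful predicate, so a fresh unique() is needed for each filter pass; a quick illustration with made-up paths:

import {unique} from './util';

const folders = ['sub-a/.yarn/cache', 'sub-b/.yarn/cache', 'sub-a/.yarn/cache'];
const deduped = folders.filter(unique()); // ['sub-a/.yarn/cache', 'sub-b/.yarn/cache']
console.log(deduped);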