Mirror of https://github.com/actions/setup-python.git (synced 2024-11-24 09:29:08 +00:00)
Use correct Poetry config when collecting Poetry projects (#447)
* Use correct Poetry config when collecting Poetry projects

  When collecting Poetry projects for caching, a '**/poetry.lock' glob is used. However, in order to process the Poetry configuration, the "poetry" command is run from the repo's root directory; this causes Poetry to return an invalid configuration when there is a Poetry project inside an inner directory. Instead of running a single Poetry command, glob for the same pattern, and run a Poetry command for every discovered project.

* Fix typo: saveSatetSpy -> saveStateSpy

* poetry: Support same virtualenv appearing in multiple projects

* Add nested Poetry projects test

* poetry: Set up environment for each project individually

* tests/cache-restore: Do not look for dependency files outside `data`

  When the default dependency path is used for cache distributors, they look for the dependency file in the project's root (including the source code), which leads to tests taking a significant amount of time, especially on Windows runners. We thus hit sporadic test failures. Change the test cases such that dependency files are always searched for inside of `__tests__/data`, ignoring the rest of the project.

* poetry: Simplify `virtualenvs.in-project` boolean check

* README: Explain that poetry might create multiple caches

* poetry: Run `poetry env use` only after cache is loaded

  The virtualenv cache might contain invalid entries, such as virtualenvs built in previous, buggy versions of this action. The `poetry env use` command will recreate virtualenvs in case they are invalid, but it has to be run only *after* the cache is loaded. Refactor `CacheDistributor` a bit such that the validation (and possible recreation) of virtualenvs happens only after the cache is loaded.

* poetry: Bump cache primary key
This commit is contained in:
parent 5ccb29d877
commit 8b89ef08a0

9 changed files with 213 additions and 77 deletions
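The core of the change is to stop running `poetry config --list` once from the repository root and instead run it once per discovered `poetry.lock`, from that project's own directory. A minimal sketch of the idea, using the same `@actions/glob` and `@actions/exec` APIs the action already depends on (the helper name `collectPoetryVirtualenvPaths` and the rough regex parsing are illustrative only, not part of the codebase):

```ts
import * as path from 'path';
import * as glob from '@actions/glob';
import * as exec from '@actions/exec';

// Illustrative helper: for every poetry.lock matched by `patterns`, ask Poetry
// for its configuration *from that project's directory*, so nested projects no
// longer inherit a possibly wrong root-level configuration.
async function collectPoetryVirtualenvPaths(
  patterns = '**/poetry.lock'
): Promise<string[]> {
  // The same virtualenvs directory can back several projects, hence a Set.
  const paths = new Set<string>();
  const globber = await glob.create(patterns);

  for await (const lockFile of globber.globGenerator()) {
    const projectDir = path.dirname(lockFile);
    const {stdout} = await exec.getExecOutput('poetry', ['config', '--list'], {
      cwd: projectDir // key difference: per-project cwd instead of the repo root
    });

    // Very rough parsing for the sketch; the real code parses the full config output.
    const cacheDir = /cache-dir = "(.+)"/.exec(stdout)?.[1] ?? '';
    const venvsPath = /virtualenvs\.path = "(.+?)"/
      .exec(stdout)?.[1]
      .replace('{cache-dir}', cacheDir);
    if (venvsPath) paths.add(venvsPath);
    if (/virtualenvs\.in-project = true/.test(stdout)) {
      paths.add(path.join(projectDir, '.venv'));
    }
  }
  return [...paths];
}
```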
README.md

@@ -55,7 +55,7 @@ The action defaults to searching for a dependency file (`requirements.txt` for p
 - For `pip`, the action will cache the global cache directory
 - For `pipenv`, the action will cache virtualenv directory
-- For `poetry`, the action will cache virtualenv directory
+- For `poetry`, the action will cache virtualenv directories -- one for each poetry project found
 
 **Caching pip dependencies:**
 
__tests__/cache-restore.test.ts

@@ -1,8 +1,10 @@
+import * as path from 'path';
 import * as core from '@actions/core';
 import * as cache from '@actions/cache';
 import * as exec from '@actions/exec';
 import * as io from '@actions/io';
 import {getCacheDistributor} from '../src/cache-distributions/cache-factory';
+import {State} from '../src/cache-distributions/cache-distributor';
 import * as utils from './../src/utils';
 
 describe('restore-cache', () => {
@@ -13,7 +15,7 @@ describe('restore-cache', () => {
   const requirementsLinuxHash =
     '2d0ff7f46b0e120e3d3294db65768b474934242637b9899b873e6283dfd16d7c';
   const poetryLockHash =
-    '571bf984f8d210e6a97f854e479fdd4a2b5af67b5fdac109ec337a0ea16e7836';
+    'f24ea1ad73968e6c8d80c16a093ade72d9332c433aeef979a0dd943e6a99b2ab';
   const poetryConfigOutput = `
 cache-dir = "/Users/patrick/Library/Caches/pypoetry"
 experimental.new-installer = false
@@ -27,7 +29,7 @@ virtualenvs.path = "{cache-dir}/virtualenvs" # /Users/patrick/Library/Caches/py
   let infoSpy: jest.SpyInstance;
   let warningSpy: jest.SpyInstance;
   let debugSpy: jest.SpyInstance;
-  let saveSatetSpy: jest.SpyInstance;
+  let saveStateSpy: jest.SpyInstance;
   let getStateSpy: jest.SpyInstance;
   let setOutputSpy: jest.SpyInstance;
 
@@ -52,8 +54,8 @@ virtualenvs.path = "{cache-dir}/virtualenvs" # /Users/patrick/Library/Caches/py
     debugSpy = jest.spyOn(core, 'debug');
     debugSpy.mockImplementation(input => undefined);
 
-    saveSatetSpy = jest.spyOn(core, 'saveState');
-    saveSatetSpy.mockImplementation(input => undefined);
+    saveStateSpy = jest.spyOn(core, 'saveState');
+    saveStateSpy.mockImplementation(input => undefined);
 
     getStateSpy = jest.spyOn(core, 'getState');
     getStateSpy.mockImplementation(input => undefined);
@@ -100,21 +102,68 @@ virtualenvs.path = "{cache-dir}/virtualenvs" # /Users/patrick/Library/Caches/py
 
   describe('Restore dependencies', () => {
     it.each([
-      ['pip', '3.8.12', undefined, requirementsHash],
-      ['pip', '3.8.12', '**/requirements-linux.txt', requirementsLinuxHash],
+      [
+        'pip',
+        '3.8.12',
+        '__tests__/data/**/requirements.txt',
+        requirementsHash,
+        undefined
+      ],
+      [
+        'pip',
+        '3.8.12',
+        '__tests__/data/**/requirements-linux.txt',
+        requirementsLinuxHash,
+        undefined
+      ],
       [
         'pip',
         '3.8.12',
         '__tests__/data/requirements-linux.txt',
-        requirementsLinuxHash
+        requirementsLinuxHash,
+        undefined
       ],
-      ['pip', '3.8.12', '__tests__/data/requirements.txt', requirementsHash],
-      ['pipenv', '3.9.1', undefined, pipFileLockHash],
-      ['pipenv', '3.9.12', '__tests__/data/requirements.txt', requirementsHash],
-      ['poetry', '3.9.1', undefined, poetryLockHash]
+      [
+        'pip',
+        '3.8.12',
+        '__tests__/data/requirements.txt',
+        requirementsHash,
+        undefined
+      ],
+      [
+        'pipenv',
+        '3.9.1',
+        '__tests__/data/**/Pipfile.lock',
+        pipFileLockHash,
+        undefined
+      ],
+      [
+        'pipenv',
+        '3.9.12',
+        '__tests__/data/requirements.txt',
+        requirementsHash,
+        undefined
+      ],
+      [
+        'poetry',
+        '3.9.1',
+        '__tests__/data/**/poetry.lock',
+        poetryLockHash,
+        [
+          '/Users/patrick/Library/Caches/pypoetry/virtualenvs',
+          path.join(__dirname, 'data', 'inner', '.venv'),
+          path.join(__dirname, 'data', '.venv')
+        ]
+      ]
     ])(
       'restored dependencies for %s by primaryKey',
-      async (packageManager, pythonVersion, dependencyFile, fileHash) => {
+      async (
+        packageManager,
+        pythonVersion,
+        dependencyFile,
+        fileHash,
+        cachePaths
+      ) => {
         const cacheDistributor = getCacheDistributor(
           packageManager,
           pythonVersion,
@@ -123,10 +172,21 @@ virtualenvs.path = "{cache-dir}/virtualenvs" # /Users/patrick/Library/Caches/py
 
         await cacheDistributor.restoreCache();
 
+        if (cachePaths !== undefined) {
+          expect(saveStateSpy).toHaveBeenCalledWith(
+            State.CACHE_PATHS,
+            cachePaths
+          );
+        }
+
         if (process.platform === 'linux' && packageManager === 'pip') {
           expect(infoSpy).toHaveBeenCalledWith(
             `Cache restored from key: setup-python-${process.env['RUNNER_OS']}-20.04-Ubuntu-python-${pythonVersion}-${packageManager}-${fileHash}`
           );
+        } else if (packageManager === 'poetry') {
+          expect(infoSpy).toHaveBeenCalledWith(
+            `Cache restored from key: setup-python-${process.env['RUNNER_OS']}-python-${pythonVersion}-${packageManager}-v2-${fileHash}`
+          );
         } else {
           expect(infoSpy).toHaveBeenCalledWith(
             `Cache restored from key: setup-python-${process.env['RUNNER_OS']}-python-${pythonVersion}-${packageManager}-${fileHash}`
@@ -164,8 +224,13 @@ virtualenvs.path = "{cache-dir}/virtualenvs" # /Users/patrick/Library/Caches/py
 
   describe('Dependencies changed', () => {
     it.each([
-      ['pip', '3.8.12', undefined, pipFileLockHash],
-      ['pip', '3.8.12', '**/requirements-linux.txt', pipFileLockHash],
+      ['pip', '3.8.12', '__tests__/data/**/requirements.txt', pipFileLockHash],
+      [
+        'pip',
+        '3.8.12',
+        '__tests__/data/**/requirements-linux.txt',
+        pipFileLockHash
+      ],
       [
         'pip',
         '3.8.12',
@@ -173,9 +238,9 @@ virtualenvs.path = "{cache-dir}/virtualenvs" # /Users/patrick/Library/Caches/py
         pipFileLockHash
       ],
       ['pip', '3.8.12', '__tests__/data/requirements.txt', pipFileLockHash],
-      ['pipenv', '3.9.1', undefined, requirementsHash],
+      ['pipenv', '3.9.1', '__tests__/data/**/Pipfile.lock', requirementsHash],
       ['pipenv', '3.9.12', '__tests__/data/requirements.txt', requirementsHash],
-      ['poetry', '3.9.1', undefined, requirementsHash]
+      ['poetry', '3.9.1', '__tests__/data/**/poetry.lock', requirementsHash]
     ])(
       'restored dependencies for %s by primaryKey',
       async (packageManager, pythonVersion, dependencyFile, fileHash) => {
__tests__/cache-save.test.ts

@@ -18,7 +18,7 @@ describe('run', () => {
   let infoSpy: jest.SpyInstance;
   let warningSpy: jest.SpyInstance;
   let debugSpy: jest.SpyInstance;
-  let saveSatetSpy: jest.SpyInstance;
+  let saveStateSpy: jest.SpyInstance;
   let getStateSpy: jest.SpyInstance;
   let getInputSpy: jest.SpyInstance;
   let setFailedSpy: jest.SpyInstance;
@@ -43,8 +43,8 @@ describe('run', () => {
     debugSpy = jest.spyOn(core, 'debug');
     debugSpy.mockImplementation(input => undefined);
 
-    saveSatetSpy = jest.spyOn(core, 'saveState');
-    saveSatetSpy.mockImplementation(input => undefined);
+    saveStateSpy = jest.spyOn(core, 'saveState');
+    saveStateSpy.mockImplementation(input => undefined);
 
     getStateSpy = jest.spyOn(core, 'getState');
     getStateSpy.mockImplementation(input => {
__tests__/data/inner/poetry.lock (new symbolic link, generated, 1 line)

@@ -0,0 +1 @@
+../poetry.lock
__tests__/data/inner/pyproject.toml (new symbolic link, 1 line)

@@ -0,0 +1 @@
+../pyproject.toml
dist/cache-save/index.js (vendored, 4 lines changed)

@@ -59711,6 +59711,9 @@ class CacheDistributor {
         this.cacheDependencyPath = cacheDependencyPath;
         this.CACHE_KEY_PREFIX = 'setup-python';
     }
+    handleLoadedCache() {
+        return __awaiter(this, void 0, void 0, function* () { });
+    }
     restoreCache() {
         return __awaiter(this, void 0, void 0, function* () {
             const { primaryKey, restoreKey } = yield this.computeKeys();
@@ -59723,6 +59726,7 @@ class CacheDistributor {
             core.saveState(State.CACHE_PATHS, cachePath);
             core.saveState(State.STATE_CACHE_PRIMARY_KEY, primaryKey);
             const matchedKey = yield cache.restoreCache(cachePath, primaryKey, restoreKey);
+            yield this.handleLoadedCache();
             this.handleMatchResult(matchedKey, primaryKey);
         });
     }
dist/setup/index.js (vendored, 88 lines changed)

@@ -65787,6 +65787,9 @@ class CacheDistributor {
         this.cacheDependencyPath = cacheDependencyPath;
         this.CACHE_KEY_PREFIX = 'setup-python';
     }
+    handleLoadedCache() {
+        return __awaiter(this, void 0, void 0, function* () { });
+    }
     restoreCache() {
         return __awaiter(this, void 0, void 0, function* () {
             const { primaryKey, restoreKey } = yield this.computeKeys();
@@ -65799,6 +65802,7 @@ class CacheDistributor {
             core.saveState(State.CACHE_PATHS, cachePath);
             core.saveState(State.STATE_CACHE_PRIMARY_KEY, primaryKey);
             const matchedKey = yield cache.restoreCache(cachePath, primaryKey, restoreKey);
+            yield this.handleLoadedCache();
             this.handleMatchResult(matchedKey, primaryKey);
         });
     }
@@ -66078,6 +66082,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var m = o[Symbol.asyncIterator], i;
+    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
@@ -66090,38 +66101,48 @@ const core = __importStar(__nccwpck_require__(2186));
 const cache_distributor_1 = __importDefault(__nccwpck_require__(8953));
 const utils_1 = __nccwpck_require__(1314);
 class PoetryCache extends cache_distributor_1.default {
-    constructor(pythonVersion, patterns = '**/poetry.lock') {
+    constructor(pythonVersion, patterns = '**/poetry.lock', poetryProjects = new Set()) {
         super('poetry', patterns);
         this.pythonVersion = pythonVersion;
         this.patterns = patterns;
+        this.poetryProjects = poetryProjects;
     }
     getCacheGlobalDirectories() {
+        var e_1, _a;
         return __awaiter(this, void 0, void 0, function* () {
-            const poetryConfig = yield this.getPoetryConfiguration();
-            const cacheDir = poetryConfig['cache-dir'];
-            const virtualenvsPath = poetryConfig['virtualenvs.path'].replace('{cache-dir}', cacheDir);
-            const paths = [virtualenvsPath];
-            if (poetryConfig['virtualenvs.in-project'] === true) {
-                paths.push(path.join(process.cwd(), '.venv'));
-            }
-            const pythonLocation = yield io.which('python');
-            if (pythonLocation) {
-                core.debug(`pythonLocation is ${pythonLocation}`);
-                const { exitCode, stderr } = yield exec.getExecOutput(`poetry env use ${pythonLocation}`, undefined, { ignoreReturnCode: true });
-                if (exitCode) {
-                    utils_1.logWarning(stderr);
+            // Same virtualenvs path may appear for different projects, hence we use a Set
+            const paths = new Set();
+            const globber = yield glob.create(this.patterns);
+            try {
+                for (var _b = __asyncValues(globber.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
+                    const file = _c.value;
+                    const basedir = path.dirname(file);
+                    core.debug(`Processing Poetry project at ${basedir}`);
+                    this.poetryProjects.add(basedir);
+                    const poetryConfig = yield this.getPoetryConfiguration(basedir);
+                    const cacheDir = poetryConfig['cache-dir'];
+                    const virtualenvsPath = poetryConfig['virtualenvs.path'].replace('{cache-dir}', cacheDir);
+                    paths.add(virtualenvsPath);
+                    if (poetryConfig['virtualenvs.in-project']) {
+                        paths.add(path.join(basedir, '.venv'));
+                    }
                 }
             }
-            else {
-                utils_1.logWarning('python binaries were not found in PATH');
+            catch (e_1_1) { e_1 = { error: e_1_1 }; }
+            finally {
+                try {
+                    if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
                 }
+                finally { if (e_1) throw e_1.error; }
+            }
-            return paths;
+            return [...paths];
         });
     }
     computeKeys() {
         return __awaiter(this, void 0, void 0, function* () {
             const hash = yield glob.hashFiles(this.patterns);
-            const primaryKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-python-${this.pythonVersion}-${this.packageManager}-${hash}`;
+            // "v2" is here to invalidate old caches of this cache distributor, which were created broken:
+            const primaryKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-python-${this.pythonVersion}-${this.packageManager}-v2-${hash}`;
             const restoreKey = undefined;
             return {
                 primaryKey,
@@ -66129,12 +66150,33 @@ class PoetryCache extends cache_distributor_1.default {
             };
         });
     }
-    getPoetryConfiguration() {
+    handleLoadedCache() {
+        const _super = Object.create(null, {
+            handleLoadedCache: { get: () => super.handleLoadedCache }
+        });
         return __awaiter(this, void 0, void 0, function* () {
-            const { stdout, stderr, exitCode } = yield exec.getExecOutput('poetry', [
-                'config',
-                '--list'
-            ]);
+            yield _super.handleLoadedCache.call(this);
+            // After the cache is loaded -- make sure virtualenvs use the correct Python version (the one that we have just installed).
+            // This will handle invalid caches, recreating virtualenvs if necessary.
+            const pythonLocation = yield io.which('python');
+            if (pythonLocation) {
+                core.debug(`pythonLocation is ${pythonLocation}`);
+            }
+            else {
+                utils_1.logWarning('python binaries were not found in PATH');
+                return;
+            }
+            for (const poetryProject of this.poetryProjects) {
+                const { exitCode, stderr } = yield exec.getExecOutput('poetry', ['env', 'use', pythonLocation], { ignoreReturnCode: true, cwd: poetryProject });
+                if (exitCode) {
+                    utils_1.logWarning(stderr);
+                }
+            }
+        });
+    }
+    getPoetryConfiguration(basedir) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const { stdout, stderr, exitCode } = yield exec.getExecOutput('poetry', ['config', '--list'], { cwd: basedir });
             if (exitCode && stderr) {
                 throw new Error('Could not get cache folder path for poetry package manager');
             }
src/cache-distributions/cache-distributor.ts

@@ -19,6 +19,7 @@ abstract class CacheDistributor {
     primaryKey: string;
     restoreKey: string[] | undefined;
   }>;
+  protected async handleLoadedCache() {}
 
   public async restoreCache() {
     const {primaryKey, restoreKey} = await this.computeKeys();
@@ -41,6 +42,8 @@ abstract class CacheDistributor {
       restoreKey
     );
 
+    await this.handleLoadedCache();
+
     this.handleMatchResult(matchedKey, primaryKey);
   }
 
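The two hunks above turn `restoreCache()` into a small template method: the base class pulls the cache and only then calls a `handleLoadedCache()` hook that subclasses may override. A condensed sketch of that shape, with simplified bodies and log messages (the real class lives in `src/cache-distributions/cache-distributor.ts`):

```ts
import * as cache from '@actions/cache';
import * as core from '@actions/core';

// Condensed sketch of the hook pattern introduced in this commit; member names
// follow the real class, but the implementation details are simplified.
abstract class SketchCacheDistributor {
  protected abstract getCacheGlobalDirectories(): Promise<string[]>;
  protected abstract computeKeys(): Promise<{
    primaryKey: string;
    restoreKey: string[] | undefined;
  }>;

  // No-op by default; the Poetry distributor overrides it to run
  // `poetry env use` in every discovered project.
  protected async handleLoadedCache(): Promise<void> {}

  public async restoreCache(): Promise<void> {
    const {primaryKey, restoreKey} = await this.computeKeys();
    const cachePath = await this.getCacheGlobalDirectories();
    const matchedKey = await cache.restoreCache(cachePath, primaryKey, restoreKey);

    // The hook runs only after the cache has been pulled, so a stale or broken
    // virtualenv restored from the cache can be validated (and recreated).
    await this.handleLoadedCache();

    this.handleMatchResult(matchedKey, primaryKey);
  }

  protected handleMatchResult(matchedKey: string | undefined, primaryKey: string): void {
    // Simplified logging for the sketch.
    core.info(
      matchedKey
        ? `Cache restored from key: ${matchedKey}`
        : `Cache not found for key: ${primaryKey}`
    );
  }
}
```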
src/cache-distributions/poetry-cache.ts

@@ -10,52 +10,44 @@ import {logWarning} from '../utils';
 class PoetryCache extends CacheDistributor {
   constructor(
     private pythonVersion: string,
-    protected patterns: string = '**/poetry.lock'
+    protected patterns: string = '**/poetry.lock',
+    protected poetryProjects: Set<string> = new Set<string>()
   ) {
     super('poetry', patterns);
   }
 
   protected async getCacheGlobalDirectories() {
-    const poetryConfig = await this.getPoetryConfiguration();
+    // Same virtualenvs path may appear for different projects, hence we use a Set
+    const paths = new Set<string>();
+    const globber = await glob.create(this.patterns);
 
-    const cacheDir = poetryConfig['cache-dir'];
-    const virtualenvsPath = poetryConfig['virtualenvs.path'].replace(
-      '{cache-dir}',
-      cacheDir
-    );
+    for await (const file of globber.globGenerator()) {
+      const basedir = path.dirname(file);
+      core.debug(`Processing Poetry project at ${basedir}`);
+      this.poetryProjects.add(basedir);
 
-    const paths = [virtualenvsPath];
+      const poetryConfig = await this.getPoetryConfiguration(basedir);
 
-    if (poetryConfig['virtualenvs.in-project'] === true) {
-      paths.push(path.join(process.cwd(), '.venv'));
-    }
-
-    const pythonLocation = await io.which('python');
-
-    if (pythonLocation) {
-      core.debug(`pythonLocation is ${pythonLocation}`);
-      const {
-        exitCode,
-        stderr
-      } = await exec.getExecOutput(
-        `poetry env use ${pythonLocation}`,
-        undefined,
-        {ignoreReturnCode: true}
+      const cacheDir = poetryConfig['cache-dir'];
+      const virtualenvsPath = poetryConfig['virtualenvs.path'].replace(
+        '{cache-dir}',
+        cacheDir
       );
 
-      if (exitCode) {
-        logWarning(stderr);
+      paths.add(virtualenvsPath);
+
+      if (poetryConfig['virtualenvs.in-project']) {
+        paths.add(path.join(basedir, '.venv'));
       }
-    } else {
-      logWarning('python binaries were not found in PATH');
     }
 
-    return paths;
+    return [...paths];
   }
 
   protected async computeKeys() {
     const hash = await glob.hashFiles(this.patterns);
-    const primaryKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-python-${this.pythonVersion}-${this.packageManager}-${hash}`;
+    // "v2" is here to invalidate old caches of this cache distributor, which were created broken:
+    const primaryKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-python-${this.pythonVersion}-${this.packageManager}-v2-${hash}`;
     const restoreKey = undefined;
     return {
       primaryKey,
@@ -63,11 +55,39 @@ class PoetryCache extends CacheDistributor {
     };
   }
 
-  private async getPoetryConfiguration() {
-    const {stdout, stderr, exitCode} = await exec.getExecOutput('poetry', [
-      'config',
-      '--list'
-    ]);
+  protected async handleLoadedCache() {
+    await super.handleLoadedCache();
+
+    // After the cache is loaded -- make sure virtualenvs use the correct Python version (the one that we have just installed).
+    // This will handle invalid caches, recreating virtualenvs if necessary.
+
+    const pythonLocation = await io.which('python');
+    if (pythonLocation) {
+      core.debug(`pythonLocation is ${pythonLocation}`);
+    } else {
+      logWarning('python binaries were not found in PATH');
+      return;
+    }
+
+    for (const poetryProject of this.poetryProjects) {
+      const {exitCode, stderr} = await exec.getExecOutput(
+        'poetry',
+        ['env', 'use', pythonLocation],
+        {ignoreReturnCode: true, cwd: poetryProject}
+      );
+
+      if (exitCode) {
+        logWarning(stderr);
+      }
+    }
+  }
+
+  private async getPoetryConfiguration(basedir: string) {
+    const {stdout, stderr, exitCode} = await exec.getExecOutput(
+      'poetry',
+      ['config', '--list'],
+      {cwd: basedir}
+    );
 
     if (exitCode && stderr) {
       throw new Error(
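For reference, the `-v2-` segment added to `computeKeys()` above is what invalidates caches written by the earlier, broken distributor. A tiny illustration of the resulting key shape, using placeholder values (in the real code the inputs come from the runner environment and from `glob.hashFiles('**/poetry.lock')`):

```ts
// Placeholder values for illustration only.
const CACHE_KEY_PREFIX = 'setup-python';
const runnerOs = 'Linux';
const pythonVersion = '3.9.1';
const lockHash = 'f24ea1ad7396...';

// Old key shape:           setup-python-Linux-python-3.9.1-poetry-<hash>
// New key shape (this PR): setup-python-Linux-python-3.9.1-poetry-v2-<hash>
const primaryKey = `${CACHE_KEY_PREFIX}-${runnerOs}-python-${pythonVersion}-poetry-v2-${lockHash}`;
console.log(primaryKey);
```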