Mirror of https://github.com/gradle/gradle-build-action.git, synced 2024-11-25 09:40:57 -05:00
Allow entries with same Job ID to match in different workflows
Previously, the workflow name was always included when matching a cache entry for the current job. This can be overly restrictive when job definitions are shared between different workflows. The workflow name is still encoded in the cache entry key, but not in the restore key used to search for entries with a matching job.

Fixes #1017
parent 270f30ba56
commit 32bab5b15a
7 changed files with 19 additions and 80 deletions
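To make the description above concrete, here is a simplified sketch of how the cache key is assembled after this change, based on the TypeScript source changes shown further down this page. The parameterisation, the `cacheKeyBase` value, the MD5 stand-in for `hashStrings`, and the returned object shape are illustrative assumptions, not the action's literal implementation.

```typescript
import * as crypto from 'crypto'

// Stand-in for the action's hashStrings helper (hash algorithm assumed for this sketch).
function hashStrings(values: string[]): string {
    const hash = crypto.createHash('md5')
    for (const value of values) hash.update(value)
    return hash.digest('hex')
}

// Key layout, most general to most specific:
//   <base>|<os>|<job id>[<hash(workflow name, matrix)>]-<git sha>
// The workflow name now only survives inside the hashed job-instance segment, so a
// restore key that stops at the job id can match entries written by other workflows.
function generateCacheKeySketch(
    cacheKeyBase: string,   // e.g. protocol version + cache name (assumed)
    os: string,             // RUNNER_OS in the real action
    jobId: string,          // github.context.job
    workflowName: string,   // github.context.workflow
    matrixJson: string,     // the workflow-job-context (matrix) parameter
    gitSha: string          // github.context.sha
) {
    const cacheKeyForEnvironment = `${cacheKeyBase}|${os}`
    const cacheKeyForJob = `${cacheKeyForEnvironment}|${jobId}` // no workflow name here any more
    const cacheKeyForJobContext = `${cacheKeyForJob}[${hashStrings([workflowName, matrixJson])}]`
    const cacheKey = `${cacheKeyForJobContext}-${gitSha}`       // exact match on Git SHA
    return {
        key: cacheKey,
        // Successive prefixes, matching the documented restore-key precedence below.
        restoreKeys: [cacheKeyForJobContext, cacheKeyForJob, cacheKeyForEnvironment]
    }
}
```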
@@ -259,9 +259,9 @@ This allows the most recent state to always be available in the GitHub actions cache
 ### Finding a matching cache entry
 
 In most cases, no exact match will exist for the cache key. Instead, the Gradle User Home will be restored for the closest matching cache entry, using a set of "restore keys". The entries will be matched with the following precedence:
-- An exact match on OS, workflow, job, matrix and Git SHA
-- The most recent entry saved for the same OS, workflow, job and matrix values
-- The most recent entry saved for the same OS, workflow and job
+- An exact match on OS, workflow name, job id, matrix and Git SHA
+- The most recent entry saved for the same OS, workflow name, job id and matrix values
+- The most recent entry saved for the same OS and job id
 - The most recent entry saved for the same OS
 
 Due to branch scoping of cache entries, the above match will be first performed for entries from the same branch, and then for the default ('main') branch.
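To illustrate the updated precedence list, here are hypothetical keys for a Linux runner and a job id `build` defined in two different workflows, with no matrix. All concrete values (the `gradle-home-v1` base, the hashes, the SHAs) are made up; only the shape follows the documented scheme.

```typescript
// Full cache keys written by the same job id in two different workflows (hypothetical values).
// Only the bracketed job-instance hash, which now encodes the workflow name, differs.
const keyFromCiWorkflow = 'gradle-home-v1|Linux|build[2f7d1a0c]-c0ffee12'
const keyFromNightlyWorkflow = 'gradle-home-v1|Linux|build[9b3e44aa]-deadbeef'

// Restore keys tried in order when no exact match exists for the current key:
const restoreKeys = [
    'gradle-home-v1|Linux|build[2f7d1a0c]', // same OS, workflow name, job id and matrix
    'gradle-home-v1|Linux|build',           // same OS and job id, from any workflow
    'gradle-home-v1|Linux'                  // same OS
]
```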
dist/main/index.js (vendored, 26 changed lines)
@@ -139505,7 +139505,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.tryDelete = exports.handleCacheFailure = exports.cacheDebug = exports.saveCache = exports.restoreCache = exports.hashStrings = exports.hashFileNames = exports.getUniqueLabelForJobInstanceValues = exports.getUniqueLabelForJobInstance = exports.getCacheKeyForJob = exports.getCacheKeyPrefix = exports.generateCacheKey = exports.CacheKey = exports.isCacheCleanupEnabled = exports.isCacheDebuggingEnabled = exports.isCacheOverwriteExisting = exports.isCacheWriteOnly = exports.isCacheReadOnly = exports.isCacheDisabled = void 0;
+exports.tryDelete = exports.handleCacheFailure = exports.cacheDebug = exports.saveCache = exports.restoreCache = exports.hashStrings = exports.hashFileNames = exports.getCacheKeyPrefix = exports.generateCacheKey = exports.CacheKey = exports.isCacheCleanupEnabled = exports.isCacheDebuggingEnabled = exports.isCacheOverwriteExisting = exports.isCacheWriteOnly = exports.isCacheReadOnly = exports.isCacheDisabled = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const cache = __importStar(__nccwpck_require__(27799));
 const github = __importStar(__nccwpck_require__(95438));
@@ -139577,34 +139577,16 @@ function getCacheKeyEnvironment() {
     return process.env[CACHE_KEY_OS_VAR] || runnerOs;
 }
 function getCacheKeyJob() {
-    return process.env[CACHE_KEY_JOB_VAR] || getCacheKeyForJob(github.context.workflow, github.context.job);
+    return process.env[CACHE_KEY_JOB_VAR] || github.context.job;
 }
-function getCacheKeyForJob(workflowName, jobId) {
-    const sanitizedWorkflow = workflowName.replace(/,/g, '').toLowerCase();
-    return `${sanitizedWorkflow}-${jobId}`;
-}
-exports.getCacheKeyForJob = getCacheKeyForJob;
 function getCacheKeyJobInstance() {
     const override = process.env[CACHE_KEY_JOB_INSTANCE_VAR];
     if (override) {
         return override;
     }
+    const workflowName = github.context.workflow;
     const workflowJobContext = params.getJobMatrix();
-    return hashStrings([workflowJobContext]);
-}
-function getUniqueLabelForJobInstance() {
-    return getUniqueLabelForJobInstanceValues(github.context.workflow, github.context.job, params.getJobMatrix());
-}
-exports.getUniqueLabelForJobInstance = getUniqueLabelForJobInstance;
-function getUniqueLabelForJobInstanceValues(workflow, jobId, matrixJson) {
-    const matrix = JSON.parse(matrixJson);
-    const matrixString = Object.values(matrix).join('-');
-    const label = matrixString ? `${workflow}-${jobId}-${matrixString}` : `${workflow}-${jobId}`;
-    return sanitize(label);
-}
-exports.getUniqueLabelForJobInstanceValues = getUniqueLabelForJobInstanceValues;
-function sanitize(value) {
-    return value.replace(/[^a-zA-Z0-9_-]/g, '').toLowerCase();
+    return hashStrings([workflowName, workflowJobContext]);
 }
 function getCacheKeyJobExecution() {
     return process.env[CACHE_KEY_JOB_EXECUTION_VAR] || github.context.sha;
dist/main/index.js.map (vendored, 2 changed lines)
File diff suppressed because one or more lines are too long
dist/post/index.js (vendored, 26 changed lines)
@@ -136958,7 +136958,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.tryDelete = exports.handleCacheFailure = exports.cacheDebug = exports.saveCache = exports.restoreCache = exports.hashStrings = exports.hashFileNames = exports.getUniqueLabelForJobInstanceValues = exports.getUniqueLabelForJobInstance = exports.getCacheKeyForJob = exports.getCacheKeyPrefix = exports.generateCacheKey = exports.CacheKey = exports.isCacheCleanupEnabled = exports.isCacheDebuggingEnabled = exports.isCacheOverwriteExisting = exports.isCacheWriteOnly = exports.isCacheReadOnly = exports.isCacheDisabled = void 0;
+exports.tryDelete = exports.handleCacheFailure = exports.cacheDebug = exports.saveCache = exports.restoreCache = exports.hashStrings = exports.hashFileNames = exports.getCacheKeyPrefix = exports.generateCacheKey = exports.CacheKey = exports.isCacheCleanupEnabled = exports.isCacheDebuggingEnabled = exports.isCacheOverwriteExisting = exports.isCacheWriteOnly = exports.isCacheReadOnly = exports.isCacheDisabled = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const cache = __importStar(__nccwpck_require__(27799));
 const github = __importStar(__nccwpck_require__(95438));
@@ -137030,34 +137030,16 @@ function getCacheKeyEnvironment() {
     return process.env[CACHE_KEY_OS_VAR] || runnerOs;
 }
 function getCacheKeyJob() {
-    return process.env[CACHE_KEY_JOB_VAR] || getCacheKeyForJob(github.context.workflow, github.context.job);
+    return process.env[CACHE_KEY_JOB_VAR] || github.context.job;
 }
-function getCacheKeyForJob(workflowName, jobId) {
-    const sanitizedWorkflow = workflowName.replace(/,/g, '').toLowerCase();
-    return `${sanitizedWorkflow}-${jobId}`;
-}
-exports.getCacheKeyForJob = getCacheKeyForJob;
 function getCacheKeyJobInstance() {
     const override = process.env[CACHE_KEY_JOB_INSTANCE_VAR];
     if (override) {
         return override;
     }
+    const workflowName = github.context.workflow;
     const workflowJobContext = params.getJobMatrix();
-    return hashStrings([workflowJobContext]);
-}
-function getUniqueLabelForJobInstance() {
-    return getUniqueLabelForJobInstanceValues(github.context.workflow, github.context.job, params.getJobMatrix());
-}
-exports.getUniqueLabelForJobInstance = getUniqueLabelForJobInstance;
-function getUniqueLabelForJobInstanceValues(workflow, jobId, matrixJson) {
-    const matrix = JSON.parse(matrixJson);
-    const matrixString = Object.values(matrix).join('-');
-    const label = matrixString ? `${workflow}-${jobId}-${matrixString}` : `${workflow}-${jobId}`;
-    return sanitize(label);
-}
-exports.getUniqueLabelForJobInstanceValues = getUniqueLabelForJobInstanceValues;
-function sanitize(value) {
-    return value.replace(/[^a-zA-Z0-9_-]/g, '').toLowerCase();
+    return hashStrings([workflowName, workflowJobContext]);
 }
 function getCacheKeyJobExecution() {
     return process.env[CACHE_KEY_JOB_EXECUTION_VAR] || github.context.sha;
dist/post/index.js.map (vendored, 2 changed lines)
File diff suppressed because one or more lines are too long
@@ -86,10 +86,10 @@ export function generateCacheKey(cacheName: string): CacheKey {
     // At the most general level, share caches for all executions on the same OS
     const cacheKeyForEnvironment = `${cacheKeyBase}|${getCacheKeyEnvironment()}`
 
-    // Prefer caches that run this job
+    // Then prefer caches that run job with the same ID
    const cacheKeyForJob = `${cacheKeyForEnvironment}|${getCacheKeyJob()}`
 
-    // Prefer (even more) jobs that run this job with the same context (matrix)
+    // Prefer (even more) jobs that run this job in the same workflow with the same context (matrix)
     const cacheKeyForJobContext = `${cacheKeyForJob}[${getCacheKeyJobInstance()}]`
 
     // Exact match on Git SHA
@@ -113,12 +113,7 @@ function getCacheKeyEnvironment(): string {
 }
 
 function getCacheKeyJob(): string {
-    return process.env[CACHE_KEY_JOB_VAR] || getCacheKeyForJob(github.context.workflow, github.context.job)
-}
-
-export function getCacheKeyForJob(workflowName: string, jobId: string): string {
-    const sanitizedWorkflow = workflowName.replace(/,/g, '').toLowerCase()
-    return `${sanitizedWorkflow}-${jobId}`
+    return process.env[CACHE_KEY_JOB_VAR] || github.context.job
 }
 
 function getCacheKeyJobInstance(): string {
@@ -127,25 +122,11 @@ function getCacheKeyJobInstance(): string {
         return override
     }
 
-    // By default, we hash the full `matrix` data for the run, to uniquely identify this job invocation
+    // By default, we hash the workflow name and the full `matrix` data for the run, to uniquely identify this job invocation
     // The only way we can obtain the `matrix` data is via the `workflow-job-context` parameter in action.yml.
+    const workflowName = github.context.workflow
     const workflowJobContext = params.getJobMatrix()
-    return hashStrings([workflowJobContext])
-}
-
-export function getUniqueLabelForJobInstance(): string {
-    return getUniqueLabelForJobInstanceValues(github.context.workflow, github.context.job, params.getJobMatrix())
-}
-
-export function getUniqueLabelForJobInstanceValues(workflow: string, jobId: string, matrixJson: string): string {
-    const matrix = JSON.parse(matrixJson)
-    const matrixString = Object.values(matrix).join('-')
-    const label = matrixString ? `${workflow}-${jobId}-${matrixString}` : `${workflow}-${jobId}`
-    return sanitize(label)
-}
-
-function sanitize(value: string): string {
-    return value.replace(/[^a-zA-Z0-9_-]/g, '').toLowerCase()
+    return hashStrings([workflowName, workflowJobContext])
 }
 
 function getCacheKeyJobExecution(): string {
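One way to read the source change above: the job-level key segment no longer depends on the workflow, while the hashed job-instance segment now does, which is also why the comma-stripping sanitization of the workflow name (and its test, deleted below) could be removed. A small hypothetical check, again approximating `hashStrings` with an MD5 stand-in:

```typescript
import * as crypto from 'crypto'

// Stand-in for the action's hashStrings helper; MD5 is an assumption of this sketch.
const hashStrings = (values: string[]): string =>
    crypto.createHash('md5').update(values.join('')).digest('hex')

const matrixJson = '{}' // hypothetical: the job defines no matrix

// Two workflows defining the same job id share the job-level segment of the key,
// so restore keys truncated at the job id match entries from either workflow...
const jobSegment = 'build'

// ...while the full keys still differ, because the workflow name feeds the instance hash.
// Arbitrary names (even ones containing commas) need no sanitization, since they are only hashed.
const ciInstance = hashStrings(['CI', matrixJson])
const otherInstance = hashStrings(['Workflow, with,commas', matrixJson])
console.log(jobSegment, ciInstance !== otherInstance) // "build true"
```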
@@ -17,10 +17,4 @@ describe('cacheUtils-utils', () => {
             expect(posixHash).toBe(windowsHash)
         })
     })
-    describe('sanitizes workflow name in cache key', () => {
-        it('with comma', () => {
-            const cacheKey = cacheUtils.getCacheKeyForJob("Workflow, with,commas", "JOB_ID")
-            expect(cacheKey).toBe('workflow withcommas-JOB_ID')
-        })
-    })
 })