import * as core from '@actions/core'
import * as cache from '@actions/cache'
import * as github from '@actions/github'
import * as exec from '@actions/exec'
import * as crypto from 'crypto'
import * as path from 'path'
import * as fs from 'fs'

import {CacheEntryListener} from './cache-reporting'

const CACHE_PROTOCOL_VERSION = 'v6-'

const JOB_CONTEXT_PARAMETER = 'workflow-job-context'
const CACHE_DISABLED_PARAMETER = 'cache-disabled'
const CACHE_READONLY_PARAMETER = 'cache-read-only'
const CACHE_WRITEONLY_PARAMETER = 'cache-write-only'
const STRICT_CACHE_MATCH_PARAMETER = 'gradle-home-cache-strict-match'
const CACHE_CLEANUP_ENABLED_PARAMETER = 'gradle-home-cache-cleanup'
const CACHE_DEBUG_VAR = 'GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED'

const CACHE_KEY_PREFIX_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX'
const CACHE_KEY_OS_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_ENVIRONMENT'
const CACHE_KEY_JOB_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB'
const CACHE_KEY_JOB_INSTANCE_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB_INSTANCE'
const CACHE_KEY_JOB_EXECUTION_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB_EXECUTION'

const SEGMENT_DOWNLOAD_TIMEOUT_VAR = 'SEGMENT_DOWNLOAD_TIMEOUT_MINS'
const SEGMENT_DOWNLOAD_TIMEOUT_DEFAULT = 10 * 60 * 1000 // 10 minutes, in milliseconds
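
// The GRADLE_BUILD_ACTION_CACHE_* overrides above are read from the environment at runtime.
// Illustrative workflow usage (values are hypothetical, not defaults of this action):
//   env:
//     GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: 'true'
//     GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: 'my-team-'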

export function isCacheDisabled(): boolean {
    if (!cache.isFeatureAvailable()) {
        return true
    }
    return core.getBooleanInput(CACHE_DISABLED_PARAMETER)
}

export function isCacheReadOnly(): boolean {
    return !isCacheWriteOnly() && core.getBooleanInput(CACHE_READONLY_PARAMETER)
}

export function isCacheWriteOnly(): boolean {
    return core.getBooleanInput(CACHE_WRITEONLY_PARAMETER)
}

export function isCacheDebuggingEnabled(): boolean {
    return process.env[CACHE_DEBUG_VAR] ? true : false
}

export function isCacheCleanupEnabled(): boolean {
    return core.getBooleanInput(CACHE_CLEANUP_ENABLED_PARAMETER)
}

/**
 * Represents a key used to restore a cache entry.
 * The GitHub Actions cache will first try for an exact match on the key.
 * If that fails, it will try for a prefix match on any of the restoreKeys.
 */
export class CacheKey {
    key: string
    restoreKeys: string[]

    constructor(key: string, restoreKeys: string[]) {
        this.key = key
        this.restoreKeys = restoreKeys
    }
}

/**
 * Generates a cache key specific to the current job execution.
 * The key is constructed from the following inputs (with some user overrides):
 * - The cache protocol version
 * - The name of the cache
 * - The runner operating system
 * - The name of the Job being executed
 * - The matrix values for the Job being executed (job context)
 * - The SHA of the commit being executed
 *
 * Caches are restored by trying to match these key prefixes, in order:
 * - The full key with SHA
 * - A previous key for this Job + matrix
 * - Any previous key for this Job (any matrix)
 * - Any previous key for this cache on the current OS
 */
export function generateCacheKey(cacheName: string): CacheKey {
    const cacheKeyBase = `${getCacheKeyPrefix()}${CACHE_PROTOCOL_VERSION}${cacheName}`

    // At the most general level, share caches for all executions on the same OS
    const cacheKeyForEnvironment = `${cacheKeyBase}|${getCacheKeyEnvironment()}`

    // Prefer caches from previous runs of this job
    const cacheKeyForJob = `${cacheKeyForEnvironment}|${getCacheKeyJob()}`

    // Prefer (even more) caches from runs of this job with the same context (matrix)
    const cacheKeyForJobContext = `${cacheKeyForJob}[${getCacheKeyJobInstance()}]`

    // Exact match on Git SHA
    const cacheKey = `${cacheKeyForJobContext}-${getCacheKeyJobExecution()}`

    if (core.getBooleanInput(STRICT_CACHE_MATCH_PARAMETER)) {
        return new CacheKey(cacheKey, [cacheKeyForJobContext])
    }

    return new CacheKey(cacheKey, [cacheKeyForJobContext, cacheKeyForJob, cacheKeyForEnvironment])
}
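
// Illustrative example of the resulting key structure (values are hypothetical, assuming the
// default empty key prefix): for cacheName 'gradle-home' on a Linux runner, in a job named 'build',
// the generated key has the form
//     v6-gradle-home|Linux|build[<matrix-hash>]-<git-sha>
// with restore keys falling back through
//     v6-gradle-home|Linux|build[<matrix-hash>]
//     v6-gradle-home|Linux|build
//     v6-gradle-home|Linux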

export function getCacheKeyPrefix(): string {
    // Prefix can be used to force change all cache keys (defaults to empty)
    return process.env[CACHE_KEY_PREFIX_VAR] || ''
}

function getCacheKeyEnvironment(): string {
    // The runner OS is used by default; can be overridden via the env var
    const runnerOs = process.env['RUNNER_OS'] || ''
    return process.env[CACHE_KEY_OS_VAR] || runnerOs
}

function getCacheKeyJob(): string {
    // The workflow Job name is used by default; can be overridden via the env var
    return process.env[CACHE_KEY_JOB_VAR] || github.context.job
}

function getCacheKeyJobInstance(): string {
    const override = process.env[CACHE_KEY_JOB_INSTANCE_VAR]
    if (override) {
        return override
    }

    // By default, we hash the full `matrix` data for the run, to uniquely identify this job invocation
    // The only way we can obtain the `matrix` data is via the `workflow-job-context` parameter in action.yml.
    const workflowJobContext = core.getInput(JOB_CONTEXT_PARAMETER)
    return hashStrings([workflowJobContext])
}

function getCacheKeyJobExecution(): string {
    // Used to associate a cache key with a particular execution (default is bound to the git commit sha)
    return process.env[CACHE_KEY_JOB_EXECUTION_VAR] || github.context.sha
}

// Hash a set of file names, normalizing path separators to '/' so that the digest is stable across operating systems
export function hashFileNames(fileNames: string[]): string {
    return hashStrings(fileNames.map(x => x.replace(new RegExp(`\\${path.sep}`, 'g'), '/')))
}

export function hashStrings(values: string[]): string {
    const hash = crypto.createHash('md5')
    for (const value of values) {
        hash.update(value)
    }
    return hash.digest('hex')
}

export async function restoreCache(
    cachePath: string[],
    cacheKey: string,
    cacheRestoreKeys: string[],
    listener: CacheEntryListener
): Promise<cache.CacheEntry | undefined> {
    listener.markRequested(cacheKey, cacheRestoreKeys)
    try {
        // Only override the segment download timeout if the SEGMENT_DOWNLOAD_TIMEOUT_MINS env var has NOT been set
        const cacheRestoreOptions = process.env[SEGMENT_DOWNLOAD_TIMEOUT_VAR]
            ? {}
            : {segmentTimeoutInMs: SEGMENT_DOWNLOAD_TIMEOUT_DEFAULT}
        const restoredEntry = await cache.restoreCache(cachePath, cacheKey, cacheRestoreKeys, cacheRestoreOptions)
        if (restoredEntry !== undefined) {
            listener.markRestored(restoredEntry.key, restoredEntry.size)
        }
        return restoredEntry
    } catch (error) {
        listener.markNotRestored((error as Error).message)
        handleCacheFailure(error, `Failed to restore ${cacheKey}`)
        return undefined
    }
}
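
// Usage sketch (illustrative only; the actual cache paths and listener wiring live in the
// callers of this module):
//   const cacheKey = generateCacheKey('gradle-home')
//   const entry = await restoreCache(['<path-to-gradle-user-home>'], cacheKey.key, cacheKey.restoreKeys, listener)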

export async function saveCache(cachePath: string[], cacheKey: string, listener: CacheEntryListener): Promise<void> {
    try {
        const savedEntry = await cache.saveCache(cachePath, cacheKey)
        listener.markSaved(savedEntry.key, savedEntry.size)
    } catch (error) {
        if (error instanceof cache.ReserveCacheError) {
            listener.markAlreadyExists(cacheKey)
        } else {
            listener.markNotSaved((error as Error).message)
        }
        handleCacheFailure(error, `Failed to save cache entry with path '${cachePath}' and key: ${cacheKey}`)
    }
}

export function cacheDebug(message: string): void {
    if (isCacheDebuggingEnabled()) {
        core.info(message)
    } else {
        core.debug(message)
    }
}

export function handleCacheFailure(error: unknown, message: string): void {
    if (error instanceof cache.ValidationError) {
        // Fail on cache validation errors
        throw error
    }
    if (error instanceof cache.ReserveCacheError) {
        // Reserve cache errors are expected if the artifact has been previously cached
        core.info(`${message}: ${error}`)
    } else {
        // Warn on all other errors
        core.warning(`${message}: ${error}`)
        if (error instanceof Error && error.stack) {
            cacheDebug(error.stack)
        }
    }
}

/**
 * Attempt to delete a file or directory, waiting to allow locks to be released
 */
export async function tryDelete(file: string): Promise<void> {
    const maxAttempts = 5
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        if (!fs.existsSync(file)) {
            return
        }
        try {
            const stat = fs.lstatSync(file)
            if (stat.isDirectory()) {
                fs.rmdirSync(file, {recursive: true})
            } else {
                fs.unlinkSync(file)
            }
            return
        } catch (error) {
            if (attempt === maxAttempts) {
                core.warning(`Failed to delete ${file}, which will impact caching.
It is likely locked by another process. Output of 'jps -lm':
${await getJavaProcesses()}`)
                throw error
            } else {
                cacheDebug(`Attempt to delete ${file} failed. Will try again.`)
                await delay(1000)
            }
        }
    }
}

async function delay(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms))
}

async function getJavaProcesses(): Promise<string> {
    const jpsOutput = await exec.getExecOutput('jps', ['-lm'])
    return jpsOutput.stdout
}