Remove all action sources: these have been migrated to 'gradle/actions'

Starting with the `v3` release, this action will delegate to `gradle/actions/setup-gradle`.
All repository sources have been migrated to https://github.com/gradle/actions, and
are no longer required in this repository.

Workflows testing the action functionality have been retained, but workflows involving building
or inspecting code have been migrated to `gradle/actions` along with the sources.
daz 2024-01-25 19:59:53 -07:00
parent 2a8bfcf231
commit 272883a7ba
97 changed files with 1 addition and 302990 deletions
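
In practical terms, a consumer workflow on the `v3` release can reference either this action or the `gradle/actions/setup-gradle` action it delegates to. A minimal sketch (the workflow name and trigger below are illustrative only):

```yaml
name: Example consumer build   # hypothetical workflow, for illustration
on: push
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # From v3, gradle/gradle-build-action delegates to gradle/actions/setup-gradle,
      # so either action can be referenced in this step.
      - uses: gradle/actions/setup-gradle@v3
      - run: ./gradlew build
```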


@ -1,3 +0,0 @@
dist/
lib/
node_modules/


@ -1,56 +0,0 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/recommended"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"i18n-text/no-en": "off",
"no-unused-vars": "off",
"no-shadow": "off",
"sort-imports": "off",
"@typescript-eslint/no-unused-vars": ["error", { "argsIgnorePattern": "^_" }],
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"camelcase": "off",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-shadow": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": ["error", {"ignoreStringArrays": true}],
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}


@ -1,20 +0,0 @@
name: 'Build and upload distribution'
# Builds the action distribution and uploads it as an artifact for later download
runs:
using: "composite"
steps:
- uses: actions/setup-node@v4
with:
node-version: 20
- name: Build distribution
shell: bash
run: |
npm -v
node -v
npm install
npm run build
- name: Upload distribution
uses: actions/upload-artifact@v4
with:
name: dist
path: dist/
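
A caller job checks out the sources and invokes this composite action directly, as the CI workflows later in this commit do. A minimal sketch (workflow name and trigger are illustrative only):

```yaml
name: Example build-dist caller   # hypothetical workflow, for illustration
on: push
jobs:
  build-distribution:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Build and upload distribution
        uses: ./.github/actions/build-dist
```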


@ -1,12 +0,0 @@
name: 'Download dist'
# Downloads a 'dist' directory artifact that was uploaded in an earlier step
# We control this with an environment variable to allow for easier global configuration.
runs:
using: "composite"
steps:
- name: Download dist
if: ${{ env.DOWNLOAD_DIST == 'true' }}
uses: actions/download-artifact@v4
with:
name: dist
path: dist/
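
Callers opt in by setting the `DOWNLOAD_DIST` environment variable, which the reusable test workflows below derive from their `download-dist` input. A minimal sketch of a hypothetical caller (workflow name and trigger are illustrative only):

```yaml
name: Example download-dist caller   # hypothetical workflow, for illustration
on: workflow_dispatch
jobs:
  integ-test:
    runs-on: ubuntu-latest
    env:
      DOWNLOAD_DIST: 'true'   # the download step is a no-op unless this equals 'true'
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Download distribution if required
        uses: ./.github/actions/download-dist
      - name: Setup Gradle from the downloaded dist
        uses: ./
```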


@ -15,18 +15,6 @@ updates:
patterns:
- "*"
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "weekly"
ignore:
- dependency-name: "@types/node" # Breaking change: update with next major release
- dependency-name: "@octokit/rest" # Tied to node version
groups:
npm-dependencies:
patterns:
- "*"
- package-ecosystem: "gradle"
directory: ".github/workflow-samples/gradle-plugin"
registries:
@ -63,9 +51,3 @@ updates:
- gradle-plugin-portal
schedule:
interval: "daily"
- package-ecosystem: "gradle"
directory: "test/init-scripts"
registries:
- gradle-plugin-portal
schedule:
interval: "daily"


@ -1,39 +0,0 @@
name: CI-codeql
on:
push:
branches: [ "main" ]
pull_request:
branches: [ "main" ]
schedule:
- cron: '25 23 * * 2'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'javascript-typescript' ]
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config: |
paths:
- src
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3


@ -1,20 +0,0 @@
# Dependency Review Action
#
# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging.
#
# Source repository: https://github.com/actions/dependency-review-action
# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
name: CI-dependency-review
on: [pull_request]
permissions:
contents: read
jobs:
dependency-review:
runs-on: ubuntu-latest
steps:
- name: 'Checkout Repository'
uses: actions/checkout@v4
- name: 'Dependency Review'
uses: actions/dependency-review-action@v4


@ -20,11 +20,6 @@ jobs:
with:
cache-key-prefix: ${{github.run_number}}-
cache-cleanup:
uses: ./.github/workflows/integ-test-cache-cleanup.yml
with:
cache-key-prefix: ${{github.run_number}}-
caching-config:
uses: ./.github/workflows/integ-test-caching-config.yml
with:


@ -1,26 +0,0 @@
name: CI-init-script-check
on:
push:
paths:
- '.github/workflows/ci-init-script-check.yml'
- 'src/resources/init-scripts/**'
- 'test/init-scripts/**'
workflow_dispatch:
jobs:
test-init-scripts:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: temurin
java-version: 8
- name: Setup Gradle
uses: gradle/gradle-build-action@v2.12.0 # Use a released version to avoid breakages
- name: Run integration tests
working-directory: test/init-scripts
run: ./gradlew check


@ -8,148 +8,84 @@ on:
- release/**
jobs:
build-distribution:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Build and upload distribution
uses: ./.github/actions/build-dist
run-unit-tests:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Configure Gradle as default for unit test
uses: ./
with:
gradle-version: 8.5
- name: Run tests
run: |
npm install
npm run all
action-inputs:
needs: build-distribution
uses: ./.github/workflows/integ-test-action-inputs.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
cache-cleanup:
needs: build-distribution
uses: ./.github/workflows/integ-test-cache-cleanup.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
cache-key-prefix: ${{github.run_number}}- # Requires a fresh cache entry each run
caching-config:
needs: build-distribution
uses: ./.github/workflows/integ-test-caching-config.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
dependency-graph:
needs: build-distribution
uses: ./.github/workflows/integ-test-dependency-graph.yml
permissions:
contents: write
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
dependency-graph-failures:
needs: build-distribution
uses: ./.github/workflows/integ-test-dependency-graph-failures.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
execution-with-caching:
needs: build-distribution
uses: ./.github/workflows/integ-test-execution-with-caching.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
execution:
needs: build-distribution
uses: ./.github/workflows/integ-test-execution.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
develocity-injection:
needs: build-distribution
uses: ./.github/workflows/integ-test-inject-develocity.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
secrets:
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_SOLUTIONS_ACCESS_TOKEN }}
provision-gradle-versions:
needs: build-distribution
uses: ./.github/workflows/integ-test-provision-gradle-versions.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
restore-configuration-cache:
needs: build-distribution
uses: ./.github/workflows/integ-test-restore-configuration-cache.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
secrets:
GRADLE_ENCRYPTION_KEY: ${{ secrets.GRADLE_ENCRYPTION_KEY }}
restore-containerized-gradle-home:
needs: build-distribution
uses: ./.github/workflows/integ-test-restore-containerized-gradle-home.yml
with:
download-dist: true
restore-custom-gradle-home:
needs: build-distribution
uses: ./.github/workflows/integ-test-restore-custom-gradle-home.yml
with:
download-dist: true
restore-gradle-home:
needs: build-distribution
uses: ./.github/workflows/integ-test-restore-gradle-home.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
restore-java-toolchain:
needs: build-distribution
uses: ./.github/workflows/integ-test-restore-java-toolchain.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
sample-kotlin-dsl:
needs: build-distribution
uses: ./.github/workflows/integ-test-sample-kotlin-dsl.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
sample-gradle-plugin:
needs: build-distribution
uses: ./.github/workflows/integ-test-sample-gradle-plugin.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true
toolchain-detection:
needs: build-distribution
uses: ./.github/workflows/integ-test-detect-java-toolchains.yml
with:
runner-os: '["ubuntu-latest"]'
download-dist: true


@ -1,44 +0,0 @@
name: CI-verify-outputs
on:
pull_request:
types:
- assigned
- review_requested
push:
branches:
- main
- release/**
- dependabot/**
jobs:
check:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 20
- name: Build
run: |
npm -v
node -v
npm install
npm run build
- name: Compare the expected and actual dist/ directories
run: |
if [ "$(git diff --ignore-space-at-eol dist/ | wc -l)" -gt "0" ]; then
echo "Detected uncommitted changes after build. See status below:"
git diff
exit 1
fi
id: diff
# If index.js was different than expected, upload the expected version as an artifact
- uses: actions/upload-artifact@v4
if: ${{ failure() && steps.diff.conclusion == 'failure' }}
with:
name: dist
path: dist/


@ -13,11 +13,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Build distribution
shell: bash
run: |
npm install
npm run build
- name: Setup Gradle
uses: ./
- name: Build kotlin-dsl project
@ -47,11 +42,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Build distribution
shell: bash
run: |
npm install
npm run build
- name: Setup Gradle
uses: ./
with:
@ -68,11 +58,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Build distribution
shell: bash
run: |
npm install
npm run build
- name: Pre-create Gradle User Home
shell: bash
run: |


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: action-inputs-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -26,8 +22,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Invoke with multi-line arguments
uses: ./
with:


@ -1,87 +0,0 @@
name: Test cache cleanup
on:
workflow_call:
inputs:
cache-key-prefix:
type: string
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: integ-test-cache-cleanup-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
jobs:
full-build:
strategy:
matrix:
os: ${{fromJSON(inputs.runner-os)}}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
cache-read-only: false # For testing, allow writing cache entries on non-default branches
- name: Build with 3.1
working-directory: test/jest/resources/cache-cleanup
run: gradle --no-daemon --build-cache -Dcommons_math3_version="3.1" build
# Second build will use the cache from the first build, but cleanup should remove unused artifacts
assemble-build:
needs: full-build
strategy:
matrix:
os: ${{fromJSON(inputs.runner-os)}}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
cache-read-only: false
gradle-home-cache-cleanup: true
- name: Build with 3.1.1
working-directory: test/jest/resources/cache-cleanup
run: gradle --no-daemon --build-cache -Dcommons_math3_version="3.1.1" build
check-clean-cache:
needs: assemble-build
strategy:
matrix:
os: ${{fromJSON(inputs.runner-os)}}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
cache-read-only: true
- name: Report Gradle User Home
run: du -hc ~/.gradle/caches/modules-2
- name: Verify cleaned cache
shell: bash
run: |
if [ ! -e ~/.gradle/caches/modules-2/files-2.1/org.apache.commons/commons-math3/3.1.1 ]; then
echo "::error ::Should find commons-math3 3.1.1 in cache"
exit 1
fi
if [ -e ~/.gradle/caches/modules-2/files-2.1/org.apache.commons/commons-math3/3.1 ]; then
echo "::error ::Should NOT find commons-math3 3.1 in cache"
exit 1
fi


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: action-inputs-caching-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -26,8 +22,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -54,8 +48,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -80,8 +72,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -103,8 +93,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Create dummy Gradle User Home
run: mkdir -p ~/.gradle/caches
- name: Setup Gradle
@ -131,8 +119,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -152,8 +138,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:


@ -7,13 +7,9 @@ on:
type: string
runner-os:
type: string
default: '["ubuntu-latest"]'
download-dist:
type: boolean
default: false
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: dependency-graph-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -23,8 +19,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle for dependency-graph generate
uses: ./
with:
@ -47,8 +41,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle for dependency-graph generate
uses: ./
with:
@ -70,8 +62,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle for dependency-graph generate
uses: ./
with:
@ -90,8 +80,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle for dependency-graph generate
uses: ./
with:


@ -8,15 +8,11 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
permissions:
contents: write
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: dependency-graph-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -29,8 +25,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle for dependency-graph generate
uses: ./
with:
@ -47,8 +41,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle for dependency-graph generate
uses: ./
with:
@ -63,8 +55,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Submit dependency graphs
uses: ./
with:
@ -78,8 +68,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle for dependency-graph generate
uses: ./
with:
@ -118,8 +106,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle for dependency-graph generate
uses: ./
with:


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: detect-java-toolchain-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -28,8 +24,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
- name: List detected toolchains
@ -57,8 +51,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java 20
uses: actions/setup-java@v4
with:


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: execution-with-caching-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -26,8 +22,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Execute Gradle build
uses: ./
with:
@ -45,8 +39,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Execute Gradle build
uses: ./
with:


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: execution-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -31,8 +27,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Test use defined Gradle version
uses: ./
with:
@ -63,8 +57,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java
uses: actions/setup-java@v4
with:


@ -8,15 +8,11 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
secrets:
DEVELOCITY_ACCESS_KEY:
required: true
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: provision-gradle-versions-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -36,8 +32,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java
uses: actions/setup-java@v4
with:
@ -68,8 +62,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java
uses: actions/setup-java@v4
with:


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: provision-gradle-versions-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -31,8 +27,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle with v6.9
uses: ./
with:
@ -83,8 +77,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java
uses: actions/setup-java@v4
with:


@ -8,15 +8,11 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
secrets:
GRADLE_ENCRYPTION_KEY:
required: true
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: restore-configuration-cache-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -31,8 +27,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java to ensure consistency
uses: actions/setup-java@v4
with:
@ -59,8 +53,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java to ensure consistency
uses: actions/setup-java@v4
with:
@ -97,8 +89,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java to ensure consistency
uses: actions/setup-java@v4
with:
@ -126,8 +116,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java to ensure consistency
uses: actions/setup-java@v4
with:
@ -154,8 +142,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java to ensure consistency
uses: actions/setup-java@v4
with:
@ -183,8 +169,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java to ensure consistency
uses: actions/setup-java@v4
with:


@ -5,12 +5,8 @@ on:
inputs:
cache-key-prefix:
type: string
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: restore-custom-gradle-home-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -21,8 +17,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java
uses: actions/setup-java@v4
with:
@ -44,8 +38,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Java
uses: actions/setup-java@v4
with:


@ -5,12 +5,8 @@ on:
inputs:
cache-key-prefix:
type: string
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: restore-custom-gradle-home-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -24,8 +20,6 @@ jobs:
echo "GRADLE_USER_HOME=$GITHUB_WORKSPACE/gradle-user-home" >> $GITHUB_ENV
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -45,8 +39,6 @@ jobs:
echo "GRADLE_USER_HOME=$GITHUB_WORKSPACE/gradle-user-home" >> $GITHUB_ENV
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -66,8 +58,6 @@ jobs:
echo "GRADLE_USER_HOME=$GITHUB_WORKSPACE/gradle-user-home" >> $GITHUB_ENV
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: restore-gradle-home-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_KEY_JOB: restore-gradle-home
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -27,8 +23,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -47,8 +41,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -67,8 +59,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -87,8 +77,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle with no extracted cache entries restored
uses: ./
env:
@ -109,8 +97,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Pre-create Gradle User Home
shell: bash
run: |


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: restore-java-toolchain-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -26,8 +22,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -46,8 +40,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: sample-gradle-plugin-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -26,8 +22,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -45,8 +39,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:


@ -8,12 +8,8 @@ on:
runner-os:
type: string
default: '["ubuntu-latest", "windows-latest", "macos-latest"]'
download-dist:
type: boolean
default: false
env:
DOWNLOAD_DIST: ${{ inputs.download-dist }}
GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX: sample-kotlin-dsl-${{ inputs.cache-key-prefix }}
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
@ -26,8 +22,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:
@ -45,8 +39,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Download distribution if required
uses: ./.github/actions/download-dist
- name: Setup Gradle
uses: ./
with:


@ -1,28 +0,0 @@
name: Purge old workflow runs
on:
workflow_dispatch:
inputs:
days:
description: 'Purge runs older than this number of days'
required: true
default: 30
minimum_runs:
description: 'The minimum number of runs to keep for each workflow.'
required: true
default: 6
delete_workflow_pattern:
description: 'The name of the workflow. If not set, all workflows will be targeted.'
required: false
jobs:
del_runs:
runs-on: ubuntu-latest
steps:
- name: Purge workflow runs
uses: Mattraks/delete-workflow-runs@v2
with:
token: ${{ github.token }}
repository: ${{ github.repository }}
retain_days: ${{ github.event.inputs.days }}
keep_minimum_runs: ${{ github.event.inputs.minimum_runs }}
delete_workflow_pattern: ${{ github.event.inputs.delete_workflow_pattern }}

1 .nvmrc

@ -1 +0,0 @@
v16


@ -1,3 +0,0 @@
dist/
lib/
node_modules/


@ -1,11 +0,0 @@
{
"printWidth": 120,
"tabWidth": 4,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"bracketSpacing": false,
"arrowParens": "avoid",
"parser": "typescript"
}


@ -1,3 +0,0 @@
# Configuration file for asdf version manager
nodejs 20.10.0
gradle 8.5


@ -1,14 +0,0 @@
### How to merge a Dependabot PR
The "distribution" for a GitHub Action is checked into the repository itself.
In the case of the `gradle-build-action`, the transpiled sources are committed to the `dist` directory.
Any production dependencies are inlined into the distribution.
So if a Dependabot PR updates a production dependency (or a dev dependency that changes the distribution, like the TypeScript compiler),
then a manual step is required to rebuild the dist and commit.
The simplest process to follow is:
1. Check out the Dependabot branch locally, e.g. `git checkout dependabot/npm_and_yarn/actions/github-5.1.0`
2. Run `npm install` to download the new dependencies and install them locally
3. Run `npm run build` to regenerate the distribution
4. Push the changes to the dependabot branch
5. If/when the checks pass, you can merge the dependabot PR

142003 dist/main/index.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

139100 dist/post/index.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -1,10 +0,0 @@
module.exports = {
clearMocks: true,
moduleFileExtensions: ['js', 'ts', 'json'],
testEnvironment: 'node',
testMatch: ['**/*.test.ts'],
transform: {
'^.+\\.ts$': 'ts-jest'
},
verbose: true
}

15866 package-lock.json generated

File diff suppressed because it is too large


@ -1,62 +0,0 @@
{
"name": "gradle-build-action",
"version": "1.0.0",
"private": true,
"description": "Execute Gradle Build",
"scripts": {
"postinstall": "patch-package",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"compile-main": "ncc build src/main.ts --out dist/main --source-map --no-source-map-register",
"compile-post": "ncc build src/post.ts --out dist/post --source-map --no-source-map-register",
"compile": "npm-run-all --parallel compile-*",
"check": "npm-run-all --parallel format lint",
"test": "jest",
"build": "npm run check && npm run compile",
"all": "npm run build && npm test"
},
"repository": {
"type": "git",
"url": "git+https://github.com/gradle/gradle-build-action.git"
},
"keywords": [
"github",
"actions",
"github-actions",
"gradle"
],
"license": "MIT",
"dependencies": {
"@actions/artifact": "2.1.0",
"@actions/cache": "3.2.3",
"@actions/core": "1.10.1",
"@actions/exec": "1.1.1",
"@actions/github": "6.0.0",
"@actions/glob": "0.4.0",
"@actions/http-client": "2.2.0",
"@actions/tool-cache": "2.0.1",
"@octokit/rest": "19.0.13",
"@octokit/webhooks-types": "7.3.1",
"semver": "7.5.4",
"string-argv": "0.3.2"
},
"devDependencies": {
"@types/jest": "29.5.11",
"@types/node": "20.10.0",
"@types/unzipper": "0.10.9",
"@typescript-eslint/parser": "6.19.1",
"@vercel/ncc": "0.38.1",
"eslint": "8.56.0",
"eslint-plugin-github": "4.10.1",
"eslint-plugin-jest": "27.6.3",
"eslint-plugin-prettier": "5.1.3",
"jest": "29.7.0",
"js-yaml": "4.1.0",
"npm-run-all": "4.1.5",
"patch-package": "8.0.0",
"prettier": "3.2.4",
"ts-jest": "29.1.2",
"typescript": "5.3.3"
}
}


@ -1,113 +0,0 @@
diff --git a/node_modules/@actions/cache/lib/cache.d.ts b/node_modules/@actions/cache/lib/cache.d.ts
index 4658366..b796e58 100644
--- a/node_modules/@actions/cache/lib/cache.d.ts
+++ b/node_modules/@actions/cache/lib/cache.d.ts
@@ -21,7 +21,7 @@ export declare function isFeatureAvailable(): boolean;
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
-export declare function restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[], options?: DownloadOptions, enableCrossOsArchive?: boolean): Promise<string | undefined>;
+export declare function restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[], options?: DownloadOptions, enableCrossOsArchive?: boolean): Promise<CacheEntry | undefined>;
/**
* Saves a list of files with the specified key
*
@@ -31,4 +31,12 @@ export declare function restoreCache(paths: string[], primaryKey: string, restor
* @param options cache upload options
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
-export declare function saveCache(paths: string[], key: string, options?: UploadOptions, enableCrossOsArchive?: boolean): Promise<number>;
+export declare function saveCache(paths: string[], key: string, options?: UploadOptions, enableCrossOsArchive?: boolean): Promise<CacheEntry>;
+
+// PATCHED: Add `CacheEntry` as return type for save/restore functions
+// This allows us to track and report on cache entry sizes.
+export declare class CacheEntry {
+ key: string;
+ size?: number;
+ constructor(key: string, size?: number);
+}
diff --git a/node_modules/@actions/cache/lib/cache.js b/node_modules/@actions/cache/lib/cache.js
index 9d636aa..a176bd7 100644
--- a/node_modules/@actions/cache/lib/cache.js
+++ b/node_modules/@actions/cache/lib/cache.js
@@ -127,18 +127,21 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
core.info('Cache restored successfully');
- return cacheEntry.cacheKey;
- }
- catch (error) {
- const typedError = error;
- if (typedError.name === ValidationError.name) {
- throw error;
- }
- else {
- // Supress all non-validation cache related errors because caching should be optional
- core.warning(`Failed to restore: ${error.message}`);
- }
+
+ // PATCHED - Return more inforamtion about restored entry
+ return new CacheEntry(cacheEntry.cacheKey, archiveFileSize);;
}
+ // PATCHED - propagate errors
+ // catch (error) {
+ // const typedError = error;
+ // if (typedError.name === ValidationError.name) {
+ // throw error;
+ // }
+ // else {
+ // // Supress all non-validation cache related errors because caching should be optional
+ // core.warning(`Failed to restore: ${error.message}`);
+ // }
+ // }
finally {
// Try to delete the archive to save space
try {
@@ -206,19 +209,23 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+
+ // PATCHED - Return more inforamtion about saved entry
+ return new CacheEntry(key, archiveFileSize);
}
- catch (error) {
- const typedError = error;
- if (typedError.name === ValidationError.name) {
- throw error;
- }
- else if (typedError.name === ReserveCacheError.name) {
- core.info(`Failed to save: ${typedError.message}`);
- }
- else {
- core.warning(`Failed to save: ${typedError.message}`);
- }
- }
+ // PATCHED - propagate errors
+ // catch (error) {
+ // const typedError = error;
+ // if (typedError.name === ValidationError.name) {
+ // throw error;
+ // }
+ // else if (typedError.name === ReserveCacheError.name) {
+ // core.info(`Failed to save: ${typedError.message}`);
+ // }
+ // else {
+ // core.warning(`Failed to save: ${typedError.message}`);
+ // }
+ // }
finally {
// Try to delete the archive to save space
try {
@@ -232,4 +239,11 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
});
}
exports.saveCache = saveCache;
+class CacheEntry {
+ constructor(key, size) {
+ this.key = key;
+ this.size = size;
+ }
+}
+exports.CacheEntry = CacheEntry;
//# sourceMappingURL=cache.js.map
\ No newline at end of file


@ -1,48 +0,0 @@
diff --git a/node_modules/@azure/logger/dist-esm/src/debug.js b/node_modules/@azure/logger/dist-esm/src/debug.js
index d202779..30e8313 100644
--- a/node_modules/@azure/logger/dist-esm/src/debug.js
+++ b/node_modules/@azure/logger/dist-esm/src/debug.js
@@ -7,7 +7,7 @@ let enabledNamespaces = [];
let skippedNamespaces = [];
const debuggers = [];
if (debugEnvVariable) {
- enable(debugEnvVariable);
+ // enable(debugEnvVariable);
}
const debugObj = Object.assign((namespace) => {
return createDebugger(namespace);
diff --git a/node_modules/@azure/logger/dist-esm/src/index.js b/node_modules/@azure/logger/dist-esm/src/index.js
index cc25720..2925db5 100644
--- a/node_modules/@azure/logger/dist-esm/src/index.js
+++ b/node_modules/@azure/logger/dist-esm/src/index.js
@@ -20,7 +20,7 @@ if (logLevelFromEnv) {
setLogLevel(logLevelFromEnv);
}
else {
- console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`);
+ console.error(`AZURE_LOG_LEVEL set to unknown log level; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`);
}
}
/**
diff --git a/node_modules/@azure/logger/dist/index.js b/node_modules/@azure/logger/dist/index.js
index 81e97c3..a415e2f 100644
--- a/node_modules/@azure/logger/dist/index.js
+++ b/node_modules/@azure/logger/dist/index.js
@@ -21,7 +21,7 @@ let enabledNamespaces = [];
let skippedNamespaces = [];
const debuggers = [];
if (debugEnvVariable) {
- enable(debugEnvVariable);
+ // enable(debugEnvVariable);
}
const debugObj = Object.assign((namespace) => {
return createDebugger(namespace);
@@ -125,7 +125,7 @@ if (logLevelFromEnv) {
setLogLevel(logLevelFromEnv);
}
else {
- console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`);
+ console.error(`AZURE_LOG_LEVEL set to unknown log level; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`);
}
}
/**


@ -1,27 +0,0 @@
import * as fs from 'fs'
import * as path from 'path'
export interface BuildResult {
get rootProjectName(): string
get rootProjectDir(): string
get requestedTasks(): string
get gradleVersion(): string
get gradleHomeDir(): string
get buildFailed(): boolean
get buildScanUri(): string
get buildScanFailed(): boolean
}
export function loadBuildResults(): BuildResult[] {
const buildResultsDir = path.resolve(process.env['RUNNER_TEMP']!, '.build-results')
if (!fs.existsSync(buildResultsDir)) {
return []
}
return fs.readdirSync(buildResultsDir).map(file => {
// Every file in the .build-results dir should be a BuildResults JSON
const filePath = path.join(buildResultsDir, file)
const content = fs.readFileSync(filePath, 'utf8')
return JSON.parse(content) as BuildResult
})
}


@ -1,33 +0,0 @@
import * as core from '@actions/core'
import {
getBuildScanPublishEnabled,
getBuildScanTermsOfServiceUrl,
getBuildScanTermsOfServiceAgree
} from './input-params'
export function setup(): void {
if (getBuildScanPublishEnabled() && verifyTermsOfServiceAgreement()) {
maybeExportVariable('DEVELOCITY_INJECTION_ENABLED', 'true')
maybeExportVariable('DEVELOCITY_PLUGIN_VERSION', '3.16.1')
maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.12.1')
maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_URL', getBuildScanTermsOfServiceUrl())
maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_AGREE', getBuildScanTermsOfServiceAgree())
}
}
function verifyTermsOfServiceAgreement(): boolean {
if (
getBuildScanTermsOfServiceUrl() !== 'https://gradle.com/terms-of-service' ||
getBuildScanTermsOfServiceAgree() !== 'yes'
) {
core.warning(`Terms of service must be agreed in order to publish build scans.`)
return false
}
return true
}
function maybeExportVariable(variableName: string, value: unknown): void {
if (!process.env[variableName]) {
core.exportVariable(variableName, value)
}
}


@ -1,274 +0,0 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as glob from '@actions/glob'
import path from 'path'
import fs from 'fs'
import * as params from './input-params'
import {CacheListener} from './cache-reporting'
import {saveCache, restoreCache, cacheDebug, isCacheDebuggingEnabled, tryDelete, generateCacheKey} from './cache-utils'
import {GradleHomeEntryExtractor, ConfigurationCacheEntryExtractor} from './cache-extract-entries'
const RESTORED_CACHE_KEY_KEY = 'restored-cache-key'
export const META_FILE_DIR = '.gradle-build-action'
export class GradleStateCache {
private cacheName: string
private cacheDescription: string
protected readonly userHome: string
protected readonly gradleUserHome: string
constructor(userHome: string, gradleUserHome: string) {
this.userHome = userHome
this.gradleUserHome = gradleUserHome
this.cacheName = 'gradle'
this.cacheDescription = 'Gradle User Home'
}
init(): void {
this.initializeGradleUserHome()
// Export the GRADLE_ENCRYPTION_KEY variable if provided
const encryptionKey = params.getCacheEncryptionKey()
if (encryptionKey) {
core.exportVariable('GRADLE_ENCRYPTION_KEY', encryptionKey)
}
}
cacheOutputExists(): boolean {
const cachesDir = path.resolve(this.gradleUserHome, 'caches')
if (fs.existsSync(cachesDir)) {
cacheDebug(`Cache output exists at ${cachesDir}`)
return true
}
return false
}
/**
* Restores the cache entry, finding the closest match to the currently running job.
*/
async restore(listener: CacheListener): Promise<void> {
const entryListener = listener.entry(this.cacheDescription)
const cacheKey = generateCacheKey(this.cacheName)
cacheDebug(
`Requesting ${this.cacheDescription} with
key:${cacheKey.key}
restoreKeys:[${cacheKey.restoreKeys}]`
)
const cacheResult = await restoreCache(this.getCachePath(), cacheKey.key, cacheKey.restoreKeys, entryListener)
if (!cacheResult) {
core.info(`${this.cacheDescription} cache not found. Will initialize empty.`)
return
}
core.saveState(RESTORED_CACHE_KEY_KEY, cacheResult.key)
core.info(`Restored ${this.cacheDescription} from cache key: ${cacheResult.key}`)
try {
await this.afterRestore(listener)
} catch (error) {
core.warning(`Restore ${this.cacheDescription} failed in 'afterRestore': ${error}`)
}
}
/**
* Restore any extracted cache entries after the main Gradle User Home entry is restored.
*/
async afterRestore(listener: CacheListener): Promise<void> {
await this.debugReportGradleUserHomeSize('as restored from cache')
await new GradleHomeEntryExtractor(this.gradleUserHome).restore(listener)
await new ConfigurationCacheEntryExtractor(this.gradleUserHome).restore(listener)
await this.debugReportGradleUserHomeSize('after restoring common artifacts')
}
/**
* Saves the cache entry based on the current cache key unless the cache was restored with the exact key,
* in which case we cannot overwrite it.
*
* If the cache entry was restored with a partial match on a restore key, then
* it is saved with the exact key.
*/
async save(listener: CacheListener): Promise<void> {
const cacheKey = generateCacheKey(this.cacheName).key
const restoredCacheKey = core.getState(RESTORED_CACHE_KEY_KEY)
const gradleHomeEntryListener = listener.entry(this.cacheDescription)
if (restoredCacheKey && cacheKey === restoredCacheKey) {
core.info(`Cache hit occurred on the cache key ${cacheKey}, not saving cache.`)
for (const entryListener of listener.cacheEntries) {
if (entryListener === gradleHomeEntryListener) {
entryListener.markNotSaved('cache key not changed')
} else {
entryListener.markNotSaved(`referencing '${this.cacheDescription}' cache entry not saved`)
}
}
return
}
try {
await this.beforeSave(listener)
} catch (error) {
core.warning(`Save ${this.cacheDescription} failed in 'beforeSave': ${error}`)
return
}
core.info(`Caching ${this.cacheDescription} with cache key: ${cacheKey}`)
const cachePath = this.getCachePath()
await saveCache(cachePath, cacheKey, gradleHomeEntryListener)
return
}
/**
* Extract and save any defined extracted cache entries prior to the main Gradle User Home entry being saved.
*/
async beforeSave(listener: CacheListener): Promise<void> {
await this.debugReportGradleUserHomeSize('before saving common artifacts')
await this.deleteExcludedPaths()
await Promise.all([
new GradleHomeEntryExtractor(this.gradleUserHome).extract(listener),
new ConfigurationCacheEntryExtractor(this.gradleUserHome).extract(listener)
])
await this.debugReportGradleUserHomeSize(
"after extracting common artifacts (only 'caches' and 'notifications' will be stored)"
)
}
/**
* Delete any file paths that are excluded by the `gradle-home-cache-excludes` parameter.
*/
private async deleteExcludedPaths(): Promise<void> {
const rawPaths: string[] = params.getCacheExcludes()
rawPaths.push('caches/*/cc-keystore')
const resolvedPaths = rawPaths.map(x => path.resolve(this.gradleUserHome, x))
for (const p of resolvedPaths) {
cacheDebug(`Removing excluded path: ${p}`)
const globber = await glob.create(p, {
implicitDescendants: false
})
for (const toDelete of await globber.glob()) {
cacheDebug(`Removing excluded file: ${toDelete}`)
await tryDelete(toDelete)
}
}
}
/**
* Determines the paths within Gradle User Home to cache.
* By default, this is the 'caches' and 'notifications' directories,
* but this can be overridden by the `gradle-home-cache-includes` parameter.
*/
protected getCachePath(): string[] {
const rawPaths: string[] = params.getCacheIncludes()
rawPaths.push(META_FILE_DIR)
const resolvedPaths = rawPaths.map(x => this.resolveCachePath(x))
cacheDebug(`Using cache paths: ${resolvedPaths}`)
return resolvedPaths
}
private resolveCachePath(rawPath: string): string {
if (rawPath.startsWith('!')) {
const resolved = this.resolveCachePath(rawPath.substring(1))
return `!${resolved}`
}
return path.resolve(this.gradleUserHome, rawPath)
}
private initializeGradleUserHome(): void {
// Create a directory for storing action metadata
const actionCacheDir = path.resolve(this.gradleUserHome, '.gradle-build-action')
fs.mkdirSync(actionCacheDir, {recursive: true})
this.copyInitScripts()
// Copy the default toolchain definitions to `~/.m2/toolchains.xml`
this.registerToolchains()
}
private copyInitScripts(): void {
// Copy init scripts from src/resources to Gradle UserHome
const initScriptsDir = path.resolve(this.gradleUserHome, 'init.d')
fs.mkdirSync(initScriptsDir, {recursive: true})
const initScriptFilenames = [
'gradle-build-action.build-result-capture.init.gradle',
'gradle-build-action.build-result-capture-service.plugin.groovy',
'gradle-build-action.github-dependency-graph.init.gradle',
'gradle-build-action.github-dependency-graph-gradle-plugin-apply.groovy',
'gradle-build-action.inject-develocity.init.gradle'
]
for (const initScriptFilename of initScriptFilenames) {
const initScriptContent = this.readResourceFileAsString('init-scripts', initScriptFilename)
const initScriptPath = path.resolve(initScriptsDir, initScriptFilename)
fs.writeFileSync(initScriptPath, initScriptContent)
}
}
private registerToolchains(): void {
const preInstalledToolchains = this.readResourceFileAsString('toolchains.xml')
const m2dir = path.resolve(this.userHome, '.m2')
const toolchainXmlTarget = path.resolve(m2dir, 'toolchains.xml')
if (!fs.existsSync(toolchainXmlTarget)) {
// Write a new toolchains.xml file if it doesn't exist
fs.mkdirSync(m2dir, {recursive: true})
fs.writeFileSync(toolchainXmlTarget, preInstalledToolchains)
core.info(`Wrote default JDK locations to ${toolchainXmlTarget}`)
} else {
// Merge into an existing toolchains.xml file
const existingToolchainContent = fs.readFileSync(toolchainXmlTarget, 'utf8')
const appendedContent = preInstalledToolchains.split('<toolchains>').pop()!
const mergedContent = existingToolchainContent.replace('</toolchains>', appendedContent)
fs.writeFileSync(toolchainXmlTarget, mergedContent)
core.info(`Merged default JDK locations into ${toolchainXmlTarget}`)
}
}
private readResourceFileAsString(...paths: string[]): string {
// Resolving relative to __dirname will allow node to find the resource at runtime
const absolutePath = path.resolve(__dirname, '..', '..', 'src', 'resources', ...paths)
return fs.readFileSync(absolutePath, 'utf8')
}
/**
* When cache debugging is enabled, this method will give a detailed report
* of the Gradle User Home contents.
*/
private async debugReportGradleUserHomeSize(label: string): Promise<void> {
if (!isCacheDebuggingEnabled()) {
return
}
if (!fs.existsSync(this.gradleUserHome)) {
return
}
const result = await exec.getExecOutput('du', ['-h', '-c', '-t', '5M'], {
cwd: this.gradleUserHome,
silent: true,
ignoreReturnCode: true
})
core.info(`Gradle User Home (directories >5M): ${label}`)
core.info(
result.stdout
.trimEnd()
.replace(/\t/g, ' ')
.split('\n')
.map(it => {
return ` ${it}`
})
.join('\n')
)
core.info('-----------------------')
}
}


@ -1,69 +0,0 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as glob from '@actions/glob'
import fs from 'fs'
import path from 'path'
export class CacheCleaner {
private readonly gradleUserHome: string
private readonly tmpDir: string
constructor(gradleUserHome: string, tmpDir: string) {
this.gradleUserHome = gradleUserHome
this.tmpDir = tmpDir
}
async prepare(): Promise<void> {
// Reset the file-access journal so that files appear not to have been used recently
fs.rmSync(path.resolve(this.gradleUserHome, 'caches/journal-1'), {recursive: true, force: true})
fs.mkdirSync(path.resolve(this.gradleUserHome, 'caches/journal-1'), {recursive: true})
fs.writeFileSync(
path.resolve(this.gradleUserHome, 'caches/journal-1/file-access.properties'),
'inceptionTimestamp=0'
)
// Set the modification time of all files to the past: this timestamp is used when there is no matching entry in the journal
await this.ageAllFiles()
// Touch all 'gc' files so that cache cleanup won't run immediately.
await this.touchAllFiles('gc.properties')
}
async forceCleanup(): Promise<void> {
// Age all 'gc' files so that cache cleanup will run immediately.
await this.ageAllFiles('gc.properties')
// Run a dummy Gradle build to trigger cache cleanup
const cleanupProjectDir = path.resolve(this.tmpDir, 'dummy-cleanup-project')
fs.mkdirSync(cleanupProjectDir, {recursive: true})
fs.writeFileSync(
path.resolve(cleanupProjectDir, 'settings.gradle'),
'rootProject.name = "dummy-cleanup-project"'
)
fs.writeFileSync(path.resolve(cleanupProjectDir, 'build.gradle'), 'task("noop") {}')
const gradleCommand = `gradle -g ${this.gradleUserHome} --no-daemon --build-cache --no-scan --quiet -DGITHUB_DEPENDENCY_GRAPH_ENABLED=false noop`
await exec.exec(gradleCommand, [], {
cwd: cleanupProjectDir
})
}
private async ageAllFiles(fileName = '*'): Promise<void> {
core.debug(`Aging all files in Gradle User Home with name ${fileName}`)
await this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date(0))
}
private async touchAllFiles(fileName = '*'): Promise<void> {
core.debug(`Touching all files in Gradle User Home with name ${fileName}`)
await this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date())
}
private async setUtimes(pattern: string, timestamp: Date): Promise<void> {
const globber = await glob.create(pattern, {
implicitDescendants: false
})
for await (const file of globber.globGenerator()) {
fs.utimesSync(file, timestamp, timestamp)
}
}
}


@ -1,467 +0,0 @@
import path from 'path'
import fs from 'fs'
import * as core from '@actions/core'
import * as glob from '@actions/glob'
import * as semver from 'semver'
import * as params from './input-params'
import {META_FILE_DIR} from './cache-base'
import {CacheEntryListener, CacheListener} from './cache-reporting'
import {cacheDebug, getCacheKeyPrefix, hashFileNames, restoreCache, saveCache, tryDelete} from './cache-utils'
import {BuildResult, loadBuildResults} from './build-results'
const SKIP_RESTORE_VAR = 'GRADLE_BUILD_ACTION_SKIP_RESTORE'
/**
* Represents the result of attempting to load or store an extracted cache entry.
* An undefined cacheKey indicates that the operation did not succeed.
* The collected results are then used to populate the `cache-metadata.json` file for later use.
*/
class ExtractedCacheEntry {
artifactType: string
pattern: string
cacheKey: string | undefined
constructor(artifactType: string, pattern: string, cacheKey: string | undefined) {
this.artifactType = artifactType
this.pattern = pattern
this.cacheKey = cacheKey
}
}
/**
* Representation of all of the extracted cache entries for this Gradle User Home.
* This object is persisted to a JSON file in the Gradle User Home directory for storing,
* and subsequently used to restore the Gradle User Home.
*/
class ExtractedCacheEntryMetadata {
entries: ExtractedCacheEntry[] = []
}
/**
* The specification for a type of extracted cache entry.
*/
class ExtractedCacheEntryDefinition {
artifactType: string
pattern: string
bundle: boolean
uniqueFileNames = true
notCacheableReason: string | undefined
constructor(artifactType: string, pattern: string, bundle: boolean) {
this.artifactType = artifactType
this.pattern = pattern
this.bundle = bundle
}
/**
* Indicate that the file names matching the cache entry pattern are NOT sufficient to uniquely identify the contents.
* If the file names are sufficient, then we use a hash of the file names to identify the entry.
* With non-unique-file-names, we hash the file contents to identify the cache entry.
*/
withNonUniqueFileNames(): ExtractedCacheEntryDefinition {
this.uniqueFileNames = false
return this
}
/**
* Specify that the cache entry, should not be saved for some reason, even though the contents exist.
* This is used to prevent configuration-cache entries being cached when they were generated by Gradle < 8.6,
*/
notCacheableBecause(reason: string): ExtractedCacheEntryDefinition {
this.notCacheableReason = reason
return this
}
}
/**
* Caches and restores the entire Gradle User Home directory, extracting entries containing common artifacts
* for more efficient storage.
*/
abstract class AbstractEntryExtractor {
protected readonly gradleUserHome: string
private extractorName: string
constructor(gradleUserHome: string, extractorName: string) {
this.gradleUserHome = gradleUserHome
this.extractorName = extractorName
}
/**
* Restores any artifacts that were cached separately, based on the information in the `cache-metadata.json` file.
* Each extracted cache entry is restored in parallel, except when debugging is enabled.
*/
async restore(listener: CacheListener): Promise<void> {
const previouslyExtractedCacheEntries = this.loadExtractedCacheEntries()
const processes: Promise<ExtractedCacheEntry>[] = []
for (const cacheEntry of previouslyExtractedCacheEntries) {
const artifactType = cacheEntry.artifactType
const entryListener = listener.entry(cacheEntry.pattern)
// Handle case where the extracted-cache-entry definitions have been changed
const skipRestore = process.env[SKIP_RESTORE_VAR] || ''
if (skipRestore.includes(artifactType)) {
core.info(`Not restoring extracted cache entry for ${artifactType}`)
entryListener.markRequested('SKIP_RESTORE')
} else {
processes.push(
this.awaitForDebugging(
this.restoreExtractedCacheEntry(
artifactType,
cacheEntry.cacheKey!,
cacheEntry.pattern,
entryListener
)
)
)
}
}
this.saveMetadataForCacheResults(await Promise.all(processes))
}
private async restoreExtractedCacheEntry(
artifactType: string,
cacheKey: string,
pattern: string,
listener: CacheEntryListener
): Promise<ExtractedCacheEntry> {
const restoredEntry = await restoreCache([pattern], cacheKey, [], listener)
if (restoredEntry) {
core.info(`Restored ${artifactType} with key ${cacheKey} to ${pattern}`)
return new ExtractedCacheEntry(artifactType, pattern, cacheKey)
} else {
core.info(`Did not restore ${artifactType} with key ${cacheKey} to ${pattern}`)
return new ExtractedCacheEntry(artifactType, pattern, undefined)
}
}
/**
* Saves any artifacts that are configured to be cached separately, based on the extracted cache entry definitions.
* Each entry is extracted and saved in parallel, except when debugging is enabled.
*/
async extract(listener: CacheListener): Promise<void> {
// Load the cache entry definitions (from config) and the previously restored entries (from persisted metadata file)
const cacheEntryDefinitions = this.getExtractedCacheEntryDefinitions()
cacheDebug(
`Extracting cache entries for ${this.extractorName}: ${JSON.stringify(cacheEntryDefinitions, null, 2)}`
)
const previouslyRestoredEntries = this.loadExtractedCacheEntries()
const cacheActions: Promise<ExtractedCacheEntry>[] = []
// For each cache entry definition, determine if it has already been restored, and if not, extract it
for (const cacheEntryDefinition of cacheEntryDefinitions) {
const artifactType = cacheEntryDefinition.artifactType
const pattern = cacheEntryDefinition.pattern
if (cacheEntryDefinition.notCacheableReason) {
listener.entry(pattern).markNotSaved(cacheEntryDefinition.notCacheableReason)
continue
}
// Find all matching files for this cache entry definition
const globber = await glob.create(pattern, {
implicitDescendants: false
})
const matchingFiles = await globber.glob()
if (matchingFiles.length === 0) {
cacheDebug(`No files found to cache for ${artifactType}`)
continue
}
if (cacheEntryDefinition.bundle) {
// For an extracted "bundle", use the defined pattern and cache all matching files in a single entry.
cacheActions.push(
this.awaitForDebugging(
this.saveExtractedCacheEntry(
matchingFiles,
artifactType,
pattern,
cacheEntryDefinition.uniqueFileNames,
previouslyRestoredEntries,
listener.entry(pattern)
)
)
)
} else {
// Otherwise cache each matching file in a separate entry, using the complete file path as the cache pattern.
for (const cacheFile of matchingFiles) {
cacheActions.push(
this.awaitForDebugging(
this.saveExtractedCacheEntry(
[cacheFile],
artifactType,
cacheFile,
cacheEntryDefinition.uniqueFileNames,
previouslyRestoredEntries,
listener.entry(cacheFile)
)
)
)
}
}
}
this.saveMetadataForCacheResults(await Promise.all(cacheActions))
}
private async saveExtractedCacheEntry(
matchingFiles: string[],
artifactType: string,
pattern: string,
uniqueFileNames: boolean,
previouslyRestoredEntries: ExtractedCacheEntry[],
entryListener: CacheEntryListener
): Promise<ExtractedCacheEntry> {
const cacheKey = uniqueFileNames
? this.createCacheKeyFromFileNames(artifactType, matchingFiles)
: await this.createCacheKeyFromFileContents(artifactType, pattern)
const previouslyRestoredKey = previouslyRestoredEntries.find(
x => x.artifactType === artifactType && x.pattern === pattern
)?.cacheKey
if (previouslyRestoredKey === cacheKey) {
cacheDebug(`No change to previously restored ${artifactType}. Not saving.`)
entryListener.markNotSaved('contents unchanged')
} else {
core.info(`Caching ${artifactType} with path '${pattern}' and cache key: ${cacheKey}`)
await saveCache([pattern], cacheKey, entryListener)
}
for (const file of matchingFiles) {
tryDelete(file)
}
return new ExtractedCacheEntry(artifactType, pattern, cacheKey)
}
protected createCacheKeyFromFileNames(artifactType: string, files: string[]): string {
const cacheKeyPrefix = getCacheKeyPrefix()
const relativeFiles = files.map(x => path.relative(this.gradleUserHome, x))
const key = hashFileNames(relativeFiles)
cacheDebug(`Generating cache key for ${artifactType} from file names: ${relativeFiles}`)
return `${cacheKeyPrefix}${artifactType}-${key}`
}
protected async createCacheKeyFromFileContents(artifactType: string, pattern: string): Promise<string> {
const cacheKeyPrefix = getCacheKeyPrefix()
const key = await glob.hashFiles(pattern)
cacheDebug(`Generating cache key for ${artifactType} from files matching: ${pattern}`)
return `${cacheKeyPrefix}${artifactType}-${key}`
}
// Run actions sequentially if debugging is enabled
private async awaitForDebugging(p: Promise<ExtractedCacheEntry>): Promise<ExtractedCacheEntry> {
if (params.isCacheDebuggingEnabled()) {
await p
}
return p
}
/**
* Load information about the extracted cache entries previously restored/saved. This is loaded from the 'cache-metadata.json' file.
*/
protected loadExtractedCacheEntries(): ExtractedCacheEntry[] {
const cacheMetadataFile = this.getCacheMetadataFile()
if (!fs.existsSync(cacheMetadataFile)) {
return []
}
const filedata = fs.readFileSync(cacheMetadataFile, 'utf-8')
cacheDebug(`Loaded cache metadata for ${this.extractorName}: ${filedata}`)
const extractedCacheEntryMetadata = JSON.parse(filedata) as ExtractedCacheEntryMetadata
return extractedCacheEntryMetadata.entries
}
/**
* Saves information about the extracted cache entries into the 'cache-metadata.json' file.
*/
protected saveMetadataForCacheResults(results: ExtractedCacheEntry[]): void {
const extractedCacheEntryMetadata = new ExtractedCacheEntryMetadata()
extractedCacheEntryMetadata.entries = results.filter(x => x.cacheKey !== undefined)
const filedata = JSON.stringify(extractedCacheEntryMetadata)
cacheDebug(`Saving cache metadata for ${this.extractorName}: ${filedata}`)
fs.writeFileSync(this.getCacheMetadataFile(), filedata, 'utf-8')
}
private getCacheMetadataFile(): string {
const actionMetadataDirectory = path.resolve(this.gradleUserHome, META_FILE_DIR)
fs.mkdirSync(actionMetadataDirectory, {recursive: true})
return path.resolve(actionMetadataDirectory, `${this.extractorName}-entry-metadata.json`)
}
protected abstract getExtractedCacheEntryDefinitions(): ExtractedCacheEntryDefinition[]
}
export class GradleHomeEntryExtractor extends AbstractEntryExtractor {
constructor(gradleUserHome: string) {
super(gradleUserHome, 'gradle-home')
}
async extract(listener: CacheListener): Promise<void> {
await this.deleteWrapperZips()
return super.extract(listener)
}
/**
* Delete any downloaded wrapper zip files that are not needed after extraction.
* These files are cleaned up by Gradle >= 7.5, but for older versions we remove them manually.
*/
private async deleteWrapperZips(): Promise<void> {
const wrapperZips = path.resolve(this.gradleUserHome, 'wrapper/dists/*/*/*.zip')
const globber = await glob.create(wrapperZips, {
implicitDescendants: false
})
for (const wrapperZip of await globber.glob()) {
cacheDebug(`Deleting wrapper zip: ${wrapperZip}`)
await tryDelete(wrapperZip)
}
}
/**
* Return the extracted cache entry definitions, which determine which artifacts will be cached
* separately from the rest of the Gradle User Home cache entry.
*/
protected getExtractedCacheEntryDefinitions(): ExtractedCacheEntryDefinition[] {
const entryDefinition = (
artifactType: string,
patterns: string[],
bundle: boolean
): ExtractedCacheEntryDefinition => {
const resolvedPatterns = patterns
.map(x => {
const isDir = x.endsWith('/')
const resolved = path.resolve(this.gradleUserHome, x)
return isDir ? `${resolved}/` : resolved // Restore trailing '/' removed by path.resolve()
})
.join('\n')
return new ExtractedCacheEntryDefinition(artifactType, resolvedPatterns, bundle)
}
return [
entryDefinition('generated-gradle-jars', ['caches/*/generated-gradle-jars/*.jar'], false),
entryDefinition('wrapper-zips', ['wrapper/dists/*/*/'], false), // Each wrapper directory cached separately
entryDefinition('java-toolchains', ['jdks/*/'], false), // Each extracted JDK cached separately
entryDefinition('dependencies', ['caches/modules-*/files-*/*/*/*/*'], true),
entryDefinition('instrumented-jars', ['caches/jars-*/*'], true),
entryDefinition('kotlin-dsl', ['caches/*/kotlin-dsl/*/*'], true)
]
}
}
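// Sketch: a hypothetical driver for the extractor above. The real call sites are not shown
// in this diff, so treat the restore/extract ordering here as an assumption about usage.
async function demoGradleHomeExtraction(gradleUserHome: string, listener: CacheListener): Promise<void> {
const extractor = new GradleHomeEntryExtractor(gradleUserHome)
await extractor.restore(listener) // before the build: restore previously extracted entries
// ... the Gradle build runs here ...
await extractor.extract(listener) // after the build: extract and save common artifacts separately
}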
export class ConfigurationCacheEntryExtractor extends AbstractEntryExtractor {
constructor(gradleUserHome: string) {
super(gradleUserHome, 'configuration-cache')
}
/**
* Handle the case where Gradle User Home has not been fully restored, so that the configuration-cache
* entry is not reusable.
*/
async restore(listener: CacheListener): Promise<void> {
if (!listener.fullyRestored) {
this.markNotRestored(listener, 'Gradle User Home was not fully restored')
return
}
if (!params.getCacheEncryptionKey()) {
this.markNotRestored(listener, 'Encryption Key was not provided')
return
}
return await super.restore(listener)
}
private markNotRestored(listener: CacheListener, reason: string): void {
const cacheEntries = this.loadExtractedCacheEntries()
if (cacheEntries.length > 0) {
core.info(`Not restoring configuration-cache state, as ${reason}`)
for (const cacheEntry of cacheEntries) {
listener.entry(cacheEntry.pattern).markNotRestored(reason)
}
// Update the results file based on no entries restored
this.saveMetadataForCacheResults([])
}
}
async extract(listener: CacheListener): Promise<void> {
if (!params.getCacheEncryptionKey()) {
const cacheEntryDefinitions = this.getExtractedCacheEntryDefinitions()
if (cacheEntryDefinitions.length > 0) {
core.info('Not saving configuration-cache state, as no encryption key was provided')
for (const cacheEntry of cacheEntryDefinitions) {
listener.entry(cacheEntry.pattern).markNotSaved('No encryption key provided')
}
}
return
}
await super.extract(listener)
}
/**
* Extract cache entries for the configuration cache in each project.
*/
protected getExtractedCacheEntryDefinitions(): ExtractedCacheEntryDefinition[] {
// Group BuildResults by their existing configuration-cache directory
const groupedResults = this.getConfigCacheDirectoriesWithAssociatedBuildResults()
return Object.entries(groupedResults).map(([configCachePath, pathResults]) => {
// Create an entry definition for each unique configuration cache directory
const definition = new ExtractedCacheEntryDefinition(
'configuration-cache',
configCachePath,
true
).withNonUniqueFileNames()
// If any associated build result used Gradle < 8.6, then mark it as not cacheable
if (
pathResults.find(result => {
const gradleVersion = semver.coerce(result.gradleVersion)
return gradleVersion && semver.lt(gradleVersion, '8.6.0')
})
) {
core.info(
`Not saving config-cache data for ${configCachePath}. Configuration cache data is only saved for Gradle 8.6+`
)
definition.notCacheableBecause('Configuration cache data only saved for Gradle 8.6+')
}
return definition
})
}
private getConfigCacheDirectoriesWithAssociatedBuildResults(): Record<string, BuildResult[]> {
return loadBuildResults().reduce(
(acc, buildResult) => {
// For each build result, find the config-cache dir
const configCachePath = path.resolve(buildResult.rootProjectDir, '.gradle/configuration-cache')
// Ignore case where config-cache dir doesn't exist
if (!fs.existsSync(configCachePath)) {
return acc
}
// Group by unique config cache directories and collect associated build results
if (!acc[configCachePath]) {
acc[configCachePath] = []
}
acc[configCachePath].push(buildResult)
return acc
},
{} as Record<string, BuildResult[]>
)
}
}

View File

@ -1,220 +0,0 @@
import * as cache from '@actions/cache'
/**
* Collects information on what entries were saved and restored during the action.
* This information is used to generate a summary of the cache usage.
*/
export class CacheListener {
cacheEntries: CacheEntryListener[] = []
cacheReadOnly = false
cacheWriteOnly = false
cacheDisabled = false
cacheDisabledReason = 'disabled'
get fullyRestored(): boolean {
return this.cacheEntries.every(x => !x.wasRequestedButNotRestored())
}
get cacheStatus(): string {
if (!cache.isFeatureAvailable()) return 'not available'
if (this.cacheDisabled) return this.cacheDisabledReason
if (this.cacheWriteOnly) return 'write-only'
if (this.cacheReadOnly) return 'read-only'
return 'enabled'
}
entry(name: string): CacheEntryListener {
for (const entry of this.cacheEntries) {
if (entry.entryName === name) {
return entry
}
}
const newEntry = new CacheEntryListener(name)
this.cacheEntries.push(newEntry)
return newEntry
}
stringify(): string {
return JSON.stringify(this)
}
static rehydrate(stringRep: string): CacheListener {
if (stringRep === '') {
return new CacheListener()
}
const rehydrated: CacheListener = Object.assign(new CacheListener(), JSON.parse(stringRep))
const entries = rehydrated.cacheEntries
for (let index = 0; index < entries.length; index++) {
const rawEntry = entries[index]
entries[index] = Object.assign(new CacheEntryListener(rawEntry.entryName), rawEntry)
}
return rehydrated
}
}
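// Sketch: the listener is serialized in the main step and rehydrated in the post step
// (persisting the string via core.saveState() is an assumption; only stringify/rehydrate
// are shown in this file).
const exampleListener = new CacheListener()
exampleListener.entry('gradle-home').markRequested('v9-gradle-home|Linux|build', ['v9-gradle-home|Linux'])
const roundTripped = CacheListener.rehydrate(exampleListener.stringify())
// roundTripped.entry('gradle-home').requestedKey === 'v9-gradle-home|Linux|build'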
/**
* Collects information on the state of a single cache entry.
*/
export class CacheEntryListener {
entryName: string
requestedKey: string | undefined
requestedRestoreKeys: string[] | undefined
restoredKey: string | undefined
restoredSize: number | undefined
notRestored: string | undefined
savedKey: string | undefined
savedSize: number | undefined
notSaved: string | undefined
constructor(entryName: string) {
this.entryName = entryName
}
wasRequestedButNotRestored(): boolean {
return this.requestedKey !== undefined && this.restoredKey === undefined
}
markRequested(key: string, restoreKeys: string[] = []): CacheEntryListener {
this.requestedKey = key
this.requestedRestoreKeys = restoreKeys
return this
}
markRestored(key: string, size: number | undefined): CacheEntryListener {
this.restoredKey = key
this.restoredSize = size
return this
}
markNotRestored(message: string): CacheEntryListener {
this.notRestored = message
return this
}
markSaved(key: string, size: number | undefined): CacheEntryListener {
this.savedKey = key
this.savedSize = size
return this
}
markAlreadyExists(key: string): CacheEntryListener {
this.savedKey = key
this.savedSize = 0
return this
}
markNotSaved(message: string): CacheEntryListener {
this.notSaved = message
return this
}
}
export function generateCachingReport(listener: CacheListener): string {
const entries = listener.cacheEntries
return `
<details>
<summary><h4>Caching for gradle-build-action was ${listener.cacheStatus} - expand for details</h4></summary>
${renderEntryTable(entries)}
<h5>Cache Entry Details</h5>
<pre>
${renderEntryDetails(listener)}
</pre>
</details>
`
}
function renderEntryTable(entries: CacheEntryListener[]): string {
return `
<table>
<tr><td></td><th>Count</th><th>Total Size (MB)</th></tr>
<tr><td>Entries Restored</td>
<td>${getCount(entries, e => e.restoredSize)}</td>
<td>${getSize(entries, e => e.restoredSize)}</td>
</tr>
<tr><td>Entries Saved</td>
<td>${getCount(entries, e => e.savedSize)}</td>
<td>${getSize(entries, e => e.savedSize)}</td>
</tr>
</table>
`
}
function renderEntryDetails(listener: CacheListener): string {
return listener.cacheEntries
.map(
entry => `Entry: ${entry.entryName}
Requested Key : ${entry.requestedKey ?? ''}
Restored Key : ${entry.restoredKey ?? ''}
Size: ${formatSize(entry.restoredSize)}
${getRestoredMessage(entry, listener.cacheWriteOnly)}
Saved Key : ${entry.savedKey ?? ''}
Size: ${formatSize(entry.savedSize)}
${getSavedMessage(entry, listener.cacheReadOnly)}
`
)
.join('---\n')
}
function getRestoredMessage(entry: CacheEntryListener, cacheWriteOnly: boolean): string {
if (entry.notRestored) {
return `(Entry not restored: ${entry.notRestored})`
}
if (cacheWriteOnly) {
return '(Entry not restored: cache is write-only)'
}
if (entry.requestedKey === undefined) {
return '(Entry not restored: not requested)'
}
if (entry.restoredKey === undefined) {
return '(Entry not restored: no match found)'
}
if (entry.restoredKey === entry.requestedKey) {
return '(Entry restored: exact match found)'
}
return '(Entry restored: partial match found)'
}
function getSavedMessage(entry: CacheEntryListener, cacheReadOnly: boolean): string {
if (entry.notSaved) {
return `(Entry not saved: ${entry.notSaved})`
}
if (entry.savedKey === undefined) {
if (cacheReadOnly) {
return '(Entry not saved: cache is read-only)'
}
if (entry.notRestored) {
return '(Entry not saved: not restored)'
}
return '(Entry not saved: reason unknown)'
}
if (entry.savedSize === 0) {
return '(Entry not saved: entry with key already exists)'
}
return '(Entry saved)'
}
function getCount(
cacheEntries: CacheEntryListener[],
predicate: (value: CacheEntryListener) => number | undefined
): number {
return cacheEntries.filter(e => predicate(e)).length
}
function getSize(
cacheEntries: CacheEntryListener[],
predicate: (value: CacheEntryListener) => number | undefined
): number {
const bytes = cacheEntries.map(e => predicate(e) ?? 0).reduce((p, v) => p + v, 0)
return Math.round(bytes / (1024 * 1024))
}
function formatSize(bytes: number | undefined): string {
if (bytes === undefined || bytes === 0) {
return ''
}
return `${Math.round(bytes / (1024 * 1024))} MB (${bytes} B)`
}
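// Sketch: how sizes are rendered in the report above (values illustrative).
const fiftyMb = 50 * 1024 * 1024
console.assert(formatSize(fiftyMb) === '50 MB (52428800 B)')
console.assert(formatSize(undefined) === '') // unknown or zero sizes are rendered as blank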

View File

@ -1,250 +0,0 @@
import * as core from '@actions/core'
import * as cache from '@actions/cache'
import * as github from '@actions/github'
import * as exec from '@actions/exec'
import * as crypto from 'crypto'
import * as path from 'path'
import * as fs from 'fs'
import * as params from './input-params'
import {CacheEntryListener} from './cache-reporting'
const CACHE_PROTOCOL_VERSION = 'v9-'
const CACHE_KEY_PREFIX_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX'
const CACHE_KEY_OS_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_ENVIRONMENT'
const CACHE_KEY_JOB_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB'
const CACHE_KEY_JOB_INSTANCE_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB_INSTANCE'
const CACHE_KEY_JOB_EXECUTION_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB_EXECUTION'
const SEGMENT_DOWNLOAD_TIMEOUT_VAR = 'SEGMENT_DOWNLOAD_TIMEOUT_MINS'
const SEGMENT_DOWNLOAD_TIMEOUT_DEFAULT = 10 * 60 * 1000 // 10 minutes
export function isCacheDisabled(): boolean {
if (!cache.isFeatureAvailable()) {
return true
}
return params.isCacheDisabled()
}
export function isCacheReadOnly(): boolean {
return !isCacheWriteOnly() && params.isCacheReadOnly()
}
export function isCacheWriteOnly(): boolean {
return params.isCacheWriteOnly()
}
export function isCacheOverwriteExisting(): boolean {
return params.isCacheOverwriteExisting()
}
export function isCacheDebuggingEnabled(): boolean {
return params.isCacheDebuggingEnabled()
}
export function isCacheCleanupEnabled(): boolean {
return params.isCacheCleanupEnabled()
}
/**
* Represents a key used to restore a cache entry.
* The GitHub Actions cache will first try for an exact match on the key.
* If that fails, it will try for a prefix match on any of the restoreKeys.
*/
export class CacheKey {
key: string
restoreKeys: string[]
constructor(key: string, restoreKeys: string[]) {
this.key = key
this.restoreKeys = restoreKeys
}
}
/**
* Generates a cache key specific to the current job execution.
* The key is constructed from the following inputs (with some user overrides):
* - The cache protocol version
* - The name of the cache
* - The runner operating system
* - The name of the workflow and Job being executed
* - The matrix values for the Job being executed (job context)
* - The SHA of the commit being executed
*
* Caches are restored by trying to match these key prefixes in order:
* - The full key with SHA
* - A previous key for this Job + matrix
* - Any previous key for this Job (any matrix)
* - Any previous key for this cache on the current OS
*/
export function generateCacheKey(cacheName: string): CacheKey {
const cacheKeyBase = `${getCacheKeyPrefix()}${CACHE_PROTOCOL_VERSION}${cacheName}`
// At the most general level, share caches for all executions on the same OS
const cacheKeyForEnvironment = `${cacheKeyBase}|${getCacheKeyEnvironment()}`
// Then prefer caches that run job with the same ID
const cacheKeyForJob = `${cacheKeyForEnvironment}|${getCacheKeyJob()}`
// Prefer (even more) caches from runs of this job in the same workflow with the same context (matrix)
const cacheKeyForJobContext = `${cacheKeyForJob}[${getCacheKeyJobInstance()}]`
// Exact match on Git SHA
const cacheKey = `${cacheKeyForJobContext}-${getCacheKeyJobExecution()}`
if (params.isCacheStrictMatch()) {
return new CacheKey(cacheKey, [cacheKeyForJobContext])
}
return new CacheKey(cacheKey, [cacheKeyForJobContext, cacheKeyForJob, cacheKeyForEnvironment])
}
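// Sketch: for cacheName 'gradle-home' on a Linux runner in job 'build' (illustrative values),
// the generated key and restore keys take roughly this shape (restore keys as listed when
// 'gradle-home-cache-strict-match' is not enabled):
//   key:          v9-gradle-home|Linux|build[<matrix-hash>]-<git-sha>
//   restoreKeys:  v9-gradle-home|Linux|build[<matrix-hash>]
//                 v9-gradle-home|Linux|build
//                 v9-gradle-home|Linux
const exampleCacheKey = generateCacheKey('gradle-home')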
export function getCacheKeyPrefix(): string {
// Prefix can be used to force a change to all cache keys (defaults to no prefix)
return process.env[CACHE_KEY_PREFIX_VAR] || ''
}
function getCacheKeyEnvironment(): string {
const runnerOs = process.env['RUNNER_OS'] || ''
return process.env[CACHE_KEY_OS_VAR] || runnerOs
}
function getCacheKeyJob(): string {
return process.env[CACHE_KEY_JOB_VAR] || github.context.job
}
function getCacheKeyJobInstance(): string {
const override = process.env[CACHE_KEY_JOB_INSTANCE_VAR]
if (override) {
return override
}
// By default, we hash the workflow name and the full `matrix` data for the run, to uniquely identify this job invocation
// The only way we can obtain the `matrix` data is via the `workflow-job-context` parameter in action.yml.
const workflowName = github.context.workflow
const workflowJobContext = params.getJobMatrix()
return hashStrings([workflowName, workflowJobContext])
}
function getCacheKeyJobExecution(): string {
// Used to associate a cache key with a particular execution (default is bound to the git commit sha)
return process.env[CACHE_KEY_JOB_EXECUTION_VAR] || github.context.sha
}
export function hashFileNames(fileNames: string[]): string {
return hashStrings(fileNames.map(x => x.replace(new RegExp(`\\${path.sep}`, 'g'), '/')))
}
export function hashStrings(values: string[]): string {
const hash = crypto.createHash('md5')
for (const value of values) {
hash.update(value)
}
return hash.digest('hex')
}
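// Sketch: hashFileNames() normalizes the platform path separator before hashing, so on a
// Windows runner (path.sep === '\\') these two calls produce the same MD5 hex digest
// (file names are illustrative):
const hashA = hashFileNames(['caches\\modules-2\\files-2.1\\guava.jar'])
const hashB = hashFileNames(['caches/modules-2/files-2.1/guava.jar'])
// On Linux the paths already use '/' and are hashed as-is.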
export async function restoreCache(
cachePath: string[],
cacheKey: string,
cacheRestoreKeys: string[],
listener: CacheEntryListener
): Promise<cache.CacheEntry | undefined> {
listener.markRequested(cacheKey, cacheRestoreKeys)
try {
// Only override the read timeout if the SEGMENT_DOWNLOAD_TIMEOUT_MINS env var has NOT been set
const cacheRestoreOptions = process.env[SEGMENT_DOWNLOAD_TIMEOUT_VAR]
? {}
: {segmentTimeoutInMs: SEGMENT_DOWNLOAD_TIMEOUT_DEFAULT}
const restoredEntry = await cache.restoreCache(cachePath, cacheKey, cacheRestoreKeys, cacheRestoreOptions)
if (restoredEntry !== undefined) {
listener.markRestored(restoredEntry.key, restoredEntry.size)
}
return restoredEntry
} catch (error) {
listener.markNotRestored((error as Error).message)
handleCacheFailure(error, `Failed to restore ${cacheKey}`)
return undefined
}
}
export async function saveCache(cachePath: string[], cacheKey: string, listener: CacheEntryListener): Promise<void> {
try {
const savedEntry = await cache.saveCache(cachePath, cacheKey)
listener.markSaved(savedEntry.key, savedEntry.size)
} catch (error) {
if (error instanceof cache.ReserveCacheError) {
listener.markAlreadyExists(cacheKey)
} else {
listener.markNotSaved((error as Error).message)
}
handleCacheFailure(error, `Failed to save cache entry with path '${cachePath}' and key: ${cacheKey}`)
}
}
export function cacheDebug(message: string): void {
if (isCacheDebuggingEnabled()) {
core.info(message)
} else {
core.debug(message)
}
}
export function handleCacheFailure(error: unknown, message: string): void {
if (error instanceof cache.ValidationError) {
// Fail on cache validation errors
throw error
}
if (error instanceof cache.ReserveCacheError) {
// Reserve cache errors are expected if the artifact has been previously cached
core.info(`${message}: ${error}`)
} else {
// Warn on all other errors
core.warning(`${message}: ${error}`)
if (error instanceof Error && error.stack) {
cacheDebug(error.stack)
}
}
}
/**
* Attempt to delete a file or directory, waiting to allow locks to be released
*/
export async function tryDelete(file: string): Promise<void> {
const maxAttempts = 5
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
if (!fs.existsSync(file)) {
return
}
try {
const stat = fs.lstatSync(file)
if (stat.isDirectory()) {
fs.rmSync(file, {recursive: true})
} else {
fs.unlinkSync(file)
}
return
} catch (error) {
if (attempt === maxAttempts) {
core.warning(`Failed to delete ${file}, which will impact caching.
It is likely locked by another process. Output of 'jps -lm':
${await getJavaProcesses()}`)
throw error
} else {
cacheDebug(`Attempt to delete ${file} failed. Will try again.`)
await delay(1000)
}
}
}
}
async function delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms))
}
async function getJavaProcesses(): Promise<string> {
const jpsOutput = await exec.getExecOutput('jps', ['-lm'])
return jpsOutput.stdout
}

View File

@ -1,104 +0,0 @@
import * as core from '@actions/core'
import {
isCacheCleanupEnabled,
isCacheDisabled,
isCacheReadOnly,
isCacheWriteOnly,
isCacheOverwriteExisting
} from './cache-utils'
import {CacheListener} from './cache-reporting'
import {DaemonController} from './daemon-controller'
import {GradleStateCache} from './cache-base'
import {CacheCleaner} from './cache-cleaner'
const CACHE_RESTORED_VAR = 'GRADLE_BUILD_ACTION_CACHE_RESTORED'
export async function restore(userHome: string, gradleUserHome: string, cacheListener: CacheListener): Promise<void> {
// Bypass cache restore on all but the first action step in the workflow.
if (process.env[CACHE_RESTORED_VAR]) {
core.info('Cache is only restored on the first action step.')
return
}
core.exportVariable(CACHE_RESTORED_VAR, true)
const gradleStateCache = new GradleStateCache(userHome, gradleUserHome)
if (isCacheDisabled()) {
core.info('Cache is disabled: will not restore state from previous builds.')
// Initialize the Gradle User Home even when caching is disabled.
gradleStateCache.init()
cacheListener.cacheDisabled = true
return
}
if (gradleStateCache.cacheOutputExists()) {
if (!isCacheOverwriteExisting()) {
core.info('Gradle User Home already exists: will not restore from cache.')
// Initialize pre-existing Gradle User Home.
gradleStateCache.init()
cacheListener.cacheDisabled = true
cacheListener.cacheDisabledReason = 'disabled due to pre-existing Gradle User Home'
return
}
core.info('Gradle User Home already exists: will overwrite with cached contents.')
}
gradleStateCache.init()
// Mark the state as restored so that post-action will perform save.
core.saveState(CACHE_RESTORED_VAR, true)
if (isCacheWriteOnly()) {
core.info('Cache is write-only: will not restore from cache.')
cacheListener.cacheWriteOnly = true
return
}
await core.group('Restore Gradle state from cache', async () => {
await gradleStateCache.restore(cacheListener)
})
if (isCacheCleanupEnabled() && !isCacheReadOnly()) {
core.info('Preparing cache for cleanup.')
const cacheCleaner = new CacheCleaner(gradleUserHome, process.env['RUNNER_TEMP']!)
await cacheCleaner.prepare()
}
}
export async function save(
userHome: string,
gradleUserHome: string,
cacheListener: CacheListener,
daemonController: DaemonController
): Promise<void> {
if (isCacheDisabled()) {
core.info('Cache is disabled: will not save state for later builds.')
return
}
if (!core.getState(CACHE_RESTORED_VAR)) {
core.info('Cache will not be saved: not restored in main action step.')
return
}
if (isCacheReadOnly()) {
core.info('Cache is read-only: will not save state for use in subsequent builds.')
cacheListener.cacheReadOnly = true
return
}
await daemonController.stopAllDaemons()
if (isCacheCleanupEnabled()) {
core.info('Forcing cache cleanup.')
const cacheCleaner = new CacheCleaner(gradleUserHome, process.env['RUNNER_TEMP']!)
try {
await cacheCleaner.forceCleanup()
} catch (e) {
core.warning(`Cache cleanup failed. Will continue. ${String(e)}`)
}
}
await core.group('Caching Gradle state', async () => {
return new GradleStateCache(userHome, gradleUserHome).save(cacheListener)
})
}

View File

@ -1,36 +0,0 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as fs from 'fs'
import * as path from 'path'
import {BuildResult} from './build-results'
export class DaemonController {
private readonly gradleHomes: string[]
constructor(buildResults: BuildResult[]) {
const allHomes = buildResults.map(buildResult => buildResult.gradleHomeDir)
this.gradleHomes = Array.from(new Set(allHomes))
}
async stopAllDaemons(): Promise<void> {
core.info('Stopping all Gradle daemons before saving Gradle User Home state')
const executions: Promise<number>[] = []
const args = ['--stop']
for (const gradleHome of this.gradleHomes) {
const executable = path.resolve(gradleHome, 'bin', 'gradle')
if (!fs.existsSync(executable)) {
core.warning(`Gradle executable not found at ${executable}. Could not stop Gradle daemons.`)
continue
}
core.info(`Stopping Gradle daemons for ${gradleHome}`)
executions.push(
exec.exec(executable, args, {
ignoreReturnCode: true
})
)
}
await Promise.all(executions)
}
}

View File

@ -1,247 +0,0 @@
import * as core from '@actions/core'
import * as github from '@actions/github'
import * as glob from '@actions/glob'
import {DefaultArtifactClient} from '@actions/artifact'
import {GitHub} from '@actions/github/lib/utils'
import {RequestError} from '@octokit/request-error'
import type {PullRequestEvent} from '@octokit/webhooks-types'
import * as path from 'path'
import fs from 'fs'
import * as layout from './repository-layout'
import {PostActionJobFailure} from './errors'
import {
DependencyGraphOption,
getDependencyGraphContinueOnFailure,
getJobMatrix,
getArtifactRetentionDays
} from './input-params'
const DEPENDENCY_GRAPH_PREFIX = 'dependency-graph_'
export async function setup(option: DependencyGraphOption): Promise<void> {
if (option === DependencyGraphOption.Disabled) {
return
}
// Download and submit early, for compatibility with dependency review.
if (option === DependencyGraphOption.DownloadAndSubmit) {
await downloadAndSubmitDependencyGraphs()
return
}
core.info('Enabling dependency graph generation')
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_ENABLED', 'true')
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_CONTINUE_ON_FAILURE', getDependencyGraphContinueOnFailure())
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR', getJobCorrelator())
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_JOB_ID', github.context.runId)
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_REF', github.context.ref)
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_SHA', getShaFromContext())
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_WORKSPACE', layout.workspaceDirectory())
maybeExportVariable(
'DEPENDENCY_GRAPH_REPORT_DIR',
path.resolve(layout.workspaceDirectory(), 'dependency-graph-reports')
)
// To clear the dependency graph, we generate an empty graph by excluding all projects and configurations
if (option === DependencyGraphOption.Clear) {
core.exportVariable('DEPENDENCY_GRAPH_INCLUDE_PROJECTS', '')
core.exportVariable('DEPENDENCY_GRAPH_INCLUDE_CONFIGURATIONS', '')
}
}
function maybeExportVariable(variableName: string, value: unknown): void {
if (!process.env[variableName]) {
core.exportVariable(variableName, value)
}
}
export async function complete(option: DependencyGraphOption): Promise<void> {
try {
switch (option) {
case DependencyGraphOption.Disabled:
case DependencyGraphOption.Generate: // Performed via init-script: nothing to do here
case DependencyGraphOption.DownloadAndSubmit: // Performed in setup
return
case DependencyGraphOption.GenerateAndSubmit:
case DependencyGraphOption.Clear: // Submit the empty dependency graph
await submitDependencyGraphs(await findGeneratedDependencyGraphFiles())
return
case DependencyGraphOption.GenerateAndUpload:
await uploadDependencyGraphs(await findGeneratedDependencyGraphFiles())
}
} catch (e) {
warnOrFail(option, e)
}
}
async function findGeneratedDependencyGraphFiles(): Promise<string[]> {
const workspaceDirectory = layout.workspaceDirectory()
return await findDependencyGraphFiles(workspaceDirectory)
}
async function uploadDependencyGraphs(dependencyGraphFiles: string[]): Promise<void> {
const workspaceDirectory = layout.workspaceDirectory()
const artifactClient = new DefaultArtifactClient()
for (const dependencyGraphFile of dependencyGraphFiles) {
const relativePath = getRelativePathFromWorkspace(dependencyGraphFile)
core.info(`Uploading dependency graph file: ${relativePath}`)
const artifactName = `${DEPENDENCY_GRAPH_PREFIX}${path.basename(dependencyGraphFile)}`
await artifactClient.uploadArtifact(artifactName, [dependencyGraphFile], workspaceDirectory, {
retentionDays: getArtifactRetentionDays()
})
}
}
async function downloadAndSubmitDependencyGraphs(): Promise<void> {
try {
await submitDependencyGraphs(await downloadDependencyGraphs())
} catch (e) {
warnOrFail(DependencyGraphOption.DownloadAndSubmit, e)
}
}
async function submitDependencyGraphs(dependencyGraphFiles: string[]): Promise<void> {
for (const jsonFile of dependencyGraphFiles) {
try {
await submitDependencyGraphFile(jsonFile)
} catch (error) {
if (error instanceof RequestError) {
throw new Error(translateErrorMessage(jsonFile, error))
} else {
throw error
}
}
}
}
function translateErrorMessage(jsonFile: string, error: RequestError): string {
const relativeJsonFile = getRelativePathFromWorkspace(jsonFile)
const mainWarning = `Dependency submission failed for ${relativeJsonFile}.\n${String(error)}`
if (error.message === 'Resource not accessible by integration') {
return `${mainWarning}
Please ensure that the 'contents: write' permission is available for the workflow job.
Note that this permission is never available for a 'pull_request' trigger from a repository fork.
`
}
return mainWarning
}
async function submitDependencyGraphFile(jsonFile: string): Promise<void> {
const octokit = getOctokit()
const jsonContent = fs.readFileSync(jsonFile, 'utf8')
const jsonObject = JSON.parse(jsonContent)
jsonObject.owner = github.context.repo.owner
jsonObject.repo = github.context.repo.repo
const response = await octokit.request('POST /repos/{owner}/{repo}/dependency-graph/snapshots', jsonObject)
const relativeJsonFile = getRelativePathFromWorkspace(jsonFile)
core.notice(`Submitted ${relativeJsonFile}: ${response.data.message}`)
}
async function downloadDependencyGraphs(): Promise<string[]> {
const workspaceDirectory = layout.workspaceDirectory()
const findBy = github.context.payload.workflow_run
? {
token: getGithubToken(),
workflowRunId: github.context.payload.workflow_run.id,
repositoryName: github.context.repo.repo,
repositoryOwner: github.context.repo.owner
}
: undefined
const artifactClient = new DefaultArtifactClient()
const downloadPath = path.resolve(workspaceDirectory, 'dependency-graph')
const dependencyGraphArtifacts = (
await artifactClient.listArtifacts({
latest: true,
findBy
})
).artifacts.filter(candidate => candidate.name.startsWith(DEPENDENCY_GRAPH_PREFIX))
for (const artifact of dependencyGraphArtifacts) {
const downloadedArtifact = await artifactClient.downloadArtifact(artifact.id, {
path: downloadPath,
findBy
})
core.info(`Downloading dependency-graph artifact ${artifact.name} to ${downloadedArtifact.downloadPath}`)
}
return findDependencyGraphFiles(downloadPath)
}
async function findDependencyGraphFiles(dir: string): Promise<string[]> {
const globber = await glob.create(`${dir}/dependency-graph-reports/*.json`)
const graphFiles = await globber.glob()
return graphFiles
}
function warnOrFail(option: string, error: unknown): void {
if (!getDependencyGraphContinueOnFailure()) {
throw new PostActionJobFailure(error)
}
core.warning(`Failed to ${option} dependency graph. Will continue.\n${String(error)}`)
}
function getOctokit(): InstanceType<typeof GitHub> {
return github.getOctokit(getGithubToken())
}
function getGithubToken(): string {
return core.getInput('github-token', {required: true})
}
function getRelativePathFromWorkspace(file: string): string {
const workspaceDirectory = layout.workspaceDirectory()
return path.relative(workspaceDirectory, file)
}
function getShaFromContext(): string {
const context = github.context
const pullRequestEvents = [
'pull_request',
'pull_request_comment',
'pull_request_review',
'pull_request_review_comment'
// Note that pull_request_target is omitted here.
// That event runs in the context of the base commit of the PR,
// so the snapshot should not be associated with the head commit.
]
if (pullRequestEvents.includes(context.eventName)) {
const pr = (context.payload as PullRequestEvent).pull_request
return pr.head.sha
} else {
return context.sha
}
}
function getJobCorrelator(): string {
return constructJobCorrelator(github.context.workflow, github.context.job, getJobMatrix())
}
export function constructJobCorrelator(workflow: string, jobId: string, matrixJson: string): string {
const matrixString = describeMatrix(matrixJson)
const label = matrixString ? `${workflow}-${jobId}-${matrixString}` : `${workflow}-${jobId}`
return sanitize(label)
}
function describeMatrix(matrixJson: string): string {
core.debug(`Got matrix json: ${matrixJson}`)
const matrix = JSON.parse(matrixJson)
if (matrix) {
return Object.values(matrix).join('-')
}
return ''
}
function sanitize(value: string): string {
return value
.replace(/[^a-zA-Z0-9_-\s]/g, '')
.replace(/\s+/g, '_')
.toLowerCase()
}
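// Sketch: correlator for an illustrative workflow/job/matrix combination.
const exampleCorrelator = constructJobCorrelator('CI Build', 'test', '{"os":"ubuntu-latest","java":"17"}')
// describeMatrix() -> 'ubuntu-latest-17', label -> 'CI Build-test-ubuntu-latest-17',
// sanitize() -> 'ci_build-test-ubuntu-latest-17'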

View File

@ -1,11 +0,0 @@
export class PostActionJobFailure extends Error {
constructor(error: unknown) {
if (error instanceof Error) {
super(error.message)
this.name = error.name
this.stack = error.stack
} else {
super(String(error))
}
}
}

View File

@ -1,17 +0,0 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as gradlew from './gradlew'
export async function executeGradleBuild(executable: string | undefined, root: string, args: string[]): Promise<void> {
// Use the provided executable, or look for a Gradle wrapper script to run
const toExecute = executable ?? gradlew.gradleWrapperScript(root)
const status: number = await exec.exec(toExecute, args, {
cwd: root,
ignoreReturnCode: true
})
if (status !== 0) {
core.setFailed(`Gradle build failed: see console output for details`)
}
}

View File

@ -1,42 +0,0 @@
import * as path from 'path'
import fs from 'fs'
const IS_WINDOWS = process.platform === 'win32'
export function wrapperScriptFilename(): string {
return IS_WINDOWS ? './gradlew.bat' : './gradlew'
}
export function installScriptFilename(): string {
return IS_WINDOWS ? 'gradle.bat' : 'gradle'
}
export function gradleWrapperScript(buildRootDirectory: string): string {
validateGradleWrapper(buildRootDirectory)
return wrapperScriptFilename()
}
function validateGradleWrapper(buildRootDirectory: string): void {
const wrapperScript = path.resolve(buildRootDirectory, wrapperScriptFilename())
verifyExists(wrapperScript, 'Gradle Wrapper script')
verifyIsExecutableScript(wrapperScript)
const wrapperProperties = path.resolve(buildRootDirectory, 'gradle/wrapper/gradle-wrapper.properties')
verifyExists(wrapperProperties, 'Gradle wrapper properties file')
}
function verifyExists(file: string, description: string): void {
if (!fs.existsSync(file)) {
throw new Error(
`Cannot locate ${description} at '${file}'. Specify 'gradle-version' for projects without Gradle wrapper configured.`
)
}
}
function verifyIsExecutableScript(toExecute: string): void {
try {
fs.accessSync(toExecute, fs.constants.X_OK)
} catch (err) {
throw new Error(`Gradle script '${toExecute}' is not executable.`)
}
}

View File

@ -1,171 +0,0 @@
import * as core from '@actions/core'
import {parseArgsStringToArgv} from 'string-argv'
export function isCacheDisabled(): boolean {
return getBooleanInput('cache-disabled')
}
export function isCacheReadOnly(): boolean {
return getBooleanInput('cache-read-only')
}
export function isCacheWriteOnly(): boolean {
return getBooleanInput('cache-write-only')
}
export function isCacheOverwriteExisting(): boolean {
return getBooleanInput('cache-overwrite-existing')
}
export function isCacheStrictMatch(): boolean {
return getBooleanInput('gradle-home-cache-strict-match')
}
export function isCacheDebuggingEnabled(): boolean {
return process.env['GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED'] ? true : false
}
export function isCacheCleanupEnabled(): boolean {
return getBooleanInput('gradle-home-cache-cleanup')
}
export function getCacheEncryptionKey(): string {
return core.getInput('cache-encryption-key')
}
export function getCacheIncludes(): string[] {
return core.getMultilineInput('gradle-home-cache-includes')
}
export function getCacheExcludes(): string[] {
return core.getMultilineInput('gradle-home-cache-excludes')
}
export function getBuildRootDirectory(): string {
return core.getInput('build-root-directory')
}
export function getGradleVersion(): string {
return core.getInput('gradle-version')
}
export function getArguments(): string[] {
const input = core.getInput('arguments')
return parseArgsStringToArgv(input)
}
// Internal parameters
export function getJobMatrix(): string {
return core.getInput('workflow-job-context')
}
export function getGithubToken(): string {
return core.getInput('github-token', {required: true})
}
export function isJobSummaryEnabled(): boolean {
return getBooleanInput('generate-job-summary', true)
}
export function getJobSummaryOption(): JobSummaryOption {
return parseJobSummaryOption('add-job-summary')
}
export function getPRCommentOption(): JobSummaryOption {
return parseJobSummaryOption('add-job-summary-as-pr-comment')
}
export function getBuildScanPublishEnabled(): boolean {
return getBooleanInput('build-scan-publish')
}
export function getBuildScanTermsOfServiceUrl(): string {
return core.getInput('build-scan-terms-of-service-url')
}
export function getBuildScanTermsOfServiceAgree(): string {
return core.getInput('build-scan-terms-of-service-agree')
}
function parseJobSummaryOption(paramName: string): JobSummaryOption {
const val = core.getInput(paramName)
switch (val.toLowerCase().trim()) {
case 'never':
return JobSummaryOption.Never
case 'always':
return JobSummaryOption.Always
case 'on-failure':
return JobSummaryOption.OnFailure
}
throw TypeError(`The value '${val}' is not valid for ${paramName}. Valid values are: [never, always, on-failure].`)
}
export function getDependencyGraphOption(): DependencyGraphOption {
const val = core.getInput('dependency-graph')
switch (val.toLowerCase().trim()) {
case 'disabled':
return DependencyGraphOption.Disabled
case 'generate':
return DependencyGraphOption.Generate
case 'generate-and-submit':
return DependencyGraphOption.GenerateAndSubmit
case 'generate-and-upload':
return DependencyGraphOption.GenerateAndUpload
case 'download-and-submit':
return DependencyGraphOption.DownloadAndSubmit
case 'clear':
return DependencyGraphOption.Clear
}
throw TypeError(
`The value '${val}' is not valid for 'dependency-graph'. Valid values are: [disabled, generate, generate-and-submit, generate-and-upload, download-and-submit, clear]. The default value is 'disabled'.`
)
}
export function getDependencyGraphContinueOnFailure(): boolean {
return getBooleanInput('dependency-graph-continue-on-failure', true)
}
export function getArtifactRetentionDays(): number {
const val = core.getInput('artifact-retention-days')
return parseNumericInput('artifact-retention-days', val, 0)
// Zero indicates that the default repository settings should be used
}
export function parseNumericInput(paramName: string, paramValue: string, paramDefault: number): number {
if (paramValue.length === 0) {
return paramDefault
}
const numericValue = parseInt(paramValue)
if (isNaN(numericValue)) {
throw TypeError(`The value '${paramValue}' is not a valid numeric value for '${paramName}'.`)
}
return numericValue
}
function getBooleanInput(paramName: string, paramDefault = false): boolean {
const paramValue = core.getInput(paramName)
switch (paramValue.toLowerCase().trim()) {
case '':
return paramDefault
case 'false':
return false
case 'true':
return true
}
throw TypeError(`The value '${paramValue}' is not valid for '${paramName}'. Valid values are: [true, false]`)
}
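// Sketch: parseNumericInput() behaviour for the 'artifact-retention-days' input (values and the
// default of 90 are illustrative).
console.assert(parseNumericInput('artifact-retention-days', '', 90) === 90) // empty value falls back to the default
console.assert(parseNumericInput('artifact-retention-days', '30', 90) === 30)
// parseNumericInput('artifact-retention-days', 'abc', 90) throws a TypeError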
export enum DependencyGraphOption {
Disabled = 'disabled',
Generate = 'generate',
GenerateAndSubmit = 'generate-and-submit',
GenerateAndUpload = 'generate-and-upload',
DownloadAndSubmit = 'download-and-submit',
Clear = 'clear'
}
export enum JobSummaryOption {
Never = 'never',
Always = 'always',
OnFailure = 'on-failure'
}

View File

@ -1,157 +0,0 @@
import * as core from '@actions/core'
import * as github from '@actions/github'
import {SUMMARY_ENV_VAR} from '@actions/core/lib/summary'
import {RequestError} from '@octokit/request-error'
import * as params from './input-params'
import {BuildResult} from './build-results'
import {CacheListener, generateCachingReport} from './cache-reporting'
export async function generateJobSummary(buildResults: BuildResult[], cacheListener: CacheListener): Promise<void> {
const summaryTable = renderSummaryTable(buildResults)
const cachingReport = generateCachingReport(cacheListener)
if (shouldGenerateJobSummary(buildResults)) {
core.info('Generating Job Summary')
core.summary.addRaw(summaryTable)
core.summary.addRaw(cachingReport)
await core.summary.write()
} else {
core.info('============================')
core.info(summaryTable)
core.info('============================')
core.info(cachingReport)
core.info('============================')
}
if (shouldAddPRComment(buildResults)) {
await addPRComment(summaryTable)
}
}
async function addPRComment(jobSummary: string): Promise<void> {
const context = github.context
if (context.payload.pull_request == null) {
core.info('No pull_request trigger: not adding PR comment')
return
}
const pull_request_number = context.payload.pull_request.number
core.info(`Adding Job Summary as comment to PR #${pull_request_number}.`)
const prComment = `<h3>Job Summary for gradle-build-action</h3>
<h5>${github.context.workflow} :: <em>${github.context.job}</em></h5>
${jobSummary}`
const github_token = params.getGithubToken()
const octokit = github.getOctokit(github_token)
try {
await octokit.rest.issues.createComment({
...context.repo,
issue_number: pull_request_number,
body: prComment
})
} catch (error) {
if (error instanceof RequestError) {
core.warning(buildWarningMessage(error))
} else {
throw error
}
}
}
function buildWarningMessage(error: RequestError): string {
const mainWarning = `Failed to generate PR comment.\n${String(error)}`
if (error.message === 'Resource not accessible by integration') {
return `${mainWarning}
Please ensure that the 'pull-requests: write' permission is available for the workflow job.
Note that this permission is never available for a workflow triggered from a repository fork.
`
}
return mainWarning
}
function renderSummaryTable(results: BuildResult[]): string {
if (results.length === 0) {
return 'No Gradle build results detected.'
}
return `
<table>
<tr>
<th>Gradle Root Project</th>
<th>Requested Tasks</th>
<th>Gradle Version</th>
<th>Build Outcome</th>
<th>Build Scan®</th>
</tr>${results.map(result => renderBuildResultRow(result)).join('')}
</table>
`
}
function renderBuildResultRow(result: BuildResult): string {
return `
<tr>
<td>${result.rootProjectName}</td>
<td>${result.requestedTasks}</td>
<td align='center'>${result.gradleVersion}</td>
<td align='center'>${renderOutcome(result)}</td>
<td>${renderBuildScan(result)}</td>
</tr>`
}
function renderOutcome(result: BuildResult): string {
return result.buildFailed ? ':x:' : ':white_check_mark:'
}
function renderBuildScan(result: BuildResult): string {
if (result.buildScanFailed) {
return renderBuildScanBadge(
'PUBLISH_FAILED',
'orange',
'https://docs.gradle.com/enterprise/gradle-plugin/#troubleshooting'
)
}
if (result.buildScanUri) {
return renderBuildScanBadge('PUBLISHED', '06A0CE', result.buildScanUri)
}
return renderBuildScanBadge('NOT_PUBLISHED', 'lightgrey', 'https://scans.gradle.com')
}
function renderBuildScanBadge(outcomeText: string, outcomeColor: string, targetUrl: string): string {
const badgeUrl = `https://img.shields.io/badge/Build%20Scan%C2%AE-${outcomeText}-${outcomeColor}?logo=Gradle`
const badgeHtml = `<img src="${badgeUrl}" alt="Build Scan ${outcomeText}" />`
return `<a href="${targetUrl}" rel="nofollow">${badgeHtml}</a>`
}
function shouldGenerateJobSummary(buildResults: BuildResult[]): boolean {
// Check if Job Summary is supported on this platform
if (!process.env[SUMMARY_ENV_VAR]) {
return false
}
// Check if Job Summary is disabled using the deprecated input
if (!params.isJobSummaryEnabled()) {
return false
}
return shouldAddJobSummary(params.getJobSummaryOption(), buildResults)
}
function shouldAddPRComment(buildResults: BuildResult[]): boolean {
return shouldAddJobSummary(params.getPRCommentOption(), buildResults)
}
function shouldAddJobSummary(option: params.JobSummaryOption, buildResults: BuildResult[]): boolean {
switch (option) {
case params.JobSummaryOption.Always:
return true
case params.JobSummaryOption.Never:
return false
case params.JobSummaryOption.OnFailure:
core.info(`Got these build results: ${JSON.stringify(buildResults)}`)
return buildResults.some(result => result.buildFailed)
}
}

View File

@ -1,37 +0,0 @@
import * as core from '@actions/core'
import * as setupGradle from './setup-gradle'
import * as execution from './execution'
import * as provisioner from './provision'
import * as layout from './repository-layout'
import * as params from './input-params'
/**
* The main entry point for the action, called by GitHub Actions for the step.
*/
export async function run(): Promise<void> {
try {
// Configure Gradle environment (Gradle User Home)
await setupGradle.setup()
// Download and install Gradle if required
const executable = await provisioner.provisionGradle()
// Only execute if arguments have been provided
const args: string[] = params.getArguments()
if (args.length > 0) {
const buildRootDirectory = layout.buildRootDirectory()
await execution.executeGradleBuild(executable, buildRootDirectory, args)
}
} catch (error) {
core.setFailed(String(error))
if (error instanceof Error && error.stack) {
core.info(error.stack)
}
}
// Explicit process.exit() to prevent waiting for hanging promises.
process.exit()
}
run()

View File

@ -1,35 +0,0 @@
import * as core from '@actions/core'
import * as setupGradle from './setup-gradle'
import {PostActionJobFailure} from './errors'
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception. Instead of failing this action, just warn.
process.on('uncaughtException', e => handleFailure(e))
/**
* The post-execution entry point for the action, called by GitHub Actions after completing all steps for the Job.
*/
export async function run(): Promise<void> {
try {
await setupGradle.complete()
} catch (error) {
if (error instanceof PostActionJobFailure) {
core.setFailed(String(error))
} else {
handleFailure(error)
}
}
// Explicit process.exit() to prevent waiting for promises left hanging by `@actions/cache` on save.
process.exit()
}
function handleFailure(error: unknown): void {
core.warning(`Unhandled error in Gradle post-action - job will continue: ${error}`)
if (error instanceof Error && error.stack) {
core.info(error.stack)
}
}
run()

View File

@ -1,180 +0,0 @@
import * as fs from 'fs'
import * as os from 'os'
import * as path from 'path'
import * as httpm from '@actions/http-client'
import * as core from '@actions/core'
import * as cache from '@actions/cache'
import * as toolCache from '@actions/tool-cache'
import * as gradlew from './gradlew'
import * as params from './input-params'
import {handleCacheFailure, isCacheDisabled, isCacheReadOnly} from './cache-utils'
const gradleVersionsBaseUrl = 'https://services.gradle.org/versions'
/**
* Install any configured version of Gradle, adding the executable to the PATH.
* @return Installed Gradle executable, or undefined if no version is configured.
*/
export async function provisionGradle(): Promise<string | undefined> {
const gradleVersion = params.getGradleVersion()
if (gradleVersion !== '' && gradleVersion !== 'wrapper') {
return addToPath(await installGradle(gradleVersion))
}
return undefined
}
async function addToPath(executable: string): Promise<string> {
core.addPath(path.dirname(executable))
return executable
}
async function installGradle(version: string): Promise<string> {
const versionInfo = await resolveGradleVersion(version)
core.setOutput('gradle-version', versionInfo.version)
return installGradleVersion(versionInfo)
}
async function resolveGradleVersion(version: string): Promise<GradleVersionInfo> {
switch (version) {
case 'current':
return gradleCurrent()
case 'rc':
core.warning(`Specifying gradle-version 'rc' has been deprecated. Use 'release-candidate' instead.`)
return gradleReleaseCandidate()
case 'release-candidate':
return gradleReleaseCandidate()
case 'nightly':
return gradleNightly()
case 'release-nightly':
return gradleReleaseNightly()
default:
return gradle(version)
}
}
async function gradleCurrent(): Promise<GradleVersionInfo> {
return await gradleVersionDeclaration(`${gradleVersionsBaseUrl}/current`)
}
async function gradleReleaseCandidate(): Promise<GradleVersionInfo> {
const versionInfo = await gradleVersionDeclaration(`${gradleVersionsBaseUrl}/release-candidate`)
if (versionInfo && versionInfo.version && versionInfo.downloadUrl) {
return versionInfo
}
core.info('No current release-candidate found, will fall back to current')
return gradleCurrent()
}
async function gradleNightly(): Promise<GradleVersionInfo> {
return await gradleVersionDeclaration(`${gradleVersionsBaseUrl}/nightly`)
}
async function gradleReleaseNightly(): Promise<GradleVersionInfo> {
return await gradleVersionDeclaration(`${gradleVersionsBaseUrl}/release-nightly`)
}
async function gradle(version: string): Promise<GradleVersionInfo> {
const versionInfo = await findGradleVersionDeclaration(version)
if (!versionInfo) {
throw new Error(`Gradle version ${version} does not exist`)
}
return versionInfo
}
async function gradleVersionDeclaration(url: string): Promise<GradleVersionInfo> {
return await httpGetGradleVersion(url)
}
async function findGradleVersionDeclaration(version: string): Promise<GradleVersionInfo | undefined> {
const gradleVersions = await httpGetGradleVersions(`${gradleVersionsBaseUrl}/all`)
return gradleVersions.find((entry: GradleVersionInfo) => {
return entry.version === version
})
}
async function installGradleVersion(versionInfo: GradleVersionInfo): Promise<string> {
return core.group(`Provision Gradle ${versionInfo.version}`, async () => {
return locateGradleAndDownloadIfRequired(versionInfo)
})
}
async function locateGradleAndDownloadIfRequired(versionInfo: GradleVersionInfo): Promise<string> {
const installsDir = path.join(os.homedir(), 'gradle-installations/installs')
const installDir = path.join(installsDir, `gradle-${versionInfo.version}`)
if (fs.existsSync(installDir)) {
core.info(`Gradle installation already exists at ${installDir}`)
return executableFrom(installDir)
}
const downloadPath = await downloadAndCacheGradleDistribution(versionInfo)
await toolCache.extractZip(downloadPath, installsDir)
core.info(`Extracted Gradle ${versionInfo.version} to ${installDir}`)
const executable = executableFrom(installDir)
fs.chmodSync(executable, '755')
core.info(`Provisioned Gradle executable ${executable}`)
return executable
}
async function downloadAndCacheGradleDistribution(versionInfo: GradleVersionInfo): Promise<string> {
const downloadPath = path.join(os.homedir(), `gradle-installations/downloads/gradle-${versionInfo.version}-bin.zip`)
if (isCacheDisabled()) {
await downloadGradleDistribution(versionInfo, downloadPath)
return downloadPath
}
const cacheKey = `gradle-${versionInfo.version}`
try {
const restoreKey = await cache.restoreCache([downloadPath], cacheKey)
if (restoreKey) {
core.info(`Restored Gradle distribution ${cacheKey} from cache to ${downloadPath}`)
return downloadPath
}
} catch (error) {
handleCacheFailure(error, `Restore Gradle distribution ${versionInfo.version} failed`)
}
core.info(`Gradle distribution ${versionInfo.version} not found in cache. Will download.`)
await downloadGradleDistribution(versionInfo, downloadPath)
if (!isCacheReadOnly()) {
try {
await cache.saveCache([downloadPath], cacheKey)
} catch (error) {
handleCacheFailure(error, `Save Gradle distribution ${versionInfo.version} failed`)
}
}
return downloadPath
}
async function downloadGradleDistribution(versionInfo: GradleVersionInfo, downloadPath: string): Promise<void> {
await toolCache.downloadTool(versionInfo.downloadUrl, downloadPath)
core.info(`Downloaded ${versionInfo.downloadUrl} to ${downloadPath} (size ${fs.statSync(downloadPath).size})`)
}
function executableFrom(installDir: string): string {
return path.join(installDir, 'bin', `${gradlew.installScriptFilename()}`)
}
async function httpGetGradleVersion(url: string): Promise<GradleVersionInfo> {
return JSON.parse(await httpGetString(url))
}
async function httpGetGradleVersions(url: string): Promise<GradleVersionInfo[]> {
return JSON.parse(await httpGetString(url))
}
async function httpGetString(url: string): Promise<string> {
const httpClient = new httpm.HttpClient('gradle/gradle-build-action')
const response = await httpClient.get(url)
return response.readBody()
}
interface GradleVersionInfo {
version: string
downloadUrl: string
}
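For orientation, the gradle-version aliases handled by resolveGradleVersion above map onto endpoints of the Gradle version-info service. A minimal sketch of that mapping follows, assuming the public services.gradle.org base URL (the actual gradleVersionsBaseUrl constant is declared earlier in this file and is not visible in this excerpt):
// Sketch of the alias handling in resolveGradleVersion() above.
// The base URL is an assumption; the real constant lives elsewhere in this file.
const assumedGradleVersionsBaseUrl = 'https://services.gradle.org/versions'
const aliasEndpoint: Record<string, string> = {
    current: `${assumedGradleVersionsBaseUrl}/current`,
    'release-candidate': `${assumedGradleVersionsBaseUrl}/release-candidate`,
    rc: `${assumedGradleVersionsBaseUrl}/release-candidate`, // deprecated alias for 'release-candidate'
    nightly: `${assumedGradleVersionsBaseUrl}/nightly`,
    'release-nightly': `${assumedGradleVersionsBaseUrl}/release-nightly`
}
// Any other value, such as '8.5', is matched against the `/all` listing instead.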

View File

@ -1,16 +0,0 @@
import * as params from './input-params'
import * as path from 'path'
export function workspaceDirectory(): string {
return process.env[`GITHUB_WORKSPACE`] || ''
}
export function buildRootDirectory(): string {
const baseDirectory = workspaceDirectory()
const buildRootDirectoryInput = params.getBuildRootDirectory()
const resolvedBuildRootDirectory =
buildRootDirectoryInput === ''
? path.resolve(baseDirectory)
: path.resolve(baseDirectory, buildRootDirectoryInput)
return resolvedBuildRootDirectory
}
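A minimal sketch of how buildRootDirectory() resolves the build-root-directory input against GITHUB_WORKSPACE, using assumed example paths:
import * as path from 'path'
// Illustrative values only; both paths are assumptions.
const workspace = '/home/runner/work/my-repo/my-repo' // stand-in for GITHUB_WORKSPACE
const buildRootInput = 'examples/app' // stand-in for the build-root-directory input
const buildRoot = buildRootInput === '' ? path.resolve(workspace) : path.resolve(workspace, buildRootInput)
// -> '/home/runner/work/my-repo/my-repo/examples/app'; an empty input resolves to the workspace itself.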

View File

@ -1,65 +0,0 @@
import org.gradle.tooling.events.*
import org.gradle.tooling.events.task.*
import org.gradle.util.GradleVersion
// Can't use settingsEvaluated since this script is applied inside a settingsEvaluated handler
// But projectsEvaluated is good enough, since the build service won't catch configuration failures anyway
projectsEvaluated {
def projectTracker = gradle.sharedServices.registerIfAbsent("gradle-build-action-buildResultsRecorder", BuildResultsRecorder, { spec ->
spec.getParameters().getRootProjectName().set(gradle.rootProject.name)
spec.getParameters().getRootProjectDir().set(gradle.rootProject.rootDir.absolutePath)
spec.getParameters().getRequestedTasks().set(gradle.startParameter.taskNames.join(" "))
spec.getParameters().getGradleHomeDir().set(gradle.gradleHomeDir.absolutePath)
spec.getParameters().getInvocationId().set(gradle.ext.invocationId)
})
gradle.services.get(BuildEventsListenerRegistry).onTaskCompletion(projectTracker)
}
abstract class BuildResultsRecorder implements BuildService<BuildResultsRecorder.Params>, OperationCompletionListener, AutoCloseable {
private boolean buildFailed = false
interface Params extends BuildServiceParameters {
Property<String> getRootProjectName()
Property<String> getRootProjectDir()
Property<String> getRequestedTasks()
Property<String> getGradleHomeDir()
Property<String> getInvocationId()
}
public void onFinish(FinishEvent finishEvent) {
if (finishEvent instanceof TaskFinishEvent && finishEvent.result instanceof TaskFailureResult) {
buildFailed = true
}
}
@Override
public void close() {
def buildResults = [
rootProjectName: getParameters().getRootProjectName().get(),
rootProjectDir: getParameters().getRootProjectDir().get(),
requestedTasks: getParameters().getRequestedTasks().get(),
gradleVersion: GradleVersion.current().version,
gradleHomeDir: getParameters().getGradleHomeDir().get(),
buildFailed: buildFailed,
buildScanUri: null,
buildScanFailed: false
]
def runnerTempDir = System.getenv("RUNNER_TEMP")
def githubActionStep = System.getenv("GITHUB_ACTION")
if (!runnerTempDir || !githubActionStep) {
return
}
try {
def buildResultsDir = new File(runnerTempDir, ".build-results")
buildResultsDir.mkdirs()
def buildResultsFile = new File(buildResultsDir, githubActionStep + getParameters().getInvocationId().get() + ".json")
if (!buildResultsFile.exists()) {
buildResultsFile << groovy.json.JsonOutput.toJson(buildResults)
}
} catch (Exception e) {
println "\ngradle-build-action failed to write build-results file. Will continue.\n> ${e.getLocalizedMessage()}"
}
}
}
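The JSON files written by this recorder are read back by the action's post step (see loadBuildResults in the TypeScript sources further down in this diff). A minimal sketch of such a reader, assuming the directory layout and field names used above; the helper name and signature are illustrative, not the action's actual implementation:
import * as fs from 'fs'
import * as path from 'path'
// Shape of the JSON written by BuildResultsRecorder.close() above.
interface BuildResult {
    rootProjectName: string
    rootProjectDir: string
    requestedTasks: string
    gradleVersion: string
    gradleHomeDir: string
    buildFailed: boolean
    buildScanUri: string | null
    buildScanFailed: boolean
}
// Hypothetical reader: collects every <RUNNER_TEMP>/.build-results/*.json file.
function readBuildResults(): BuildResult[] {
    const runnerTemp = process.env['RUNNER_TEMP']
    if (!runnerTemp) return []
    const resultsDir = path.join(runnerTemp, '.build-results')
    if (!fs.existsSync(resultsDir)) return []
    return fs
        .readdirSync(resultsDir)
        .filter(file => file.endsWith('.json'))
        .map(file => JSON.parse(fs.readFileSync(path.join(resultsDir, file), 'utf8')) as BuildResult)
}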

View File

@ -1,143 +0,0 @@
/*
* Capture information for each executed Gradle build to display in the job summary.
*/
import org.gradle.util.GradleVersion
// Only run against root build. Do not run against included builds.
def isTopLevelBuild = gradle.getParent() == null
if (isTopLevelBuild) {
def version = GradleVersion.current().baseVersion
def atLeastGradle3 = version >= GradleVersion.version("3.0")
def atLeastGradle6 = version >= GradleVersion.version("6.0")
def invocationId = "-${System.currentTimeMillis()}"
if (atLeastGradle6) {
def useBuildService = version >= GradleVersion.version("6.6")
settingsEvaluated { settings ->
// By default, use standard mechanisms to capture build results
if (useBuildService) {
captureUsingBuildService(settings, invocationId)
} else {
captureUsingBuildFinished(gradle, invocationId)
}
// The `buildScanPublished` hook allows the capture of the Build Scan URI.
// Results captured this way will overwrite any results from the other mechanism.
settings.pluginManager.withPlugin("com.gradle.enterprise") {
captureUsingBuildScanPublished(settings.extensions["gradleEnterprise"].buildScan, settings.rootProject, invocationId)
}
}
} else if (atLeastGradle3) {
projectsEvaluated { gradle ->
// By default, use 'buildFinished' to capture build results
captureUsingBuildFinished(gradle, invocationId)
// The `buildScanPublished` hook allows the capture of the Build Scan URI.
// Results captured this way will overwrite any results from 'buildFinished'.
gradle.rootProject.pluginManager.withPlugin("com.gradle.build-scan") {
captureUsingBuildScanPublished(gradle.rootProject.extensions["buildScan"], gradle.rootProject, invocationId)
}
}
}
}
def captureUsingBuildScanPublished(buildScanExtension, rootProject, invocationId) {
buildScanExtension.with {
def buildResults = new BuildResults(invocationId, gradle, rootProject)
buildFinished { result ->
buildResults.setBuildResult(result)
}
buildScanPublished { buildScan ->
buildResults.setBuildScanUri(buildScan.buildScanUri.toASCIIString())
buildResults.writeToResultsFile(true)
def githubOutput = System.getenv("GITHUB_OUTPUT")
if (githubOutput) {
new File(githubOutput) << "build-scan-url=${buildScan.buildScanUri}\n"
} else {
// Retained for compatibility with older GitHub Enterprise versions
println("::set-output name=build-scan-url::${buildScan.buildScanUri}")
}
}
onError { error ->
buildResults.setBuildScanFailed()
buildResults.writeToResultsFile(true)
}
}
}
def captureUsingBuildFinished(gradle, invocationId) {
gradle.buildFinished { result ->
println "Got buildFinished: ${result}"
def buildResults = new BuildResults(invocationId, gradle, gradle.rootProject)
buildResults.setBuildResult(result)
buildResults.writeToResultsFile(false)
}
}
def captureUsingBuildService(settings, invocationId) {
gradle.ext.invocationId = invocationId
apply from: 'gradle-build-action.build-result-capture-service.plugin.groovy'
}
class BuildResults {
def invocationId
def buildResults
BuildResults(String invocationId, def gradle, def rootProject) {
this.invocationId = invocationId
buildResults = [
rootProjectName: rootProject.name,
rootProjectDir: rootProject.projectDir.absolutePath,
requestedTasks: gradle.startParameter.taskNames.join(" "),
gradleVersion: GradleVersion.current().version,
gradleHomeDir: gradle.gradleHomeDir.absolutePath,
buildFailed: false,
buildScanUri: null,
buildScanFailed: false
]
}
def setBuildResult(def result) {
buildResults['buildFailed'] = result.failure != null
}
def setBuildScanUri(def buildScanUrl) {
buildResults['buildScanUri'] = buildScanUrl
}
def setBuildScanFailed() {
buildResults['buildScanFailed'] = true
}
def writeToResultsFile(boolean overwrite) {
def runnerTempDir = System.getenv("RUNNER_TEMP")
def githubActionStep = System.getenv("GITHUB_ACTION")
if (!runnerTempDir || !githubActionStep) {
return
}
try {
def buildResultsDir = new File(runnerTempDir, ".build-results")
buildResultsDir.mkdirs()
def buildResultsFile = new File(buildResultsDir, githubActionStep + invocationId + ".json")
// Overwrite any contents written by buildFinished or build service, since this result is a superset.
if (buildResultsFile.exists()) {
if (overwrite) {
buildResultsFile.text = groovy.json.JsonOutput.toJson(buildResults)
}
} else {
buildResultsFile << groovy.json.JsonOutput.toJson(buildResults)
}
} catch (Exception e) {
println "\ngradle-build-action failed to write build-results file. Will continue.\n> ${e.getLocalizedMessage()}"
}
}
}

View File

@ -1,15 +0,0 @@
buildscript {
def getInputParam = { String name ->
def envVarName = name.toUpperCase().replace('.', '_').replace('-', '_')
return System.getProperty(name) ?: System.getenv(envVarName)
}
def pluginRepositoryUrl = getInputParam('gradle.plugin-repository.url') ?: 'https://plugins.gradle.org/m2'
repositories {
maven { url pluginRepositoryUrl }
}
dependencies {
classpath "org.gradle:github-dependency-graph-gradle-plugin:1.1.1"
}
}
apply plugin: org.gradle.github.GitHubDependencyGraphPlugin

View File

@ -1,65 +0,0 @@
import org.gradle.util.GradleVersion
// Only run when dependency graph is explicitly enabled
if (getVariable('GITHUB_DEPENDENCY_GRAPH_ENABLED') != "true") {
return
}
// Do not run for unsupported versions of Gradle
def gradleVersion = GradleVersion.current().baseVersion
if (gradleVersion < GradleVersion.version("5.2") ||
(gradleVersion >= GradleVersion.version("7.0") && gradleVersion < GradleVersion.version("7.1"))) {
if (getVariable('GITHUB_DEPENDENCY_GRAPH_CONTINUE_ON_FAILURE') != "true") {
throw new GradleException("Dependency Graph is not supported for ${gradleVersion}. No dependency snapshot will be generated.")
}
println "::warning::Dependency Graph is not supported for ${gradleVersion}. No dependency snapshot will be generated."
return
}
// Attempt to find a unique job correlator to use based on the environment variable
// This is only required for top-level builds
def isTopLevelBuild = gradle.getParent() == null
if (isTopLevelBuild) {
def reportFile = getUniqueReportFile(getVariable('GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR'))
if (reportFile == null) {
println "::warning::No dependency snapshot generated for step. Could not determine unique job correlator - specify GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR var for this step."
return
}
println "Generating dependency graph into '${reportFile}'"
}
apply from: 'gradle-build-action.github-dependency-graph-gradle-plugin-apply.groovy'
/**
* Using the supplied jobCorrelator value:
* - Checks if report file already exists
* - If so, tries to find a unique value that does not yet have a corresponding report file.
* - When found, this value is set as a System property override.
*/
File getUniqueReportFile(String jobCorrelator) {
def reportDir = getVariable('DEPENDENCY_GRAPH_REPORT_DIR')
def reportFile = new File(reportDir, jobCorrelator + ".json")
if (!reportFile.exists()) return reportFile
// Try up to 99 numbered suffixes
for (int i = 1; i < 100; i++) {
def candidateCorrelator = jobCorrelator + "-" + i
def candidateFile = new File(reportDir, candidateCorrelator + ".json")
if (!candidateFile.exists()) {
System.properties['GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR'] = candidateCorrelator
return candidateFile
}
}
// Could not determine unique job correlator
return null
}
/**
* Return the environment variable value, or equivalent system property (if set)
*/
String getVariable(String name) {
return System.properties[name] ?: System.getenv(name)
}
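For reference, a sketch of the environment this init script expects before it will generate a dependency snapshot. The variable names come from the getVariable() lookups above (and from the test fixtures later in this diff); the values are placeholders:
// Placeholder values; only the variable names are taken from the script above.
const dependencyGraphEnv: Record<string, string> = {
    GITHUB_DEPENDENCY_GRAPH_ENABLED: 'true',
    GITHUB_DEPENDENCY_GRAPH_CONTINUE_ON_FAILURE: 'true',
    GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR: 'my-workflow-my-job', // placeholder correlator
    DEPENDENCY_GRAPH_REPORT_DIR: '/home/runner/work/_temp/dependency-graph-reports' // placeholder path
}
// When '<correlator>.json' already exists, getUniqueReportFile() falls back to
// '<correlator>-1.json', '<correlator>-2.json', ... trying up to 99 numbered suffixes.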

View File

@ -1,211 +0,0 @@
import org.gradle.util.GradleVersion
// Note: there is no mechanism to share code between the initscript{} block and the main script, so some logic is duplicated
// Conditionally add the GE / Build Scan plugin to the initscript classpath so it can be applied to the build further down in this script
initscript {
def isTopLevelBuild = !gradle.parent
if (!isTopLevelBuild) {
return
}
def getInputParam = { String name ->
def envVarName = name.toUpperCase().replace('.', '_').replace('-', '_')
return System.getProperty(name) ?: System.getenv(envVarName)
}
// finish early if injection is disabled
def gradleInjectionEnabled = getInputParam("develocity.injection-enabled")
if (gradleInjectionEnabled != "true") {
return
}
def pluginRepositoryUrl = getInputParam('gradle.plugin-repository.url')
def gePluginVersion = getInputParam('develocity.plugin.version')
def ccudPluginVersion = getInputParam('develocity.ccud-plugin.version')
def atLeastGradle5 = GradleVersion.current() >= GradleVersion.version('5.0')
def atLeastGradle4 = GradleVersion.current() >= GradleVersion.version('4.0')
if (gePluginVersion || ccudPluginVersion && atLeastGradle4) {
pluginRepositoryUrl = pluginRepositoryUrl ?: 'https://plugins.gradle.org/m2'
logger.quiet("Develocity plugins resolution: $pluginRepositoryUrl")
repositories {
maven { url pluginRepositoryUrl }
}
}
dependencies {
if (gePluginVersion) {
classpath atLeastGradle5 ?
"com.gradle:gradle-enterprise-gradle-plugin:$gePluginVersion" :
"com.gradle:build-scan-plugin:1.16"
}
if (ccudPluginVersion && atLeastGradle4) {
classpath "com.gradle:common-custom-user-data-gradle-plugin:$ccudPluginVersion"
}
}
}
def BUILD_SCAN_PLUGIN_ID = 'com.gradle.build-scan'
def BUILD_SCAN_PLUGIN_CLASS = 'com.gradle.scan.plugin.BuildScanPlugin'
def DEVELOCITY_PLUGIN_ID = 'com.gradle.enterprise'
def DEVELOCITY_PLUGIN_CLASS = 'com.gradle.enterprise.gradleplugin.GradleEnterprisePlugin'
def DEVELOCITY_EXTENSION_CLASS = 'com.gradle.enterprise.gradleplugin.GradleEnterpriseExtension'
def CI_AUTO_INJECTION_CUSTOM_VALUE_NAME = 'CI auto injection'
def CI_AUTO_INJECTION_CUSTOM_VALUE_VALUE = 'gradle-build-action'
def CCUD_PLUGIN_ID = 'com.gradle.common-custom-user-data-gradle-plugin'
def CCUD_PLUGIN_CLASS = 'com.gradle.CommonCustomUserDataGradlePlugin'
def isTopLevelBuild = !gradle.parent
if (!isTopLevelBuild) {
return
}
def getInputParam = { String name ->
def envVarName = name.toUpperCase().replace('.', '_').replace('-', '_')
return System.getProperty(name) ?: System.getenv(envVarName)
}
// finish early if injection is disabled
def gradleInjectionEnabled = getInputParam("develocity.injection-enabled")
if (gradleInjectionEnabled != "true") {
return
}
def geUrl = getInputParam('develocity.url')
def geAllowUntrustedServer = Boolean.parseBoolean(getInputParam('develocity.allow-untrusted-server'))
def geEnforceUrl = Boolean.parseBoolean(getInputParam('develocity.enforce-url'))
def buildScanUploadInBackground = Boolean.parseBoolean(getInputParam('develocity.build-scan.upload-in-background'))
def gePluginVersion = getInputParam('develocity.plugin.version')
def ccudPluginVersion = getInputParam('develocity.ccud-plugin.version')
def buildScanTermsOfServiceUrl = getInputParam('build-scan.terms-of-service.url')
def buildScanTermsOfServiceAgree = getInputParam('build-scan.terms-of-service.agree')
def atLeastGradle4 = GradleVersion.current() >= GradleVersion.version('4.0')
// finish early if configuration parameters passed in via system properties are not valid/supported
if (ccudPluginVersion && isNotAtLeast(ccudPluginVersion, '1.7')) {
logger.warn("Common Custom User Data Gradle plugin must be at least 1.7. Configured version is $ccudPluginVersion.")
return
}
// register buildScanPublished listener and optionally apply the GE / Build Scan plugin
if (GradleVersion.current() < GradleVersion.version('6.0')) {
rootProject {
buildscript.configurations.getByName("classpath").incoming.afterResolve { ResolvableDependencies incoming ->
def resolutionResult = incoming.resolutionResult
if (gePluginVersion) {
def scanPluginComponent = resolutionResult.allComponents.find {
it.moduleVersion.with { group == "com.gradle" && (name == "build-scan-plugin" || name == "gradle-enterprise-gradle-plugin") }
}
if (!scanPluginComponent) {
logger.quiet("Applying $BUILD_SCAN_PLUGIN_CLASS via init script")
applyPluginExternally(pluginManager, BUILD_SCAN_PLUGIN_CLASS)
if (geUrl) {
logger.quiet("Connection to Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer")
buildScan.server = geUrl
buildScan.allowUntrustedServer = geAllowUntrustedServer
}
buildScan.publishAlways()
if (buildScan.metaClass.respondsTo(buildScan, 'setUploadInBackground', Boolean)) buildScan.uploadInBackground = buildScanUploadInBackground // uploadInBackground not available for build-scan-plugin 1.16
buildScan.value CI_AUTO_INJECTION_CUSTOM_VALUE_NAME, CI_AUTO_INJECTION_CUSTOM_VALUE_VALUE
}
if (geUrl && geEnforceUrl) {
pluginManager.withPlugin(BUILD_SCAN_PLUGIN_ID) {
afterEvaluate {
logger.quiet("Enforcing Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer")
buildScan.server = geUrl
buildScan.allowUntrustedServer = geAllowUntrustedServer
}
}
}
if (buildScanTermsOfServiceUrl && buildScanTermsOfServiceAgree) {
buildScan.termsOfServiceUrl = buildScanTermsOfServiceUrl
buildScan.termsOfServiceAgree = buildScanTermsOfServiceAgree
}
}
if (ccudPluginVersion && atLeastGradle4) {
def ccudPluginComponent = resolutionResult.allComponents.find {
it.moduleVersion.with { group == "com.gradle" && name == "common-custom-user-data-gradle-plugin" }
}
if (!ccudPluginComponent) {
logger.quiet("Applying $CCUD_PLUGIN_CLASS via init script")
pluginManager.apply(initscript.classLoader.loadClass(CCUD_PLUGIN_CLASS))
}
}
}
}
} else {
gradle.settingsEvaluated { settings ->
if (gePluginVersion) {
if (!settings.pluginManager.hasPlugin(DEVELOCITY_PLUGIN_ID)) {
logger.quiet("Applying $DEVELOCITY_PLUGIN_CLASS via init script")
applyPluginExternally(settings.pluginManager, DEVELOCITY_PLUGIN_CLASS)
eachDevelocityExtension(settings, DEVELOCITY_EXTENSION_CLASS) { ext ->
if (geUrl) {
logger.quiet("Connection to Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer")
ext.server = geUrl
ext.allowUntrustedServer = geAllowUntrustedServer
}
ext.buildScan.publishAlways()
ext.buildScan.uploadInBackground = buildScanUploadInBackground
ext.buildScan.value CI_AUTO_INJECTION_CUSTOM_VALUE_NAME, CI_AUTO_INJECTION_CUSTOM_VALUE_VALUE
}
}
if (geUrl && geEnforceUrl) {
eachDevelocityExtension(settings, DEVELOCITY_EXTENSION_CLASS) { ext ->
logger.quiet("Enforcing Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer")
ext.server = geUrl
ext.allowUntrustedServer = geAllowUntrustedServer
}
}
if (buildScanTermsOfServiceUrl && buildScanTermsOfServiceAgree) {
eachDevelocityExtension(settings, DEVELOCITY_EXTENSION_CLASS) { ext ->
ext.buildScan.termsOfServiceUrl = buildScanTermsOfServiceUrl
ext.buildScan.termsOfServiceAgree = buildScanTermsOfServiceAgree
}
}
}
if (ccudPluginVersion) {
if (!settings.pluginManager.hasPlugin(CCUD_PLUGIN_ID)) {
logger.quiet("Applying $CCUD_PLUGIN_CLASS via init script")
settings.pluginManager.apply(initscript.classLoader.loadClass(CCUD_PLUGIN_CLASS))
}
}
}
}
void applyPluginExternally(def pluginManager, String pluginClassName) {
def externallyApplied = 'develocity.externally-applied'
def oldValue = System.getProperty(externallyApplied)
System.setProperty(externallyApplied, 'true')
try {
pluginManager.apply(initscript.classLoader.loadClass(pluginClassName))
} finally {
if (oldValue == null) {
System.clearProperty(externallyApplied)
} else {
System.setProperty(externallyApplied, oldValue)
}
}
}
static def eachDevelocityExtension(def settings, def publicType, def action) {
settings.extensions.extensionsSchema.elements.findAll { it.publicType.concreteClass.name == publicType }
.collect { settings[it.name] }.each(action)
}
static boolean isNotAtLeast(String versionUnderTest, String referenceVersion) {
GradleVersion.version(versionUnderTest) < GradleVersion.version(referenceVersion)
}
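The getInputParam convention used throughout this script resolves each logical parameter from a system property, falling back to an environment variable derived from the parameter name. A small sketch of that name mapping (the mapping rule is taken directly from the script; the function name is illustrative):
// Mirrors the env-var naming used by getInputParam above:
// upper-case the parameter name and replace '.' and '-' with '_'.
function inputParamEnvName(name: string): string {
    return name.toUpperCase().replace(/\./g, '_').replace(/-/g, '_')
}
// e.g. 'develocity.plugin.version'      -> 'DEVELOCITY_PLUGIN_VERSION'
//      'develocity.ccud-plugin.version' -> 'DEVELOCITY_CCUD_PLUGIN_VERSION'
//      'gradle.plugin-repository.url'   -> 'GRADLE_PLUGIN_REPOSITORY_URL'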

View File

@ -1,44 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<toolchains>
<!-- JDK Toolchains installed by default on GitHub-hosted runners -->
<toolchain>
<type>jdk</type>
<provides>
<version>8</version>
<vendor>Eclipse Temurin</vendor>
</provides>
<configuration>
<jdkHome>${env.JAVA_HOME_8_X64}</jdkHome>
</configuration>
</toolchain>
<toolchain>
<type>jdk</type>
<provides>
<version>11</version>
<vendor>Eclipse Temurin</vendor>
</provides>
<configuration>
<jdkHome>${env.JAVA_HOME_11_X64}</jdkHome>
</configuration>
</toolchain>
<toolchain>
<type>jdk</type>
<provides>
<version>17</version>
<vendor>Eclipse Temurin</vendor>
</provides>
<configuration>
<jdkHome>${env.JAVA_HOME_17_X64}</jdkHome>
</configuration>
</toolchain>
<toolchain>
<type>jdk</type>
<provides>
<version>21</version>
<vendor>Eclipse Temurin</vendor>
</provides>
<configuration>
<jdkHome>${env.JAVA_HOME_21_X64}</jdkHome>
</configuration>
</toolchain>
</toolchains>
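The ${env.JAVA_HOME_*_X64} placeholders above resolve against environment variables exported on GitHub-hosted runners for their pre-installed JDKs. A small sketch that reports which of them are present (their presence and paths depend on the runner image):
// Lists the pre-installed JDK homes referenced by the toolchains file above.
const toolchainEnvVars = ['JAVA_HOME_8_X64', 'JAVA_HOME_11_X64', 'JAVA_HOME_17_X64', 'JAVA_HOME_21_X64']
for (const name of toolchainEnvVars) {
    console.log(`${name} -> ${process.env[name] ?? '(not set on this runner)'}`)
}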

View File

@ -1,97 +0,0 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as path from 'path'
import * as os from 'os'
import * as caches from './caches'
import * as layout from './repository-layout'
import * as params from './input-params'
import * as dependencyGraph from './dependency-graph'
import * as jobSummary from './job-summary'
import * as buildScan from './build-scan'
import {loadBuildResults} from './build-results'
import {CacheListener} from './cache-reporting'
import {DaemonController} from './daemon-controller'
const GRADLE_SETUP_VAR = 'GRADLE_BUILD_ACTION_SETUP_COMPLETED'
const USER_HOME = 'USER_HOME'
const GRADLE_USER_HOME = 'GRADLE_USER_HOME'
const CACHE_LISTENER = 'CACHE_LISTENER'
export async function setup(): Promise<void> {
const userHome = await determineUserHome()
const gradleUserHome = await determineGradleUserHome()
// Bypass setup on all but first action step in workflow.
if (process.env[GRADLE_SETUP_VAR]) {
core.info('Gradle setup only performed on first gradle-build-action step in workflow.')
return
}
// Record setup complete: visible to all subsequent actions and prevents duplicate setup
core.exportVariable(GRADLE_SETUP_VAR, true)
// Record setup complete: visible in post-action, to control action completion
core.saveState(GRADLE_SETUP_VAR, true)
// Save the User Home and Gradle User Home for use in the post-action step.
core.saveState(USER_HOME, userHome)
core.saveState(GRADLE_USER_HOME, gradleUserHome)
const cacheListener = new CacheListener()
await caches.restore(userHome, gradleUserHome, cacheListener)
core.saveState(CACHE_LISTENER, cacheListener.stringify())
await dependencyGraph.setup(params.getDependencyGraphOption())
buildScan.setup()
}
export async function complete(): Promise<void> {
if (!core.getState(GRADLE_SETUP_VAR)) {
core.info('Gradle setup post-action only performed for first gradle-build-action step in workflow.')
return
}
core.info('In post-action step')
const buildResults = loadBuildResults()
const userHome = core.getState(USER_HOME)
const gradleUserHome = core.getState(GRADLE_USER_HOME)
const cacheListener: CacheListener = CacheListener.rehydrate(core.getState(CACHE_LISTENER))
const daemonController = new DaemonController(buildResults)
await caches.save(userHome, gradleUserHome, cacheListener, daemonController)
await jobSummary.generateJobSummary(buildResults, cacheListener)
await dependencyGraph.complete(params.getDependencyGraphOption())
core.info('Completed post-action step')
}
async function determineGradleUserHome(): Promise<string> {
const customGradleUserHome = process.env['GRADLE_USER_HOME']
if (customGradleUserHome) {
const rootDir = layout.workspaceDirectory()
return path.resolve(rootDir, customGradleUserHome)
}
return path.resolve(await determineUserHome(), '.gradle')
}
/**
* os.homedir() in JavaScript and System.getProperty('user.home') in Java can return different values.
* To determine the correct Gradle User Home, we ask Java for the user home rather than using os.homedir().
*/
async function determineUserHome(): Promise<string> {
const output = await exec.getExecOutput('java', ['-XshowSettings:properties', '-version'], {silent: true})
const regex = /user\.home = (\S*)/i
const found = output.stderr.match(regex)
if (found == null || found.length <= 1) {
core.info('Could not determine user.home from java -version output. Using os.homedir().')
return os.homedir()
}
const userHome = found[1]
core.debug(`Determined user.home from java -version output: '${userHome}'`)
return userHome
}
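A short illustration of the parsing performed by determineUserHome(), using an assumed fragment of the stderr produced by `java -XshowSettings:properties -version`:
// Assumed sample output; real output varies by JVM vendor and version.
const sampleStderr = `Property settings:
    user.dir = /home/runner/work/my-repo/my-repo
    user.home = /home/runner
    user.name = runner
openjdk version "17.0.9"`
const found = sampleStderr.match(/user\.home = (\S*)/i)
const resolvedUserHome = found && found.length > 1 ? found[1] : undefined
// -> '/home/runner'; when no match is found, the action falls back to os.homedir().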

View File

@ -1,2 +0,0 @@
build
.gradle

View File

@ -1,28 +0,0 @@
plugins {
id 'groovy'
}
java {
toolchain {
languageVersion = JavaLanguageVersion.of(8)
}
}
repositories {
mavenCentral()
}
dependencies {
testImplementation gradleTestKit()
testImplementation 'org.spockframework:spock-core:2.3-groovy-3.0'
testImplementation('org.spockframework:spock-junit4:2.3-groovy-3.0')
testImplementation ('io.ratpack:ratpack-groovy-test:1.9.0') {
exclude group: 'org.codehaus.groovy', module: 'groovy-all'
}
testImplementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.16.0'
}
test {
useJUnitPlatform()
}

View File

@ -1,8 +0,0 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionSha256Sum=591855b517fc635b9e04de1d05d5e76ada3f89f5fc76f87978d1b245b4f69225
distributionUrl=https\://services.gradle.org/distributions/gradle-8.3-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@ -1,249 +0,0 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

View File

@ -1,92 +0,0 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -1,14 +0,0 @@
plugins {
id "com.gradle.enterprise" version "3.16.1"
id "com.gradle.common-custom-user-data-gradle-plugin" version "1.12.1"
}
gradleEnterprise {
buildScan {
termsOfServiceUrl = "https://gradle.com/terms-of-service"
termsOfServiceAgree = "yes"
publishAlways()
uploadInBackground = false
}
}
rootProject.name = 'test-init-scripts'

View File

@ -1,262 +0,0 @@
package com.gradle.gradlebuildaction
import com.fasterxml.jackson.core.JsonFactory
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.smile.SmileFactory
import org.gradle.testkit.runner.BuildResult
import org.gradle.testkit.runner.GradleRunner
import org.gradle.testkit.runner.internal.DefaultGradleRunner
import org.gradle.util.GradleVersion
import ratpack.groovy.test.embed.GroovyEmbeddedApp
import spock.lang.AutoCleanup
import spock.lang.Specification
import spock.lang.TempDir
import java.nio.file.Files
import java.util.zip.GZIPOutputStream
class BaseInitScriptTest extends Specification {
static final String DEVELOCITY_PLUGIN_VERSION = '3.16.1'
static final String CCUD_PLUGIN_VERSION = '1.12.1'
static final TestGradleVersion GRADLE_3_X = new TestGradleVersion(GradleVersion.version('3.5.1'), 7, 9)
static final TestGradleVersion GRADLE_4_X = new TestGradleVersion(GradleVersion.version('4.10.3'), 7, 10)
static final TestGradleVersion GRADLE_5_X = new TestGradleVersion(GradleVersion.version('5.6.4'), 8, 12)
static final TestGradleVersion GRADLE_6_NO_BUILD_SERVICE = new TestGradleVersion(GradleVersion.version('6.5.1'), 8, 14)
static final TestGradleVersion GRADLE_6_X = new TestGradleVersion(GradleVersion.version('6.9.4'), 8, 15)
static final TestGradleVersion GRADLE_7_1 = new TestGradleVersion(GradleVersion.version('7.6.2'), 8, 19)
static final TestGradleVersion GRADLE_7_X = new TestGradleVersion(GradleVersion.version('7.6.2'), 8, 19)
static final TestGradleVersion GRADLE_8_0 = new TestGradleVersion(GradleVersion.version('8.0.2'), 8, 19)
static final TestGradleVersion GRADLE_8_X = new TestGradleVersion(GradleVersion.version('8.5'), 8, 19)
static final List<TestGradleVersion> ALL_VERSIONS = [
GRADLE_3_X, // First version where TestKit supports environment variables
GRADLE_4_X,
GRADLE_5_X,
GRADLE_6_NO_BUILD_SERVICE, // Last version without build service support
GRADLE_6_X,
GRADLE_7_X,
GRADLE_8_0,
GRADLE_8_X,
]
static final List<TestGradleVersion> CONFIGURATION_CACHE_VERSIONS =
[GRADLE_7_X, GRADLE_8_0, GRADLE_8_X]
static final List<TestGradleVersion> SETTINGS_PLUGIN_VERSIONS =
[GRADLE_6_X, GRADLE_7_X, GRADLE_8_0, GRADLE_8_X]
static final String PUBLIC_BUILD_SCAN_ID = 'i2wepy2gr7ovw'
static final String DEFAULT_SCAN_UPLOAD_TOKEN = 'scan-upload-token'
static final String ROOT_PROJECT_NAME = 'test-init-script'
boolean failScanUpload = false
File settingsFile
File buildFile
@TempDir
File testProjectDir
@AutoCleanup
def mockScansServer = GroovyEmbeddedApp.of {
def jsonWriter = new ObjectMapper(new JsonFactory()).writer()
def smileWriter = new ObjectMapper(new SmileFactory()).writer()
handlers {
post('in/:gradleVersion/:pluginVersion') {
if (failScanUpload) {
context.response.status(401).send()
return
}
def scanUrlString = "${mockScansServer.address}s/$PUBLIC_BUILD_SCAN_ID"
def body = [
id : PUBLIC_BUILD_SCAN_ID,
scanUrl: scanUrlString.toString(),
]
def out = new ByteArrayOutputStream()
new GZIPOutputStream(out).withStream { smileWriter.writeValue(it, body) }
context.response
.contentType('application/vnd.gradle.scan-ack')
.send(out.toByteArray())
}
prefix('scans/publish') {
post('gradle/:pluginVersion/token') {
if (failScanUpload) {
context.response.status(401).send()
return
}
def pluginVersion = context.pathTokens.pluginVersion
def scanUrlString = "${mockScansServer.address}s/$PUBLIC_BUILD_SCAN_ID"
def body = [
id : PUBLIC_BUILD_SCAN_ID,
scanUrl : scanUrlString.toString(),
scanUploadUrl : "${mockScansServer.address.toString()}scans/publish/gradle/$pluginVersion/upload".toString(),
scanUploadToken: DEFAULT_SCAN_UPLOAD_TOKEN
]
context.response
.contentType('application/vnd.gradle.scan-ack+json')
.send(jsonWriter.writeValueAsBytes(body))
}
post('gradle/:pluginVersion/upload') {
if (failScanUpload) {
context.response.status(401).send()
return
}
context.request.getBody(1024 * 1024 * 10).then {
context.response
.contentType('application/vnd.gradle.scan-upload-ack+json')
.send()
}
}
notFound()
}
}
}
def setup() {
settingsFile = new File(testProjectDir, 'settings.gradle')
buildFile = new File(testProjectDir, 'build.gradle')
File srcInitScriptsDir = new File("../../src/resources/init-scripts")
File targetInitScriptsDir = new File(testProjectDir, "initScripts")
targetInitScriptsDir.mkdirs()
for (File srcInitScript : srcInitScriptsDir.listFiles()) {
File targetInitScript = new File(targetInitScriptsDir, srcInitScript.name)
Files.copy(srcInitScript.toPath(), targetInitScript.toPath())
}
settingsFile << "rootProject.name = '${ROOT_PROJECT_NAME}'\n"
buildFile << ''
}
def declareGePluginApplication(GradleVersion gradleVersion, URI serverUrl = mockScansServer.address) {
settingsFile.text = maybeAddPluginsToSettings(gradleVersion, null, serverUrl) + settingsFile.text
buildFile.text = maybeAddPluginsToRootProject(gradleVersion, null, serverUrl) + buildFile.text
}
def declareGePluginAndCcudPluginApplication(GradleVersion gradleVersion, URI serverUrl = mockScansServer.address) {
settingsFile.text = maybeAddPluginsToSettings(gradleVersion, CCUD_PLUGIN_VERSION, serverUrl) + settingsFile.text
buildFile.text = maybeAddPluginsToRootProject(gradleVersion, CCUD_PLUGIN_VERSION, serverUrl) + buildFile.text
}
String maybeAddPluginsToSettings(GradleVersion gradleVersion, String ccudPluginVersion, URI serverUri) {
if (gradleVersion < GradleVersion.version('5.0')) {
'' // applied in build.gradle
} else if (gradleVersion < GradleVersion.version('6.0')) {
'' // applied in build.gradle
} else {
"""
plugins {
id 'com.gradle.enterprise' version '${DEVELOCITY_PLUGIN_VERSION}'
${ccudPluginVersion ? "id 'com.gradle.common-custom-user-data-gradle-plugin' version '$ccudPluginVersion'" : ""}
}
gradleEnterprise {
server = '$serverUri'
buildScan {
publishAlways()
}
}
"""
}
}
String maybeAddPluginsToRootProject(GradleVersion gradleVersion, String ccudPluginVersion, URI serverUrl) {
if (gradleVersion < GradleVersion.version('5.0')) {
"""
plugins {
id 'com.gradle.build-scan' version '1.16'
${ccudPluginVersion ? "id 'com.gradle.common-custom-user-data-gradle-plugin' version '$ccudPluginVersion'" : ""}
}
buildScan {
server = '$serverUrl'
publishAlways()
}
"""
} else if (gradleVersion < GradleVersion.version('6.0')) {
"""
plugins {
id 'com.gradle.build-scan' version '${DEVELOCITY_PLUGIN_VERSION}'
${ccudPluginVersion ? "id 'com.gradle.common-custom-user-data-gradle-plugin' version '$ccudPluginVersion'" : ""}
}
gradleEnterprise {
server = '$serverUrl'
buildScan {
publishAlways()
}
}
"""
} else {
'' // applied in settings.gradle
}
}
def addFailingTaskToBuild() {
buildFile << '''
task expectFailure {
doLast {
throw new RuntimeException("Expected to fail")
}
}
'''
}
BuildResult run(List<String> args, String initScript, GradleVersion gradleVersion = GradleVersion.current(), List<String> jvmArgs = [], Map<String, String> envVars = [:]) {
createRunner(initScript, args, gradleVersion, jvmArgs, envVars).build()
}
BuildResult runAndFail(List<String> args, String initScript, GradleVersion gradleVersion = GradleVersion.current(), List<String> jvmArgs = [], Map<String, String> envVars = [:]) {
createRunner(initScript, args, gradleVersion, jvmArgs, envVars).buildAndFail()
}
GradleRunner createRunner(String initScript, List<String> args, GradleVersion gradleVersion = GradleVersion.current(), List<String> jvmArgs = [], Map<String, String> envVars = [:]) {
File initScriptsDir = new File(testProjectDir, "initScripts")
args << '-I' << new File(initScriptsDir, initScript).absolutePath
envVars.putIfAbsent('RUNNER_TEMP', testProjectDir.absolutePath)
envVars.putIfAbsent('GITHUB_ACTION', 'github-step-id')
def runner = ((DefaultGradleRunner) GradleRunner.create())
.withJvmArguments(jvmArgs)
.withGradleVersion(gradleVersion.version)
.withProjectDir(testProjectDir)
.withArguments(args)
.withEnvironment(envVars)
.forwardOutput()
runner
}
static final class TestGradleVersion {
final GradleVersion gradleVersion
private final Integer jdkMin
private final Integer jdkMax
TestGradleVersion(GradleVersion gradleVersion, Integer jdkMin, Integer jdkMax) {
this.gradleVersion = gradleVersion
this.jdkMin = jdkMin
this.jdkMax = jdkMax
}
boolean isCompatibleWithCurrentJvm() {
def jvmVersion = getJvmVersion()
jdkMin <= jvmVersion && jvmVersion <= jdkMax
}
private static int getJvmVersion() {
String version = System.getProperty('java.version')
if (version.startsWith('1.')) {
Integer.parseInt(version.substring(2, 3))
} else {
Integer.parseInt(version.substring(0, version.indexOf('.')))
}
}
@Override
String toString() {
return "Gradle " + gradleVersion.version
}
}
}

View File

@ -1,217 +0,0 @@
package com.gradle.gradlebuildaction
import groovy.json.JsonSlurper
import static org.junit.Assume.assumeTrue
class TestBuildResultRecorder extends BaseInitScriptTest {
def initScript = 'gradle-build-action.build-result-capture.init.gradle'
def "produces build results file for build with #testGradleVersion"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
run(['help'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('help', testGradleVersion, false, false)
where:
testGradleVersion << ALL_VERSIONS
}
def "produces build results file for failing build with #testGradleVersion"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
addFailingTaskToBuild()
runAndFail(['expectFailure'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('expectFailure', testGradleVersion, true, false)
where:
testGradleVersion << ALL_VERSIONS
}
def "produces build results file for build with --configuration-cache on #testGradleVersion"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
run(['help', '--configuration-cache'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('help', testGradleVersion, false, false)
assert buildResultFile.delete()
when:
run(['help', '--configuration-cache'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('help', testGradleVersion, false, false)
where:
testGradleVersion << CONFIGURATION_CACHE_VERSIONS
}
def "produces build results file for #testGradleVersion with build scan published"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
declareGePluginApplication(testGradleVersion.gradleVersion)
run(['help'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('help', testGradleVersion, false, true)
where:
testGradleVersion << ALL_VERSIONS
}
def "produces build results file for #testGradleVersion with ge-plugin and no build scan published"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
declareGePluginApplication(testGradleVersion.gradleVersion)
run(['help', '--no-scan'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('help', testGradleVersion, false, false)
where:
testGradleVersion << ALL_VERSIONS
}
def "produces build results file for failing build on #testGradleVersion with build scan published"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
declareGePluginApplication(testGradleVersion.gradleVersion)
addFailingTaskToBuild()
runAndFail(['expectFailure'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('expectFailure', testGradleVersion, true, true)
where:
testGradleVersion << ALL_VERSIONS
}
def "produces build results file for build with --configuration-cache on #testGradleVersion with build scan published"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
declareGePluginApplication(testGradleVersion.gradleVersion)
run(['help', '--configuration-cache'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('help', testGradleVersion, false, true)
assert buildResultFile.delete()
when:
run(['help', '--configuration-cache'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('help', testGradleVersion, false, true)
where:
testGradleVersion << CONFIGURATION_CACHE_VERSIONS
}
def "produces build results file for failing build on #testGradleVersion when build scan publish fails"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
declareGePluginApplication(testGradleVersion.gradleVersion)
addFailingTaskToBuild()
failScanUpload = true
runAndFail(['expectFailure'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('expectFailure', testGradleVersion, true, false, true)
where:
testGradleVersion << ALL_VERSIONS
}
def "produces no build results file when GitHub env vars not set with #testGradleVersion"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
run(['help'], initScript, testGradleVersion.gradleVersion, [], [RUNNER_TEMP: '', GITHUB_ACTION: ''])
then:
def buildResultsDir = new File(testProjectDir, '.build-results')
assert !buildResultsDir.exists()
where:
testGradleVersion << ALL_VERSIONS
}
def "produces no build results file when RUNNER_TEMP dir is not a writable directory with #testGradleVersion"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def invalidDir = new File(testProjectDir, 'invalid-runner-temp')
invalidDir.createNewFile()
run(['help'], initScript, testGradleVersion.gradleVersion, [], [RUNNER_TEMP: invalidDir.absolutePath])
then:
def buildResultsDir = new File(testProjectDir, '.build-results')
assert !buildResultsDir.exists()
where:
testGradleVersion << ALL_VERSIONS
}
def "produces build results file with build scan when GE plugin is applied in settingsEvaluated"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
settingsFile.text = """
plugins {
id 'com.gradle.enterprise' version '3.16.1' apply(false)
}
gradle.settingsEvaluated {
apply plugin: 'com.gradle.enterprise'
gradleEnterprise {
server = '$mockScansServer.address'
buildScan {
publishAlways()
}
}
}
""" + settingsFile.text
run(['help'], initScript, testGradleVersion.gradleVersion)
then:
assertResults('help', testGradleVersion, false, true)
where:
testGradleVersion << SETTINGS_PLUGIN_VERSIONS
}
void assertResults(String task, TestGradleVersion testGradleVersion, boolean hasFailure, boolean hasBuildScan, boolean scanUploadFailed = false) {
def results = new JsonSlurper().parse(buildResultFile)
assert results['rootProjectName'] == ROOT_PROJECT_NAME
assert results['rootProjectDir'] == testProjectDir.canonicalPath
assert results['requestedTasks'] == task
assert results['gradleVersion'] == testGradleVersion.gradleVersion.version
assert results['gradleHomeDir'] != null
assert results['buildFailed'] == hasFailure
assert results['buildScanUri'] == (hasBuildScan ? "${mockScansServer.address}s/${PUBLIC_BUILD_SCAN_ID}" : null)
assert results['buildScanFailed'] == scanUploadFailed
}
private File getBuildResultFile() {
def buildResultsDir = new File(testProjectDir, '.build-results')
assert buildResultsDir.directory
assert buildResultsDir.listFiles().size() == 1
def resultsFile = buildResultsDir.listFiles()[0]
assert resultsFile.name.startsWith('github-step-id')
assert resultsFile.text.count('rootProjectName') == 1
return resultsFile
}
}

View File

@ -1,150 +0,0 @@
package com.gradle.gradlebuildaction
import org.gradle.util.GradleVersion
import static org.junit.Assume.assumeTrue
class TestDependencyGraph extends BaseInitScriptTest {
def initScript = 'gradle-build-action.github-dependency-graph.init.gradle'
static final TestGradleVersion GRADLE_5_1 = new TestGradleVersion(GradleVersion.version('5.1.1'), 8, 12)
static final TestGradleVersion GRADLE_7_0 = new TestGradleVersion(GradleVersion.version('7.0.1'), 8, 12)
static final List<TestGradleVersion> NO_DEPENDENCY_GRAPH_VERSIONS = [GRADLE_3_X, GRADLE_4_X, GRADLE_5_1, GRADLE_7_0]
static final List<TestGradleVersion> DEPENDENCY_GRAPH_VERSIONS = ALL_VERSIONS - NO_DEPENDENCY_GRAPH_VERSIONS
def "does not produce dependency graph when not enabled"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
run(['help'], initScript, testGradleVersion.gradleVersion)
then:
assert !reportsDir.exists()
where:
testGradleVersion << ALL_VERSIONS
}
def "produces dependency graph when enabled"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
run(['help'], initScript, testGradleVersion.gradleVersion, [], envVars)
then:
assert reportFile.exists()
where:
testGradleVersion << [GRADLE_8_X]
}
def "produces dependency graph with configuration-cache on latest Gradle"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
run(['help', '--configuration-cache'], initScript, testGradleVersion.gradleVersion, [], envVars)
then:
assert reportFile.exists()
where:
// The dependency-graph plugin doesn't support the configuration cache on Gradle 8.0
testGradleVersion << [GRADLE_8_X]
}
def "warns and produces no dependency graph when enabled for older Gradle versions"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def result = run(['help'], initScript, testGradleVersion.gradleVersion, [], envVars)
then:
assert !reportsDir.exists()
assert result.output.contains("::warning::Dependency Graph is not supported for ${testGradleVersion}")
where:
testGradleVersion << NO_DEPENDENCY_GRAPH_VERSIONS
}
def "fails build when enabled for older Gradle versions if continue-on-failure is false"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def vars = envVars
vars.put('GITHUB_DEPENDENCY_GRAPH_CONTINUE_ON_FAILURE', 'false')
def result = runAndFail(['help'], initScript, testGradleVersion.gradleVersion, [], vars)
then:
assert !reportsDir.exists()
assert result.output.contains("Dependency Graph is not supported for ${testGradleVersion}")
where:
testGradleVersion << NO_DEPENDENCY_GRAPH_VERSIONS
}
def "constructs unique job correlator for each build invocation"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
def reportFile1 = new File(reportsDir, "CORRELATOR-1.json")
def reportFile2 = new File(reportsDir, "CORRELATOR-2.json")
buildFile << """
task firstTask {
doLast {
println "First"
}
}
task secondTask {
doLast {
println "Second"
}
}
"""
when:
run(['help'], initScript, testGradleVersion.gradleVersion, [], envVars)
then:
assert reportFile.exists()
when:
run(['first'], initScript, testGradleVersion.gradleVersion, [], envVars)
then:
assert reportFile.exists()
assert reportFile1.exists()
when:
run(['second'], initScript, testGradleVersion.gradleVersion, [], envVars)
then:
assert reportFile.exists()
assert reportFile1.exists()
assert reportFile2.exists()
where:
testGradleVersion << DEPENDENCY_GRAPH_VERSIONS
}
def getEnvVars() {
return [
GITHUB_DEPENDENCY_GRAPH_ENABLED: "true",
GITHUB_DEPENDENCY_GRAPH_CONTINUE_ON_FAILURE: "true",
GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR: "CORRELATOR",
GITHUB_DEPENDENCY_GRAPH_JOB_ID: "1",
GITHUB_DEPENDENCY_GRAPH_REF: "main",
GITHUB_DEPENDENCY_GRAPH_SHA: "123456",
GITHUB_DEPENDENCY_GRAPH_WORKSPACE: testProjectDir.absolutePath,
DEPENDENCY_GRAPH_REPORT_DIR: reportsDir.absolutePath,
]
}
def getReportsDir() {
return new File(testProjectDir, 'build/reports/github-dependency-graph-snapshots')
}
def getReportFile() {
return new File(reportsDir, "CORRELATOR.json")
}
}

View File

@ -1,401 +0,0 @@
package com.gradle.gradlebuildaction
import org.gradle.testkit.runner.BuildResult
import org.gradle.util.GradleVersion
import static org.junit.Assume.assumeTrue
class TestDevelocityInjection extends BaseInitScriptTest {
static final List<TestGradleVersion> CCUD_COMPATIBLE_VERSIONS = ALL_VERSIONS - [GRADLE_3_X]
def initScript = 'gradle-build-action.inject-develocity.init.gradle'
private static final GradleVersion GRADLE_6 = GradleVersion.version('6.0')
def "does not apply Develocity plugins when not requested"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def result = run([], initScript, testGradleVersion.gradleVersion)
then:
outputMissesDevelocityPluginApplicationViaInitScript(result)
outputMissesCcudPluginApplicationViaInitScript(result)
where:
testGradleVersion << ALL_VERSIONS
}
def "does not override Develocity plugin when already defined in project"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
given:
declareGePluginApplication(testGradleVersion.gradleVersion)
when:
def result = run(testGradleVersion, testConfig())
then:
outputMissesDevelocityPluginApplicationViaInitScript(result)
outputMissesCcudPluginApplicationViaInitScript(result)
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << ALL_VERSIONS
}
def "applies Develocity plugin via init script when not defined in project"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def result = run(testGradleVersion, testConfig())
then:
outputContainsDevelocityPluginApplicationViaInitScript(result, testGradleVersion.gradleVersion)
outputMissesCcudPluginApplicationViaInitScript(result)
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << ALL_VERSIONS
}
def "applies Develocity and CCUD plugins via init script when not defined in project"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def result = run(testGradleVersion, testConfig().withCCUDPlugin())
then:
outputContainsDevelocityPluginApplicationViaInitScript(result, testGradleVersion.gradleVersion)
outputContainsCcudPluginApplicationViaInitScript(result)
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << CCUD_COMPATIBLE_VERSIONS
}
def "applies CCUD plugin via init script where Develocity plugin already applied"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
given:
declareGePluginApplication(testGradleVersion.gradleVersion)
when:
def result = run(testGradleVersion, testConfig().withCCUDPlugin())
then:
outputMissesDevelocityPluginApplicationViaInitScript(result)
outputContainsCcudPluginApplicationViaInitScript(result)
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << CCUD_COMPATIBLE_VERSIONS
}
def "does not override CCUD plugin when already defined in project"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
given:
declareGePluginAndCcudPluginApplication(testGradleVersion.gradleVersion)
when:
def result = run(testGradleVersion, testConfig().withCCUDPlugin())
then:
outputMissesDevelocityPluginApplicationViaInitScript(result)
outputMissesCcudPluginApplicationViaInitScript(result)
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << CCUD_COMPATIBLE_VERSIONS
}
def "ignores Develocity URL and allowUntrustedServer when Develocity plugin is not applied by the init script"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
given:
declareGePluginApplication(testGradleVersion.gradleVersion)
when:
def config = testConfig().withServer(URI.create('https://develocity-server.invalid'))
def result = run(testGradleVersion, config)
then:
outputMissesDevelocityPluginApplicationViaInitScript(result)
outputMissesCcudPluginApplicationViaInitScript(result)
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << ALL_VERSIONS
}
def "configures Develocity URL and allowUntrustedServer when Develocity plugin is applied by the init script"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def config = testConfig().withServer(mockScansServer.address)
def result = run(testGradleVersion, config)
then:
outputContainsDevelocityPluginApplicationViaInitScript(result, testGradleVersion.gradleVersion)
outputContainsDevelocityConnectionInfo(result, mockScansServer.address.toString(), true)
outputMissesCcudPluginApplicationViaInitScript(result)
outputContainsPluginRepositoryInfo(result, 'https://plugins.gradle.org/m2')
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << ALL_VERSIONS
}
def "enforces Develocity URL and allowUntrustedServer in project if enforce url parameter is enabled"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
given:
declareGePluginApplication(testGradleVersion.gradleVersion, URI.create('https://develocity-server.invalid'))
when:
def config = testConfig().withServer(mockScansServer.address, true)
def result = run(testGradleVersion, config)
then:
outputMissesDevelocityPluginApplicationViaInitScript(result)
outputMissesCcudPluginApplicationViaInitScript(result)
and:
outputEnforcesDevelocityUrl(result, mockScansServer.address.toString(), true)
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << ALL_VERSIONS
}
def "can configure alternative repository for plugins when Develocity plugin is applied by the init script"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def config = testConfig().withPluginRepository(new URI('https://plugins.grdev.net/m2'))
def result = run(testGradleVersion, config)
then:
outputContainsDevelocityPluginApplicationViaInitScript(result, testGradleVersion.gradleVersion)
outputContainsDevelocityConnectionInfo(result, mockScansServer.address.toString(), true)
outputMissesCcudPluginApplicationViaInitScript(result)
outputContainsPluginRepositoryInfo(result, 'https://plugins.grdev.net/m2')
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << ALL_VERSIONS
}
def "stops gracefully when requested CCUD plugin version is <1.7"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def config = testConfig().withCCUDPlugin("1.6.6")
def result = run(testGradleVersion, config)
then:
outputMissesDevelocityPluginApplicationViaInitScript(result)
outputMissesCcudPluginApplicationViaInitScript(result)
result.output.contains('Common Custom User Data Gradle plugin must be at least 1.7. Configured version is 1.6.6.')
where:
testGradleVersion << ALL_VERSIONS
}
def "can configure Develocity via CCUD system property overrides when CCUD plugin is inject via init script"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def config = testConfig().withCCUDPlugin().withServer(URI.create('https://develocity-server.invalid'))
def result = run(testGradleVersion, config, ["help", "-Dgradle.enterprise.url=${mockScansServer.address}".toString()])
then:
outputContainsDevelocityPluginApplicationViaInitScript(result, testGradleVersion.gradleVersion)
outputContainsCcudPluginApplicationViaInitScript(result)
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << CCUD_COMPATIBLE_VERSIONS
}
def "init script is configuration cache compatible"() {
assumeTrue testGradleVersion.compatibleWithCurrentJvm
when:
def config = testConfig().withCCUDPlugin()
def result = run(testGradleVersion, config, ["help", "--configuration-cache"])
then:
outputContainsDevelocityPluginApplicationViaInitScript(result, testGradleVersion.gradleVersion)
outputContainsCcudPluginApplicationViaInitScript(result)
and:
outputContainsBuildScanUrl(result)
when:
result = run(testGradleVersion, config, ["help", "--configuration-cache"])
then:
outputMissesDevelocityPluginApplicationViaInitScript(result)
outputMissesCcudPluginApplicationViaInitScript(result)
and:
outputContainsBuildScanUrl(result)
where:
testGradleVersion << CONFIGURATION_CACHE_VERSIONS
}
void outputContainsBuildScanUrl(BuildResult result) {
def message = "Publishing build scan...\n${mockScansServer.address}s/$PUBLIC_BUILD_SCAN_ID"
assert result.output.contains(message)
assert 1 == result.output.count(message)
}
void outputContainsDevelocityPluginApplicationViaInitScript(BuildResult result, GradleVersion gradleVersion) {
def pluginApplicationLogMsgGradle4And5 = "Applying com.gradle.scan.plugin.BuildScanPlugin via init script"
def pluginApplicationLogMsgGradle6AndHigher = "Applying com.gradle.enterprise.gradleplugin.GradleEnterprisePlugin via init script"
if (gradleVersion < GRADLE_6) {
assert result.output.contains(pluginApplicationLogMsgGradle4And5)
assert 1 == result.output.count(pluginApplicationLogMsgGradle4And5)
assert !result.output.contains(pluginApplicationLogMsgGradle6AndHigher)
} else {
assert result.output.contains(pluginApplicationLogMsgGradle6AndHigher)
assert 1 == result.output.count(pluginApplicationLogMsgGradle6AndHigher)
assert !result.output.contains(pluginApplicationLogMsgGradle4And5)
}
}
void outputMissesDevelocityPluginApplicationViaInitScript(BuildResult result) {
def pluginApplicationLogMsgGradle4And5 = "Applying com.gradle.scan.plugin.BuildScanPlugin via init script"
def pluginApplicationLogMsgGradle6AndHigher = "Applying com.gradle.enterprise.gradleplugin.GradleEnterprisePlugin via init script"
assert !result.output.contains(pluginApplicationLogMsgGradle4And5)
assert !result.output.contains(pluginApplicationLogMsgGradle6AndHigher)
}
void outputContainsCcudPluginApplicationViaInitScript(BuildResult result) {
def pluginApplicationLogMsg = "Applying com.gradle.CommonCustomUserDataGradlePlugin via init script"
assert result.output.contains(pluginApplicationLogMsg)
assert 1 == result.output.count(pluginApplicationLogMsg)
}
void outputMissesCcudPluginApplicationViaInitScript(BuildResult result) {
def pluginApplicationLogMsg = "Applying com.gradle.CommonCustomUserDataGradlePlugin via init script"
assert !result.output.contains(pluginApplicationLogMsg)
}
void outputContainsDevelocityConnectionInfo(BuildResult result, String geUrl, boolean geAllowUntrustedServer) {
def geConnectionInfo = "Connection to Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer"
assert result.output.contains(geConnectionInfo)
assert 1 == result.output.count(geConnectionInfo)
}
void outputContainsPluginRepositoryInfo(BuildResult result, String gradlePluginRepositoryUrl) {
def repositoryInfo = "Develocity plugins resolution: ${gradlePluginRepositoryUrl}"
assert result.output.contains(repositoryInfo)
assert 1 == result.output.count(repositoryInfo)
}
void outputEnforcesDevelocityUrl(BuildResult result, String geUrl, boolean geAllowUntrustedServer) {
def enforceUrl = "Enforcing Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer"
assert result.output.contains(enforceUrl)
assert 1 == result.output.count(enforceUrl)
}
private BuildResult run(TestGradleVersion testGradleVersion, TestConfig config, List<String> args = ["help"]) {
if (testKitSupportsEnvVars(testGradleVersion.gradleVersion)) {
return run(args, initScript, testGradleVersion.gradleVersion, [], config.envVars)
} else {
return run(args, initScript, testGradleVersion.gradleVersion, config.jvmArgs, [:])
}
}
private boolean testKitSupportsEnvVars(GradleVersion gradleVersion) {
// TestKit supports env vars for Gradle 3.5+, except on M1 Mac where only 6.9+ is supported
def isM1Mac = System.getProperty("os.arch") == "aarch64"
if (isM1Mac) {
return gradleVersion >= GRADLE_6_X.gradleVersion
} else {
return gradleVersion >= GRADLE_3_X.gradleVersion
}
}
private TestConfig testConfig() {
new TestConfig()
}
class TestConfig {
String serverUrl = mockScansServer.address.toString()
boolean enforceUrl = false
String ccudPluginVersion = null
String pluginRepositoryUrl = null
TestConfig withCCUDPlugin(String version = CCUD_PLUGIN_VERSION) {
ccudPluginVersion = version
return this
}
TestConfig withServer(URI url, boolean enforceUrl = false) {
serverUrl = url.toASCIIString()
this.enforceUrl = enforceUrl
return this
}
TestConfig withPluginRepository(URI pluginRepositoryUrl) {
this.pluginRepositoryUrl = pluginRepositoryUrl
return this
}
def getEnvVars() {
Map<String, String> envVars = [
DEVELOCITY_INJECTION_ENABLED: "true",
DEVELOCITY_URL: serverUrl,
DEVELOCITY_ALLOW_UNTRUSTED_SERVER: "true",
DEVELOCITY_PLUGIN_VERSION: DEVELOCITY_PLUGIN_VERSION,
DEVELOCITY_BUILD_SCAN_UPLOAD_IN_BACKGROUND: "true" // Need to upload in background since our Mock server doesn't cope with foreground upload
]
if (enforceUrl) envVars.put("DEVELOCITY_ENFORCE_URL", "true")
if (ccudPluginVersion != null) envVars.put("DEVELOCITY_CCUD_PLUGIN_VERSION", ccudPluginVersion)
if (pluginRepositoryUrl != null) envVars.put("GRADLE_PLUGIN_REPOSITORY_URL", pluginRepositoryUrl)
return envVars
}
def getJvmArgs() {
List<String> jvmArgs = [
"-Ddevelocity.injection-enabled=true",
"-Ddevelocity.url=$serverUrl",
"-Ddevelocity.allow-untrusted-server=true",
"-Ddevelocity.plugin.version=$DEVELOCITY_PLUGIN_VERSION",
"-Ddevelocity.build-scan.upload-in-background=true"
]
if (enforceUrl) jvmArgs.add("-Ddevelocity.enforce-url=true")
if (ccudPluginVersion != null) jvmArgs.add("-Ddevelocity.ccud-plugin.version=$ccudPluginVersion")
if (pluginRepositoryUrl != null) jvmArgs.add("-Dgradle.plugin-repository.url=$pluginRepositoryUrl")
return jvmArgs.collect { it.toString() } // Convert from GStrings
}
}
}

View File

@ -1,87 +0,0 @@
import * as exec from '@actions/exec'
import fs from 'fs'
import path from 'path'
import {CacheCleaner} from '../../src/cache-cleaner'
jest.setTimeout(120000)
test('will clean up unused dependency jars and build-cache entries', async () => {
const projectRoot = prepareTestProject()
const gradleUserHome = path.resolve(projectRoot, 'HOME')
const tmpDir = path.resolve(projectRoot, 'tmp')
const cacheCleaner = new CacheCleaner(gradleUserHome, tmpDir)
await runGradleBuild(projectRoot, 'build', '3.1')
await cacheCleaner.prepare()
await runGradleBuild(projectRoot, 'build', '3.1.1')
const commonsMath31 = path.resolve(gradleUserHome, "caches/modules-2/files-2.1/org.apache.commons/commons-math3/3.1")
const commonsMath311 = path.resolve(gradleUserHome, "caches/modules-2/files-2.1/org.apache.commons/commons-math3/3.1.1")
const buildCacheDir = path.resolve(gradleUserHome, "caches/build-cache-1")
expect(fs.existsSync(commonsMath31)).toBe(true)
expect(fs.existsSync(commonsMath311)).toBe(true)
expect(fs.readdirSync(buildCacheDir).length).toBe(4)
await cacheCleaner.forceCleanup()
expect(fs.existsSync(commonsMath31)).toBe(false)
expect(fs.existsSync(commonsMath311)).toBe(true)
expect(fs.readdirSync(buildCacheDir).length).toBe(3)
})
test('will clean up unused Gradle versions', async () => {
const projectRoot = prepareTestProject()
const gradleUserHome = path.resolve(projectRoot, 'HOME')
const tmpDir = path.resolve(projectRoot, 'tmp')
const cacheCleaner = new CacheCleaner(gradleUserHome, tmpDir)
// Initialize HOME with 2 different Gradle versions
await runGradleWrapperBuild(projectRoot, 'build')
await runGradleBuild(projectRoot, 'build')
await cacheCleaner.prepare()
// Run with only one of these versions
await runGradleBuild(projectRoot, 'build')
const gradle802 = path.resolve(gradleUserHome, "caches/8.0.2")
const wrapper802 = path.resolve(gradleUserHome, "wrapper/dists/gradle-8.0.2-bin")
const gradleCurrent = path.resolve(gradleUserHome, "caches/8.5")
expect(fs.existsSync(gradle802)).toBe(true)
expect(fs.existsSync(wrapper802)).toBe(true)
expect(fs.existsSync(gradleCurrent)).toBe(true)
await cacheCleaner.forceCleanup()
expect(fs.existsSync(gradle802)).toBe(false)
expect(fs.existsSync(wrapper802)).toBe(false)
expect(fs.existsSync(gradleCurrent)).toBe(true)
})
async function runGradleBuild(projectRoot: string, args: string, version: string = '3.1'): Promise<void> {
await exec.exec(`gradle -g HOME --no-daemon --build-cache -Dcommons_math3_version="${version}" ${args}`, [], {
cwd: projectRoot
})
console.log(`Gradle User Home initialized with commons_math3_version=${version} ${args}`)
}
async function runGradleWrapperBuild(projectRoot: string, args: string, version: string = '3.1'): Promise<void> {
await exec.exec(`./gradlew -g HOME --no-daemon --build-cache -Dcommons_math3_version="${version}" ${args}`, [], {
cwd: projectRoot
})
console.log(`Gradle User Home initialized with commons_math3_version=${version} ${args}`)
}
function prepareTestProject(): string {
const projectRoot = 'test/jest/resources/cache-cleanup'
fs.rmSync(path.resolve(projectRoot, 'HOME'), { recursive: true, force: true })
fs.rmSync(path.resolve(projectRoot, 'tmp'), { recursive: true, force: true })
fs.rmSync(path.resolve(projectRoot, 'build'), { recursive: true, force: true })
fs.rmSync(path.resolve(projectRoot, '.gradle'), { recursive: true, force: true })
return projectRoot
}

View File

@ -1,95 +0,0 @@
import {CacheEntryListener, CacheListener} from '../../src/cache-reporting'
describe('caching report', () => {
describe('reports not fully restored', () => {
it('with one requested entry report', async () => {
const report = new CacheListener()
report.entry('foo').markRequested('1', ['2'])
report.entry('bar').markRequested('3').markRestored('4', 500)
expect(report.fullyRestored).toBe(false)
})
})
describe('reports fully restored', () => {
it('when empty', async () => {
const report = new CacheListener()
expect(report.fullyRestored).toBe(true)
})
it('with empty entry reports', async () => {
const report = new CacheListener()
report.entry('foo')
report.entry('bar')
expect(report.fullyRestored).toBe(true)
})
it('with restored entry report', async () => {
const report = new CacheListener()
report.entry('bar').markRequested('3').markRestored('4', 300)
expect(report.fullyRestored).toBe(true)
})
it('with multiple restored entry reports', async () => {
const report = new CacheListener()
report.entry('foo').markRestored('4', 3300)
report.entry('bar').markRequested('3').markRestored('4', 333)
expect(report.fullyRestored).toBe(true)
})
})
describe('can be stringified and rehydrated', () => {
it('when empty', async () => {
const report = new CacheListener()
const stringRep = report.stringify()
const reportClone: CacheListener = CacheListener.rehydrate(stringRep)
expect(reportClone.cacheEntries).toEqual([])
// Can call methods on rehydrated
expect(reportClone.entry('foo')).toBeInstanceOf(CacheEntryListener)
})
it('with entry reports', async () => {
const report = new CacheListener()
report.entry('foo')
report.entry('bar')
report.entry('baz')
const stringRep = report.stringify()
const reportClone: CacheListener = CacheListener.rehydrate(stringRep)
expect(reportClone.cacheEntries.length).toBe(3)
expect(reportClone.cacheEntries[0].entryName).toBe('foo')
expect(reportClone.cacheEntries[1].entryName).toBe('bar')
expect(reportClone.cacheEntries[2].entryName).toBe('baz')
expect(reportClone.entry('foo')).toBe(reportClone.cacheEntries[0])
})
it('with rehydrated entry report', async () => {
const report = new CacheListener()
const entryReport = report.entry('foo')
entryReport.markRequested('1', ['2', '3'])
entryReport.markSaved('4', 100)
const stringRep = report.stringify()
const reportClone: CacheListener = CacheListener.rehydrate(stringRep)
const entryClone = reportClone.entry('foo')
expect(entryClone.requestedKey).toBe('1')
expect(entryClone.requestedRestoreKeys).toEqual(['2', '3'])
expect(entryClone.savedKey).toBe('4')
})
it('with live entry report', async () => {
const report = new CacheListener()
const entryReport = report.entry('foo')
entryReport.markRequested('1', ['2', '3'])
const stringRep = report.stringify()
const reportClone: CacheListener = CacheListener.rehydrate(stringRep)
const entryClone = reportClone.entry('foo')
// Check type and call method on rehydrated entry report
expect(entryClone).toBeInstanceOf(CacheEntryListener)
entryClone.markSaved('4', 100)
expect(entryClone.requestedKey).toBe('1')
expect(entryClone.requestedRestoreKeys).toEqual(['2', '3'])
expect(entryClone.savedKey).toBe('4')
})
})
})
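Note: the stringify/rehydrate behaviour exercised above amounts to a JSON round-trip with prototype re-attachment. The following is a minimal sketch of that pattern only, using simplified stand-in classes (Listener and EntryListener are illustrative names, not the actual cache-reporting implementation):

// Simplified stand-ins (assumed, for illustration only): serialize with
// JSON.stringify, then re-attach prototypes on parse so methods keep working
// on the rehydrated clone.
class EntryListener {
    requestedKey: string | undefined
    requestedRestoreKeys: string[] | undefined
    savedKey: string | undefined

    constructor(public entryName: string) {}

    markRequested(key: string, restoreKeys: string[] = []): EntryListener {
        this.requestedKey = key
        this.requestedRestoreKeys = restoreKeys
        return this
    }

    markSaved(key: string): EntryListener {
        this.savedKey = key
        return this
    }
}

class Listener {
    cacheEntries: EntryListener[] = []

    entry(name: string): EntryListener {
        let entry = this.cacheEntries.find(e => e.entryName === name)
        if (entry === undefined) {
            entry = new EntryListener(name)
            this.cacheEntries.push(entry)
        }
        return entry
    }

    stringify(): string {
        return JSON.stringify(this)
    }

    static rehydrate(stringRep: string): Listener {
        // JSON.parse yields plain objects, so prototypes are re-attached here.
        const listener: Listener = Object.assign(new Listener(), JSON.parse(stringRep))
        listener.cacheEntries = listener.cacheEntries.map(e =>
            Object.assign(new EntryListener(e.entryName), e)
        )
        return listener
    }
}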

View File

@ -1,20 +0,0 @@
import * as cacheUtils from '../../src/cache-utils'
describe('cache-utils', () => {
describe('can hash', () => {
it('a string', async () => {
const hash = cacheUtils.hashStrings(['foo'])
expect(hash).toBe('acbd18db4cc2f85cedef654fccc4a4d8')
})
it('multiple strings', async () => {
const hash = cacheUtils.hashStrings(['foo', 'bar', 'baz'])
expect(hash).toBe('6df23dc03f9b54cc38a0fc1483df6e21')
})
it('normalized filenames', async () => {
const fileNames = ['/foo/bar/baz.zip', '../boo.html']
const posixHash = cacheUtils.hashFileNames(fileNames)
const windowsHash = cacheUtils.hashFileNames(fileNames)
expect(posixHash).toBe(windowsHash)
})
})
})
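Note: the expected digests above are MD5 values (for example, acbd18db4cc2f85cedef654fccc4a4d8 is the MD5 of 'foo'). The sketch below is consistent with these expectations, assuming the hash is MD5 over the concatenated inputs and that hashFileNames normalizes platform path separators before hashing; the real cache-utils implementation may differ in detail:

import * as crypto from 'crypto'
import * as path from 'path'

// Assumed sketch: MD5 over the concatenation of the input strings.
export function hashStrings(values: string[]): string {
    const hash = crypto.createHash('md5')
    for (const value of values) {
        hash.update(value)
    }
    return hash.digest('hex')
}

// Assumed sketch: normalize platform-specific separators before hashing so the
// same logical paths produce the same digest on POSIX and Windows.
export function hashFileNames(fileNames: string[]): string {
    return hashStrings(fileNames.map(f => f.split(path.sep).join('/')))
}

The separator normalization is presumably what the 'normalized filenames' case is aimed at.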

View File

@ -1,34 +0,0 @@
import * as dependencyGraph from '../../src/dependency-graph'
describe('dependency-graph', () => {
describe('constructs job correlator', () => {
it('removes commas from workflow name', () => {
const id = dependencyGraph.constructJobCorrelator('Workflow, with,commas', 'jobid', '{}')
expect(id).toBe('workflow_withcommas-jobid')
})
it('removes non word characters', () => {
const id = dependencyGraph.constructJobCorrelator('Workflow!_with()characters', 'job-*id', '{"foo": "bar!@#$%^&*("}')
expect(id).toBe('workflow_withcharacters-job-id-bar')
})
it('replaces spaces', () => {
const id = dependencyGraph.constructJobCorrelator('Workflow !_ with () characters, and spaces', 'job-*id', '{"foo": "bar!@#$%^&*("}')
expect(id).toBe('workflow___with_characters_and_spaces-job-id-bar')
})
it('without matrix', () => {
const id = dependencyGraph.constructJobCorrelator('workflow', 'jobid', 'null')
expect(id).toBe('workflow-jobid')
})
it('with dashes in values', () => {
const id = dependencyGraph.constructJobCorrelator('workflow-name', 'job-id', '{"os": "ubuntu-latest"}')
expect(id).toBe('workflow-name-job-id-ubuntu-latest')
})
it('with single matrix value', () => {
const id = dependencyGraph.constructJobCorrelator('workflow', 'jobid', '{"os": "windows"}')
expect(id).toBe('workflow-jobid-windows')
})
it('with composite matrix value', () => {
const id = dependencyGraph.constructJobCorrelator('workflow', 'jobid', '{"os": "windows", "java-version": "21.1", "other": "Value, with COMMA"}')
expect(id).toBe('workflow-jobid-windows-211-value_with_comma')
})
})
})
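Note: taken together, these expectations suggest a correlator of the form sanitize(workflow)-sanitize(jobId)-sanitize(matrixValue)..., where sanitize drops characters other than alphanumerics, '_', '-' and whitespace, collapses whitespace runs to '_', and lower-cases the result. A hypothetical sketch consistent with the cases above (buildJobCorrelator and sanitize are illustrative names, not necessarily the real constructJobCorrelator):

// Hypothetical sketch, derived only from the expectations in these tests.
function sanitize(value: string): string {
    return value
        .replace(/[^a-zA-Z0-9_\s-]/g, '') // drop everything except word chars, '-', '_' and whitespace
        .replace(/\s+/g, '_')             // collapse whitespace runs to '_'
        .toLowerCase()
}

function buildJobCorrelator(workflowName: string, jobId: string, matrixJson: string): string {
    const matrix = JSON.parse(matrixJson)
    const matrixValues = matrix ? Object.values(matrix).map(v => String(v)) : []
    return [workflowName, jobId, ...matrixValues].map(sanitize).join('-')
}

// e.g. buildJobCorrelator('workflow', 'jobid', '{"os": "windows"}') === 'workflow-jobid-windows'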

View File

@ -1,22 +0,0 @@
import * as inputParams from '../../src/input-params'
describe('input params', () => {
describe('parses numeric input', () => {
it('uses default value', () => {
const val = inputParams.parseNumericInput('param-name', '', 88)
expect(val).toBe(88)
})
it('parses numeric input', () => {
const val = inputParams.parseNumericInput('param-name', '34', 88)
expect(val).toBe(34)
})
it('fails on non-numeric input', () => {
const t = () => {
inputParams.parseNumericInput('param-name', 'xyz', 88)
};
expect(t).toThrow(TypeError)
expect(t).toThrow("The value 'xyz' is not a valid numeric value for 'param-name'.")
})
})
})
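Note: a minimal parseNumericInput consistent with these three cases (empty input falls back to the default, valid input is parsed, anything else raises a TypeError with the tested message) is sketched here for reference; the real input-params implementation may differ in detail:

// Assumed sketch matching the tested behaviour.
export function parseNumericInput(paramName: string, rawValue: string, defaultValue: number): number {
    if (rawValue.length === 0) {
        return defaultValue
    }
    const value = parseInt(rawValue, 10)
    if (isNaN(value)) {
        throw new TypeError(`The value '${rawValue}' is not a valid numeric value for '${paramName}'.`)
    }
    return value
}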

View File

@ -1,6 +0,0 @@
#
# https://help.github.com/articles/dealing-with-line-endings/
#
# These are explicitly windows files and should use crlf
*.bat text eol=crlf

View File

@ -1,8 +0,0 @@
# Ignore Gradle project-specific cache directory
.gradle
# Ignore Gradle build output directory
build
HOME
tmp

View File

@ -1,11 +0,0 @@
plugins {
id 'java-library'
}
repositories {
mavenCentral()
}
dependencies {
api "org.apache.commons:commons-math3:${System.properties['commons_math3_version']}"
}

View File

@ -1,6 +0,0 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
# Deliberately not using the latest Gradle version for cache cleanup testing
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@ -1,245 +0,0 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

View File

@ -1,92 +0,0 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -1 +0,0 @@
rootProject.name = 'unused-dependencies'

View File

@ -1,10 +0,0 @@
/*
* This Java source file was generated by the Gradle 'init' task.
*/
package unused.dependencies;
public class Library {
public boolean someLibraryMethod() {
return true;
}
}

View File

@ -1,63 +0,0 @@
{
"compilerOptions": {
/* Basic Options */
"incremental": false, /* Enable incremental compilation */
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
// "declaration": true, /* Generates corresponding '.d.ts' file. */
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
"removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"strictNullChecks": true, /* Enable strict null checks. */
"strictFunctionTypes": true, /* Enable strict checking of function types. */
"strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
"strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
"noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
"alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
"noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
"noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
/* Source Map Options */
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}