Mirror of https://github.com/gradle/gradle-build-action.git (synced 2025-06-03 15:06:09 +02:00)
Added S3BucketCache implementation
Signed-off-by: Guillermo Mazzola <guillermo.mazzola@glovoapp.com>

parent 4ff2ffb7bf
commit 734fca3807
6 changed files with 2611 additions and 10 deletions

action.yml (12 changes)

@@ -30,7 +30,9 @@ inputs:
   cache-provider:
     description: |
-      The cache provider to use for caching files. Currently only supports `github`.
+      The cache provider to use for caching files. It can be:
+        - `github` (default), which uses the GitHub Cache API
+        - an AWS S3 bucket virtual-host URL, e.g. https://<bucket-name>.s3.<region-code>.amazonaws.com/
     required: false
     default: github

@@ -64,6 +66,14 @@ inputs:
     required: false
     default: true

+  aws-access-key-id:
+    description: AWS access key ID used to access your bucket
+    required: false
+
+  aws-secret-access-key:
+    description: AWS secret access key used to access your bucket
+    required: false
+
   # EXPERIMENTAL & INTERNAL ACTION INPUTS
   # The following action properties allow fine-grained tweaking of the action caching behaviour.
   # These properties are experimental and not (yet) designed for production use, and may change without notice in a subsequent release of `gradle-build-action`.
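
Both credential inputs are optional. As a rough sketch of how they can feed the AWS SDK (the region and environment variables below are illustrative, not taken from this commit):

    import {S3} from '@aws-sdk/client-s3'

    // aws-access-key-id / aws-secret-access-key inputs; undefined or empty when not configured
    const accessKeyId = process.env.AWS_ACCESS_KEY_ID
    const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY

    // When either credential is missing, `credentials: undefined` lets the AWS SDK fall back to
    // its default credential provider chain (environment variables, shared config, assumed role).
    const s3 = new S3({
        region: 'eu-west-1', // in the action this would come from the bucket URL given in cache-provider
        credentials: accessKeyId && secretAccessKey ? {accessKeyId, secretAccessKey} : undefined
    })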

package-lock.json (2481 changes, generated)

File diff suppressed because it is too large.

@@ -33,11 +33,12 @@
     "@actions/glob": "0.4.0",
     "@actions/http-client": "2.1.0",
     "@actions/tool-cache": "2.0.1",
+    "@aws-sdk/client-s3": "^3.359.0",
     "string-argv": "0.3.2"
   },
   "devDependencies": {
     "@types/jest": "29.5.1",
     "@types/node": "16.11.21",
     "@types/unzipper": "0.10.6",
     "@typescript-eslint/parser": "5.59.7",
     "@vercel/ncc": "0.36.1",

@@ -45,7 +46,7 @@
     "eslint-plugin-github": "4.7.0",
     "eslint-plugin-jest": "27.2.1",
     "jest": "29.5.0",
     "js-yaml": "4.1.0",
     "patch-package": "7.0.0",
     "prettier": "2.8.8",
     "ts-jest": "29.1.0",

src/cache-provider-s3.ts (new file, 99 lines)

@@ -0,0 +1,99 @@
+import * as core from '@actions/core'
+import * as AWS from '@aws-sdk/client-s3'
+import {CacheEntry, CacheProvider} from './cache-provider'
+import fs from 'fs'
+import path from 'path'
+import utils from '@actions/cache/lib/internal/cacheUtils'
+import {createTar, extractTar, listTar} from '@actions/cache/lib/internal/tar'
+import {NoSuchKey} from '@aws-sdk/client-s3'
+import {Readable} from 'stream'
+import {pipeline} from 'stream/promises' // used below to await the S3 download before extracting
+
+class S3BucketCache implements CacheProvider {
+    private readonly s3: AWS.S3
+    private readonly bucket: string
+
+    constructor(s3: AWS.S3, bucket: string) {
+        this.s3 = s3
+        this.bucket = bucket
+    }
+
+    async saveCache(paths: string[], key: string): Promise<CacheEntry> {
+        const compressionMethod = await utils.getCompressionMethod()
+        const archiveFolder = await utils.createTempDirectory()
+        const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod))
+        const cachePaths = await utils.resolvePaths(paths)
+
+        try {
+            await createTar(archiveFolder, cachePaths, compressionMethod)
+            if (core.isDebug()) {
+                await listTar(archivePath, compressionMethod)
+            }
+
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
+            core.debug(`File Size: ${archiveFileSize}`)
+
+            core.debug(`Uploading to S3 bucket ${this.bucket}...`)
+            const content = fs.createReadStream(archivePath)
+            await this.s3.putObject({Bucket: this.bucket, Key: key, Body: content, ContentLength: archiveFileSize})
+            return {key, size: archiveFileSize}
+        } finally {
+            try {
+                await utils.unlinkFile(archivePath)
+            } catch (error) {
+                core.debug(`Failed to delete archive: ${error}`)
+            }
+        }
+    }
+
+    async restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[]): Promise<CacheEntry | undefined> {
+        // Each key is tried as an exact S3 object key, in order: the primary key first, then any restore keys.
+        const keys = [primaryKey, ...(restoreKeys || [])]
+        core.debug('Resolved Keys:')
+        core.debug(JSON.stringify(keys))
+
+        const compressionMethod = await utils.getCompressionMethod()
+        const archivePath = path.join(await utils.createTempDirectory(), utils.getCacheFileName(compressionMethod))
+        core.debug(`Archive Path: ${archivePath}`)
+
+        for (const key of keys) {
+            core.info(`Trying to resolve cache for key: ${key}`)
+            try {
+                const object = await this.s3.getObject({Bucket: this.bucket, Key: key})
+
+                core.info(`Cache hit found for key: ${key}`)
+                const content = object.Body as Readable
+                // Wait for the download to be fully written to disk before measuring and extracting it.
+                await pipeline(content, fs.createWriteStream(archivePath))
+
+                const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
+                core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`)
+                await extractTar(archivePath, compressionMethod)
+
+                core.info('Cache restored successfully')
+                return {key, size: archiveFileSize}
+            } catch (error) {
+                if (error instanceof NoSuchKey) continue
+                throw error
+            }
+        }
+        return undefined
+    }
+}
+
+export default function createS3Cache(
+    bucketURL: string,
+    accessKeyId: string | undefined,
+    secretAccessKey: string | undefined
+): CacheProvider | undefined {
+    // Matches a virtual-host bucket URL and captures the bucket name and region.
+    const regEx = /^https:\/\/(.+?)\.s3\.(.+?)\.amazonaws\.com\/?$/i
+    const match = bucketURL.match(regEx)
+    if (!match) return
+
+    const [bucket, region] = match.slice(1)
+    const credentials = accessKeyId && secretAccessKey ? {accessKeyId, secretAccessKey} : undefined
+    const s3 = new AWS.S3({region, credentials})
+    return new S3BucketCache(s3, bucket)
+}
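
For context, a hedged usage sketch of createS3Cache (not part of the commit; the bucket name, region, paths and cache keys are illustrative, and it assumes the CacheProvider interface exposes the two methods implemented above):

    import createS3Cache from './cache-provider-s3'

    async function demo(): Promise<void> {
        // Parsed into bucket 'my-gradle-cache' and region 'eu-west-1'.
        const provider = createS3Cache(
            'https://my-gradle-cache.s3.eu-west-1.amazonaws.com/',
            process.env.AWS_ACCESS_KEY_ID,
            process.env.AWS_SECRET_ACCESS_KEY
        )
        // Any value that is not a virtual-host S3 URL (for example 'github') yields undefined.
        if (!provider) return

        // Note that restore keys are tried as exact object keys here, not as prefixes the way
        // the GitHub cache service treats them.
        const restored = await provider.restoreCache(['~/.gradle/caches'], 'gradle-home-v1-abc123', ['gradle-home-v1-fallback'])
        if (!restored) {
            await provider.saveCache(['~/.gradle/caches'], 'gradle-home-v1-abc123')
        }
    }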

@@ -12,6 +12,7 @@ import {CacheEntryListener} from './cache-reporting'
 import {CacheEntry, CacheProvider} from './cache-provider'
 import {ReserveCacheError, ValidationError} from '@actions/cache'
 import createGitHubCache from './cache-provider-github'
+import createS3Cache from './cache-provider-s3'

 const CACHE_PROTOCOL_VERSION = 'v8-'

@@ -24,7 +25,16 @@ const CACHE_KEY_JOB_EXECUTION_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB_EXECUTION'
 export const cache = provisionCache()

 function provisionCache(): CacheProvider | undefined {
-    return createGitHubCache()
+    const provider = params.getCacheProvider()
+    switch (provider) {
+        case 'github':
+            return createGitHubCache()
+        default: {
+            const s3Provider = createS3Cache(provider, params.getAWSAccessKeyId(), params.getAWSSecretAccessKey())
+            if (s3Provider) return s3Provider
+        }
+    }
+    throw new TypeError(`The value '${provider}' is not a supported cache provider.`)
 }

 export function isCacheDisabled(): boolean {
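
In effect the hunk above turns the cache-provider value into a dispatcher: 'github' keeps the existing GitHub cache, anything that parses as a virtual-host S3 URL gets the new S3BucketCache, and any other value fails fast because `cache` is assigned eagerly at module load. A purely illustrative helper (not part of the action) that mirrors those rules:

    function describeProvider(provider: string): string {
        if (provider === 'github') return 'GitHub Actions cache'
        if (/^https:\/\/.+?\.s3\..+?\.amazonaws\.com\/?$/i.test(provider)) return 'S3 bucket cache'
        return 'unsupported: provisionCache() would throw a TypeError'
    }

    // describeProvider('github')                                        -> 'GitHub Actions cache'
    // describeProvider('https://my-bucket.s3.us-east-1.amazonaws.com/') -> 'S3 bucket cache'
    // describeProvider('gs://my-bucket')                                -> 'unsupported: ...'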

@@ -25,6 +25,10 @@ export function isCacheCleanupEnabled(): boolean {
     return getBooleanInput('gradle-home-cache-cleanup')
 }

+export function getCacheProvider(): string {
+    return core.getInput('cache-provider') || 'github'
+}
+
 export function getCacheIncludes(): string[] {
     return core.getMultilineInput('gradle-home-cache-includes')
 }

@@ -63,6 +67,14 @@ export function isJobSummaryEnabled(): boolean {
     return getBooleanInput('generate-job-summary', true)
 }

+export function getAWSAccessKeyId(): string {
+    return core.getInput('aws-access-key-id')
+}
+
+export function getAWSSecretAccessKey(): string {
+    return core.getInput('aws-secret-access-key')
+}
+
 function getBooleanInput(paramName: string, paramDefault = false): boolean {
     const paramValue = core.getInput(paramName)
     switch (paramValue.toLowerCase().trim()) {
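
One detail worth calling out: core.getInput() returns an empty string for inputs that were not supplied, which is why getCacheProvider() falls back to 'github' and why leaving the credential inputs empty reads as "no static credentials". A small sketch (the input names are the ones added in this commit, the rest is illustrative):

    import * as core from '@actions/core'

    // '' when the workflow does not set aws-access-key-id, so the `accessKeyId && secretAccessKey`
    // check in the S3 provider is falsy and the SDK's default credential chain is used instead.
    const accessKeyId = core.getInput('aws-access-key-id')

    // Falls back to 'github' when cache-provider is unset or empty, matching getCacheProvider() above.
    const provider = core.getInput('cache-provider') || 'github'
    core.debug(`provider=${provider}, static credentials=${accessKeyId !== ''}`)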