Mirror of https://code.forgejo.org/actions/cache.git (synced 2024-11-23 02:51:22 +01:00)
Separate out reserve call
parent 7f6523f535
commit 436418ea07

2 changed files with 37 additions and 32 deletions
First changed file:

@@ -15,6 +15,7 @@ import {
     ReserverCacheResponse
 } from "./contracts";
 import * as utils from "./utils/actionUtils";
+import { Duplex } from "stream";

 const MAX_CHUNK_SIZE = 4000000; // 4 MB Chunks

@@ -50,18 +51,20 @@ function getRequestOptions(): IRequestOptions {
     return requestOptions;
 }

-export async function getCacheEntry(
-    keys: string[]
-): Promise<ArtifactCacheEntry | null> {
-    const cacheUrl = getCacheApiUrl();
+function createRestClient(): RestClient {
     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
     const bearerCredentialHandler = new BearerCredentialHandler(token);

-    const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
-
-    const restClient = new RestClient("actions/cache", cacheUrl, [
+    return new RestClient("actions/cache", getCacheApiUrl(), [
         bearerCredentialHandler
     ]);
+}
+
+export async function getCacheEntry(
+    keys: string[]
+): Promise<ArtifactCacheEntry | null> {
+    const restClient = createRestClient();
+    const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;

     const response = await restClient.get<ArtifactCacheEntry>(
         resource,
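The lookup itself is unchanged by this refactor: the requested keys are joined with commas and URL-encoded into a single `keys` query parameter. A quick illustrative sketch of the resource string `getCacheEntry` builds (the key values here are made up, not from the commit):

```typescript
// Hypothetical restore keys, for illustration only.
const keys = ["npm-deps-v1-abc123", "npm-deps-v1-"];

// Same expression as in the diff above.
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
// resource === "cache?keys=npm-deps-v1-abc123%2Cnpm-deps-v1-"
```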
@@ -106,11 +109,12 @@ export async function downloadCache(
     await pipeResponseToStream(downloadResponse, stream);
 }

-// Returns Cache ID
-async function reserveCache(
-    restClient: RestClient,
+// Reserve Cache
+export async function reserveCache(
     key: string
 ): Promise<number> {
+    const restClient = createRestClient();
+
     const reserveCacheRequest: ReserveCacheRequest = {
         key
     };
@@ -119,7 +123,7 @@ async function reserveCache(
         reserveCacheRequest
     );

-    return response?.result?.cacheId || -1;
+    return response?.result?.cacheId ?? -1;
 }

 function getContentRange(start: number, length: number): string {
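The switch from `||` to `??` when unwrapping the reserve response is a small behavioural fix: `||` falls back to -1 for any falsy value, so a legitimate cache ID of 0 would be reported as "not reserved", while `??` only falls back on null or undefined. A minimal illustration:

```typescript
// A cache ID of 0 is falsy but still a valid ID.
const reservedId = 0;
const withOr = reservedId || -1;      // -1: the valid ID is discarded
const withNullish = reservedId ?? -1; // 0: only null/undefined fall back

// A genuinely missing ID maps to -1 either way.
const missingId: number | undefined = undefined;
const fallback = missingId ?? -1;     // -1
```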
@@ -131,9 +135,17 @@ function getContentRange(start: number, length: number): string {
     return `bytes ${start}-${start + length - 1}/*`;
 }

+function bufferToStream(buffer: Buffer): NodeJS.ReadableStream {
+    const stream = new Duplex();
+    stream.push(buffer);
+    stream.push(null);
+
+    return stream;
+}
+
 async function uploadChunk(
     restClient: RestClient,
-    cacheId: number,
+    resourceUrl: string,
     data: Buffer,
     offset: number
 ): Promise<IRestResponse<void>> {
@@ -143,11 +155,8 @@ async function uploadChunk(
         "Content-Range": getContentRange(offset, data.byteLength)
     };

-    return await restClient.update(
-        cacheId.toString(),
-        data.toString("utf8"),
-        requestOptions
-    );
+    const stream = bufferToStream(data);
+    return await restClient.uploadStream<void>("PATCH", resourceUrl, stream, requestOptions);
 }

 async function commitCache(
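With this change each chunk is sent as raw bytes: the `Buffer` is wrapped in a readable stream by `bufferToStream` and PATCHed to the reserved cache resource, instead of being converted to a UTF-8 string as before (which can corrupt binary archive data). The byte range for every chunk comes from the unchanged `getContentRange` helper; a worked example with the 4 MB `MAX_CHUNK_SIZE`:

```typescript
// Helper shown in the hunk context above.
function getContentRange(start: number, length: number): string {
    return `bytes ${start}-${start + length - 1}/*`;
}

// Successive 4 MB chunks of an archive carry these Content-Range headers:
getContentRange(0, 4000000);       // "bytes 0-3999999/*"
getContentRange(4000000, 4000000); // "bytes 4000000-7999999/*"

// The final, shorter chunk uses its actual byteLength (e.g. a 1 MB tail):
getContentRange(8000000, 1000000); // "bytes 8000000-8999999/*"
```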
@@ -165,21 +174,10 @@ async function commitCache(
 }

 export async function saveCache(
-    key: string,
+    cacheId: number,
     archivePath: string
 ): Promise<void> {
-    const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
-    const bearerCredentialHandler = new BearerCredentialHandler(token);
-
-    const restClient = new RestClient("actions/cache", getCacheApiUrl(), [
-        bearerCredentialHandler
-    ]);
-
-    // Reserve Cache
-    const cacheId = await reserveCache(restClient, key);
-    if (cacheId < 0) {
-        throw new Error(`Unable to reserve cache.`);
-    }
+    const restClient = createRestClient();

     // Upload Chunks
     const stream = fs.createReadStream(archivePath);
@@ -188,11 +186,12 @@ export async function saveCache(
         streamIsClosed = true;
     });

+    const resourceUrl = getCacheApiUrl() + cacheId.toString();
     const uploads: Promise<IRestResponse<void>>[] = [];
     let offset = 0;
     while (!streamIsClosed) {
         const chunk: Buffer = stream.read(MAX_CHUNK_SIZE);
-        uploads.push(uploadChunk(restClient, cacheId, chunk, offset));
+        uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
         offset += MAX_CHUNK_SIZE;
     }

Second changed file:

@@ -35,6 +35,12 @@ async function run(): Promise<void> {
             return;
         }

+        const cacheId = await cacheHttpClient.reserveCache(primaryKey);
+        if (cacheId < 0) {
+            core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
+            return;
+        }
+
         const cachePath = utils.resolvePath(
             core.getInput(Inputs.Path, { required: true })
         );
@@ -77,7 +83,7 @@ async function run(): Promise<void> {
             return;
         }

-        await cacheHttpClient.saveCache(primaryKey, archivePath);
+        await cacheHttpClient.saveCache(cacheId, archivePath);
     } catch (error) {
         utils.logWarning(error.message);
     }
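Taken together, saving a cache is now a two-step protocol from the caller's point of view: reserve first (getting back a numeric cache ID, or -1 if another job already holds the reservation), then archive and upload against that ID. A rough sketch of the flow after this commit; the archive-creation step is outside the hunks shown, so `createArchive` below is a stand-in name, not part of the change:

```typescript
// Module path assumed for the sketch; the diff above does not show file names.
import * as cacheHttpClient from "./cacheHttpClient";

async function saveFlow(primaryKey: string, archivePath: string): Promise<void> {
    // 1. Reserve the cache entry up front; -1 means another job got there first.
    const cacheId = await cacheHttpClient.reserveCache(primaryKey);
    if (cacheId < 0) {
        return; // another job is creating this cache, so quietly skip saving
    }

    // 2. Produce the archive at archivePath (this step is outside the shown hunks).
    // await createArchive(archivePath);

    // 3. Upload the archive in chunks and commit them against the reserved ID.
    await cacheHttpClient.saveCache(cacheId, archivePath);
}
```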