Mirror of https://code.forgejo.org/actions/cache.git, synced 2024-11-05 02:02:53 +01:00
Commit a0024e2bd0
Merge branch 'master' of http://github.com/actions/cache into with-retries

3 changed files with 22 additions and 19 deletions
dist/restore/index.js (vendored, 9 changed lines)
@@ -2376,7 +2376,7 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end -
             start +
@@ -2385,7 +2385,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) {
             "Content-Type": "application/octet-stream",
             "Content-Range": getContentRange(start, end)
         };
-        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders));
+        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders));
     });
 }
 function parseEnvNumber(key) {
@@ -2415,13 +2415,12 @@ function uploadFile(httpClient, cacheId, archivePath) {
                 const start = offset;
                 const end = offset + chunkSize - 1;
                 offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, {
+                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
                     fd,
                     start,
                     end,
                     autoClose: false
-                });
-                yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+                }), start, end);
             }
         })));
     }
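
The change is the same in all three files: uploadChunk no longer receives an already-opened stream (data) but a factory (openStream) that is invoked inside the retry wrapper, so each retried PATCH gets a fresh, unconsumed stream. A minimal sketch of that pattern in TypeScript follows; retryWithAttempts and send are hypothetical stand-ins for illustration, not the actual retryHttpClientResponse or @actions/http-client APIs.

import * as fs from "fs";

// Hypothetical retry helper: runs fn up to `attempts` times.
async function retryWithAttempts<T>(
    attempts: number,
    fn: () => Promise<T>
): Promise<T> {
    let lastError: unknown;
    for (let attempt = 1; attempt <= attempts; attempt++) {
        try {
            return await fn();
        } catch (error) {
            lastError = error; // swallow and retry with a fresh request body
        }
    }
    throw lastError;
}

// Hypothetical sender: drains the stream in place of a real PATCH request.
async function send(body: NodeJS.ReadableStream): Promise<void> {
    body.resume();
}

// Taking a factory instead of a stream means every retry reads the chunk
// again from disk; a stream passed directly would already be drained after
// the first failed attempt.
async function uploadChunkSketch(
    openStream: () => NodeJS.ReadableStream
): Promise<void> {
    await retryWithAttempts(3, async () => {
        const body = openStream(); // re-opened on every attempt
        await send(body);
    });
}

void uploadChunkSketch(() =>
    fs.createReadStream("archive.tgz", { start: 0, end: 1023 })
);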

dist/save/index.js (vendored, 9 changed lines)
@@ -2376,7 +2376,7 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end -
             start +
@@ -2385,7 +2385,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) {
            "Content-Type": "application/octet-stream",
            "Content-Range": getContentRange(start, end)
        };
-        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders));
+        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders));
     });
 }
 function parseEnvNumber(key) {
@@ -2415,13 +2415,12 @@ function uploadFile(httpClient, cacheId, archivePath) {
                 const start = offset;
                 const end = offset + chunkSize - 1;
                 offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, {
+                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
                     fd,
                     start,
                     end,
                     autoClose: false
-                });
-                yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+                }), start, end);
             }
         })));
     }

TypeScript source (23 changed lines)
@@ -287,7 +287,7 @@ function getContentRange(start: number, end: number): string {
 async function uploadChunk(
     httpClient: HttpClient,
     resourceUrl: string,
-    data: NodeJS.ReadableStream,
+    openStream: () => NodeJS.ReadableStream,
     start: number,
     end: number
 ): Promise<void> {
@@ -307,7 +307,12 @@ async function uploadChunk(
     await retryHttpClientResponse(
         `uploadChunk (start: ${start}, end: ${end})`,
         () =>
-            httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders)
+            httpClient.sendStream(
+                "PATCH",
+                resourceUrl,
+                openStream(),
+                additionalHeaders
+            )
     );
 }
 
@@ -349,17 +354,17 @@ async function uploadFile(
             const start = offset;
             const end = offset + chunkSize - 1;
             offset += MAX_CHUNK_SIZE;
-            const chunk = fs.createReadStream(archivePath, {
-                fd,
-                start,
-                end,
-                autoClose: false
-            });
 
             await uploadChunk(
                 httpClient,
                 resourceUrl,
-                chunk,
+                () =>
+                    fs.createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    }),
                 start,
                 end
             );
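
One detail carried over from the old code in the hunk above: the archive is opened once and the same fd is shared by every chunk stream, with autoClose: false so that no individual stream closes it when it ends. A small sketch of that shape, assuming the file is opened with fs.openSync (the surrounding open/close code is not part of this diff, and the names below are placeholders):

import * as fs from "fs";

const archivePath = "archive.tgz";
const fd = fs.openSync(archivePath, "r");

// Factory matching the shape passed to uploadChunk above: each call returns a
// fresh stream over the shared descriptor, so a retry can re-read the chunk.
const openChunk = (start: number, end: number) => (): NodeJS.ReadableStream =>
    fs.createReadStream(archivePath, {
        fd,
        start,
        end,
        autoClose: false // keep the shared fd open when this stream ends
    });

const firstChunk = openChunk(0, 1023);
const stream = firstChunk(); // calling firstChunk() again would give a fresh stream
stream.on("end", () => fs.closeSync(fd)); // close the shared fd once the read is done
stream.resume();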