Merge branch 'master' of http://github.com/actions/cache into with-retries

Dave Hadka 2020-05-11 12:55:11 -04:00
commit a0024e2bd0
3 changed files with 22 additions and 19 deletions


@@ -2376,7 +2376,7 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end -
             start +
@@ -2385,7 +2385,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) {
             "Content-Type": "application/octet-stream",
             "Content-Range": getContentRange(start, end)
         };
-        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders));
+        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders));
     });
 }
 function parseEnvNumber(key) {
@@ -2415,13 +2415,12 @@ function uploadFile(httpClient, cacheId, archivePath) {
                 const start = offset;
                 const end = offset + chunkSize - 1;
                 offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, {
+                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
                     fd,
                     start,
                     end,
                     autoClose: false
-                });
-                yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+                }), start, end);
             }
         })));
     }

dist/save/index.js vendored

@@ -2376,7 +2376,7 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, data, start, end) {
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end -
             start +
@@ -2385,7 +2385,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) {
             "Content-Type": "application/octet-stream",
             "Content-Range": getContentRange(start, end)
         };
-        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders));
+        yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders));
     });
 }
 function parseEnvNumber(key) {
@@ -2415,13 +2415,12 @@ function uploadFile(httpClient, cacheId, archivePath) {
                 const start = offset;
                 const end = offset + chunkSize - 1;
                 offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, {
+                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
                     fd,
                     start,
                     end,
                     autoClose: false
-                });
-                yield uploadChunk(httpClient, resourceUrl, chunk, start, end);
+                }), start, end);
             }
         })));
     }


@@ -287,7 +287,7 @@ function getContentRange(start: number, end: number): string {
 async function uploadChunk(
     httpClient: HttpClient,
     resourceUrl: string,
-    data: NodeJS.ReadableStream,
+    openStream: () => NodeJS.ReadableStream,
     start: number,
     end: number
 ): Promise<void> {
@@ -307,7 +307,12 @@ async function uploadChunk(
     await retryHttpClientResponse(
         `uploadChunk (start: ${start}, end: ${end})`,
         () =>
-            httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders)
+            httpClient.sendStream(
+                "PATCH",
+                resourceUrl,
+                openStream(),
+                additionalHeaders
+            )
     );
 }
@@ -349,17 +354,17 @@ async function uploadFile(
                 const start = offset;
                 const end = offset + chunkSize - 1;
                 offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                });
                 await uploadChunk(
                     httpClient,
                     resourceUrl,
-                    chunk,
+                    () =>
+                        fs.createReadStream(archivePath, {
+                            fd,
+                            start,
+                            end,
+                            autoClose: false
+                        }),
                     start,
                     end
                 );
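
The diff itself does not state the motivation, but the pattern is recognizable: httpClient.sendStream consumes the readable stream, so when retryHttpClientResponse retries a failed PATCH, a stream created once up front has already been partially drained and cannot be replayed. Passing an openStream factory lets every retry attempt open a fresh fs.ReadStream over the same fd and byte range. Below is a minimal sketch of that pattern; retryWithFreshStream, send, and uploadOneChunk are hypothetical names for illustration, not part of actions/cache.

import * as fs from "fs";

// Sketch only, not the actions/cache implementation: retry an upload,
// opening a brand-new stream for every attempt because a Node.js stream
// can only be consumed once.
async function retryWithFreshStream(
    openStream: () => NodeJS.ReadableStream,
    send: (stream: NodeJS.ReadableStream) => Promise<void>,
    maxAttempts = 3
): Promise<void> {
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        try {
            await send(openStream()); // fresh stream per attempt
            return;
        } catch (error) {
            if (attempt === maxAttempts) {
                throw error;
            }
        }
    }
}

// Usage mirroring the diff: the caller binds the factory to one chunk's
// byte range; fd stays open (autoClose: false) so other chunks can reuse it.
async function uploadOneChunk(
    archivePath: string,
    fd: number,
    start: number,
    end: number
): Promise<void> {
    await retryWithFreshStream(
        () => fs.createReadStream(archivePath, { fd, start, end, autoClose: false }),
        async stream => {
            stream.resume(); // stand-in for httpClient.sendStream("PATCH", ...)
        }
    );
}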