Mirror of https://code.forgejo.org/actions/cache.git (synced 2024-11-23 02:51:22 +01:00)
Change to on end
This commit is contained in:
parent 2cbd952179
commit 131e247bd2
3 changed files with 18 additions and 6 deletions
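The change swaps the stream completion listener from the "close" event to the "end" event and adds a guard for stream.read() returning null. On a Node.js readable stream, "end" fires once all data has been consumed, while "close" fires only after the underlying file descriptor is released, which happens afterwards. A minimal sketch of that ordering, with a scratch file path that is purely illustrative and not taken from this repository:

const fs = require("fs");

// Write a small scratch file so the sketch runs on its own;
// the path is illustrative, not something used by this action.
const demoPath = "/tmp/stream-events-demo.bin";
fs.writeFileSync(demoPath, Buffer.alloc(1024));

const stream = fs.createReadStream(demoPath);
stream.on("data", () => { /* consume in flowing mode so 'end' can fire */ });
stream.on("end", () => console.log("end: all data consumed"));
stream.on("close", () => console.log("close: file descriptor released"));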
dist/restore/index.js (vendored): 8 changed lines
@@ -1624,8 +1624,8 @@ function saveCache(cacheId, archivePath) {
         // Upload Chunks
         const stream = fs.createReadStream(archivePath);
         let streamIsClosed = false;
-        stream.on("close", () => {
-            core.debug("Stream is closed");
+        stream.on("end", () => {
+            core.debug("Stream is ended");
             streamIsClosed = true;
         });
         const resourceUrl = getCacheApiUrl() + cacheId.toString();
@@ -1634,6 +1634,10 @@ function saveCache(cacheId, archivePath) {
         while (!streamIsClosed) {
             core.debug(`Offset: ${offset}`);
             const chunk = stream.read(MAX_CHUNK_SIZE);
+            if (chunk == null) {
+                core.debug(`Chunk is null, reading is over?`);
+                break;
+            }
             uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
             offset += MAX_CHUNK_SIZE;
         }
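In the loop above, stream.read(MAX_CHUNK_SIZE) returns null whenever the requested number of bytes is not yet buffered, or nothing is left to read, which is what the new guard checks for. Below is a self-contained sketch of the same chunked-upload idea, driven by the "readable" and "end" events instead of a flag; uploadChunk and uploadFileInChunks here are stand-ins for illustration, not the client code from this repository:

const fs = require("fs");

const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // 4 MB per request (illustrative)

// Stand-in for the real uploadChunk(restClient, resourceUrl, chunk, offset).
async function uploadChunk(chunk, offset) {
    console.log(`would upload ${chunk.length} bytes at offset ${offset}`);
}

async function uploadFileInChunks(archivePath) {
    const stream = fs.createReadStream(archivePath);
    const uploads = [];
    let offset = 0;

    stream.on("readable", () => {
        // read(size) returns null until `size` bytes are buffered, or,
        // once the stream has ended, the shorter remaining tail.
        let chunk;
        while ((chunk = stream.read(MAX_CHUNK_SIZE)) !== null) {
            uploads.push(uploadChunk(chunk, offset));
            offset += chunk.length;
        }
    });

    // "end" fires after the last byte has been consumed by read();
    // "close" only once the file descriptor is released.
    await new Promise((resolve, reject) => {
        stream.on("end", resolve);
        stream.on("error", reject);
    });

    await Promise.all(uploads);
}

uploadFileInChunks(process.argv[2]).catch(err => {
    console.error(err);
    process.exitCode = 1;
});

The sketch advances offset by chunk.length so that a shorter final chunk still lines up with the bytes actually sent.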
dist/save/index.js (vendored): 8 changed lines
@@ -1624,8 +1624,8 @@ function saveCache(cacheId, archivePath) {
         // Upload Chunks
         const stream = fs.createReadStream(archivePath);
         let streamIsClosed = false;
-        stream.on("close", () => {
-            core.debug("Stream is closed");
+        stream.on("end", () => {
+            core.debug("Stream is ended");
             streamIsClosed = true;
         });
         const resourceUrl = getCacheApiUrl() + cacheId.toString();
@@ -1634,6 +1634,10 @@ function saveCache(cacheId, archivePath) {
         while (!streamIsClosed) {
             core.debug(`Offset: ${offset}`);
             const chunk = stream.read(MAX_CHUNK_SIZE);
+            if (chunk == null) {
+                core.debug(`Chunk is null, reading is over?`);
+                break;
+            }
             uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
             offset += MAX_CHUNK_SIZE;
         }
@@ -185,8 +185,8 @@ export async function saveCache(
     // Upload Chunks
     const stream = fs.createReadStream(archivePath);
     let streamIsClosed = false;
-    stream.on("close", () => {
-        core.debug("Stream is closed");
+    stream.on("end", () => {
+        core.debug("Stream is ended");
         streamIsClosed = true;
     });
 
@@ -196,6 +196,10 @@ export async function saveCache(
     while (!streamIsClosed) {
         core.debug(`Offset: ${offset}`);
         const chunk: Buffer = stream.read(MAX_CHUNK_SIZE);
+        if (chunk == null) {
+            core.debug(`Chunk is null, reading is over?`);
+            break;
+        }
         uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
         offset += MAX_CHUNK_SIZE;
     }