Mirror of https://code.forgejo.org/actions/cache.git (synced 2024-11-05 10:12:55 +01:00)
Bad implementation of parallel await
parent: b425e87f79
commit: ba6476e454
3 changed files with 78 additions and 10 deletions
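The change: instead of awaiting each `uploadChunk` call inside the read loop (the old `// Making this serial` path), `saveCache` now collects the upload promises and awaits them in batches of 4 through a new `parallelAwait` helper. The same edit appears three times: once in the TypeScript source and once in each vendored `dist` bundle compiled from it. As the commit title admits, the helper is flawed; see the notes after the last diff.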
dist/restore/index.js (vendored): 29 changes

```diff
@@ -1615,6 +1615,28 @@ function commitCache(restClient, cacheId, filesize) {
         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
     });
 }
+function parallelAwait(queue, concurrency) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const workQueue = queue.reverse();
+        let completedWork = [];
+        let entries = queue.length;
+        while (entries > 0) {
+            if (entries < concurrency) {
+                completedWork.push(yield Promise.all(workQueue));
+            }
+            else {
+                let promises = [];
+                let i;
+                for (i = 0; i < concurrency; i++) {
+                    promises.push((_a = workQueue.pop(), (_a !== null && _a !== void 0 ? _a : Promise.resolve())));
+                }
+                completedWork.push(yield Promise.all(promises));
+            }
+        }
+        return completedWork;
+    });
+}
 function saveCache(cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const restClient = createRestClient();
@@ -1629,13 +1651,14 @@ function saveCache(cacheId, archivePath) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const end = offset + chunkSize - 1;
             const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
-            uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
+            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
             offset += MAX_CHUNK_SIZE;
         }
-        fs.closeSync(fd);
         core.debug("Awaiting all uploads");
+        const responses = yield parallelAwait(uploads, 4);
+        fs.closeSync(fd);
         //const responses = await Promise.all(uploads);
-        const failedResponse = uploads.find(x => !isSuccessStatusCode(x.statusCode));
+        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }
```
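The comma expression in the compiled helper is not hand-written: it is how tsc down-levels the nullish-coalescing operator used in the TypeScript source (third diff below) when targeting a runtime without native `??`. The two forms are equivalent:

```typescript
// TypeScript source (see the src diff below):
promises.push(workQueue.pop() ?? Promise.resolve());
// tsc output for a pre-ES2020 target, with `_a` as a hoisted temporary:
promises.push((_a = workQueue.pop(), (_a !== null && _a !== void 0 ? _a : Promise.resolve())));
```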
dist/save/index.js (vendored): 29 changes

```diff
@@ -1615,6 +1615,28 @@ function commitCache(restClient, cacheId, filesize) {
         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
     });
 }
+function parallelAwait(queue, concurrency) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const workQueue = queue.reverse();
+        let completedWork = [];
+        let entries = queue.length;
+        while (entries > 0) {
+            if (entries < concurrency) {
+                completedWork.push(yield Promise.all(workQueue));
+            }
+            else {
+                let promises = [];
+                let i;
+                for (i = 0; i < concurrency; i++) {
+                    promises.push((_a = workQueue.pop(), (_a !== null && _a !== void 0 ? _a : Promise.resolve())));
+                }
+                completedWork.push(yield Promise.all(promises));
+            }
+        }
+        return completedWork;
+    });
+}
 function saveCache(cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const restClient = createRestClient();
@@ -1629,13 +1651,14 @@ function saveCache(cacheId, archivePath) {
             const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
             const end = offset + chunkSize - 1;
             const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
-            uploads.push(yield uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
+            uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
             offset += MAX_CHUNK_SIZE;
         }
-        fs.closeSync(fd);
         core.debug("Awaiting all uploads");
+        const responses = yield parallelAwait(uploads, 4);
+        fs.closeSync(fd);
         //const responses = await Promise.all(uploads);
-        const failedResponse = uploads.find(x => !isSuccessStatusCode(x.statusCode));
+        const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }
```
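The `dist/save/index.js` hunks are identical to the `dist/restore/index.js` ones above; both vendored bundles are compiled from the same TypeScript source, whose diff follows.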
src/cacheHttpClient.ts: 30 changes

```diff
@@ -174,6 +174,26 @@ async function commitCache(
     );
 }
+
+async function parallelAwait(queue: Promise<any>[], concurrency: number): Promise<any[]> {
+    const workQueue = queue.reverse();
+    let completedWork: any[] = [];
+    let entries = queue.length;
+    while (entries > 0) {
+        if (entries < concurrency) {
+            completedWork.push(await Promise.all(workQueue));
+        } else {
+            let promises: Promise<any>[] = [];
+            let i: number;
+            for (i = 0; i < concurrency; i++) {
+                promises.push(workQueue.pop() ?? Promise.resolve());
+            }
+            completedWork.push(await Promise.all(promises));
+        }
+    }
+
+    return completedWork;
+}
 
 export async function saveCache(
     cacheId: number,
     archivePath: string
@@ -184,7 +204,7 @@ export async function saveCache(
     // Upload Chunks
     const fileSize = fs.statSync(archivePath).size;
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
-    const uploads: IRestResponse<void>[] = [];
+    const uploads: Promise<IRestResponse<void>>[] = [];
 
     const fd = fs.openSync(archivePath, "r"); // Use the same fd for serial reads? Will this work for parallel too?
     let offset = 0;
@@ -192,16 +212,18 @@ export async function saveCache(
         const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
         const end = offset + chunkSize - 1;
         const chunk = fs.createReadStream(archivePath, { fd, start: offset, end, autoClose: false });
-        uploads.push(await uploadChunk(restClient, resourceUrl, chunk, offset, end)); // Making this serial
+        uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset, end));
         offset += MAX_CHUNK_SIZE;
     }
 
+    core.debug("Awaiting all uploads");
+    const responses = await parallelAwait(uploads, 4);
     fs.closeSync(fd);
 
-    core.debug("Awaiting all uploads");
-
     //const responses = await Promise.all(uploads);
 
-    const failedResponse = uploads.find(
+    const failedResponse = responses.find(
         x => !isSuccessStatusCode(x.statusCode)
     );
     if (failedResponse) {
```
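The commit title is apt: two problems are visible in the helper as committed. First, `entries` is never decremented, so once the `while (entries > 0)` loop is entered it can never exit: the `entries < concurrency` branch re-awaits the same `workQueue` on every pass, and the `else` branch keeps padding with `Promise.resolve()` after the queue drains, so the function never resolves for a non-empty queue. Second, each `Promise.all` result is pushed as a whole array, so even if the loop terminated, `responses.find(x => !isSuccessStatusCode(x.statusCode))` would inspect nested arrays (whose `statusCode` is `undefined`) rather than individual responses, and the failure check would misfire.

Below is a minimal sketch of a terminating batch-await. It is hypothetical, not code from this repository, and `parallelAwaitFixed` is an illustrative name:

```typescript
// Hypothetical sketch of a terminating batch-await; not part of this commit.
async function parallelAwaitFixed<T>(queue: Promise<T>[], concurrency: number): Promise<T[]> {
    const pending = [...queue]; // copy so the caller's array is not mutated
    const results: T[] = [];
    while (pending.length > 0) {
        // Take up to `concurrency` promises; splice shrinks `pending`,
        // so the loop condition makes progress and terminates.
        const batch = pending.splice(0, concurrency);
        // Spreading flattens each batch, so callers get response objects
        // with a .statusCode to check, not nested arrays.
        results.push(...(await Promise.all(batch)));
    }
    return results;
}
```

Note that even this version would not bound concurrency: the `uploadChunk` promises are created eagerly in the read loop, so all chunks are already in flight by the time the helper runs; only the awaiting is batched. Actually limiting in-flight uploads would require deferring the calls themselves, e.g. by queueing `() => uploadChunk(...)` thunks and starting at most `concurrency` of them at a time.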