Mirror of https://code.forgejo.org/actions/cache.git, synced 2024-11-05 02:02:53 +01:00

Add backend ids

This commit is contained in:
parent ae82de5d80
commit 67b144e17c

4 changed files with 200 additions and 84 deletions

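Both restoreCachev2 and saveCachev2 in the diff below call (0, util_1.getBackendIdsFromToken)() and note that the backend IDs are retrieved from the signed JWT. The helper itself is not part of this diff, so the following is only a minimal TypeScript sketch of how such a lookup could work, assuming the token is the runner's ACTIONS_RUNTIME_TOKEN and that the two IDs travel in an "Actions.Results:<workflowRunBackendId>:<workflowJobRunBackendId>" entry of its scp claim; the claim name, scope layout, and signature are assumptions, not taken from this commit.

// Hypothetical sketch only; see the assumptions stated above.
interface BackendIds {
    workflowRunBackendId: string;
    workflowJobRunBackendId: string;
}

function getBackendIdsFromToken(): BackendIds {
    // Assumption: the signed JWT is the runner's ACTIONS_RUNTIME_TOKEN.
    const token = process.env.ACTIONS_RUNTIME_TOKEN;
    if (!token) {
        throw new Error("ACTIONS_RUNTIME_TOKEN is not set");
    }
    // A JWT is three base64url segments; the payload is the middle one.
    const payload = JSON.parse(
        Buffer.from(token.split(".")[1], "base64url").toString("utf8")
    );
    const scopes: string = payload.scp ?? "";
    // Assumed scope layout: "Actions.Results:<workflowRunBackendId>:<workflowJobRunBackendId>"
    const resultsScope = scopes.split(" ").find(s => s.startsWith("Actions.Results:"));
    if (!resultsScope) {
        throw new Error("no Actions.Results scope in the runtime token");
    }
    const [, workflowRunBackendId, workflowJobRunBackendId] = resultsScope.split(":");
    return { workflowRunBackendId, workflowJobRunBackendId };
}
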
dist/restore-only/index.js (vendored): 71 lines changed

@@ -3183,6 +3183,7 @@ const upload_cache_1 = __nccwpck_require__(47107);
 const download_cache_1 = __nccwpck_require__(9049);
 const upload_zip_specification_1 = __nccwpck_require__(77643);
 const zip_1 = __nccwpck_require__(26699);
+const util_1 = __nccwpck_require__(49196);
 class ValidationError extends Error {
 constructor(message) {
 super(message);
@@ -3322,9 +3323,12 @@ function restoreCachev2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
 checkKey(key);
 }
 try {
+// BackendIds are retrieved form the signed JWT
+const backendIds = (0, util_1.getBackendIdsFromToken)();
 const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient();
 const getSignedDownloadURLRequest = {
-owner: "github",
+workflowRunBackendId: backendIds.workflowRunBackendId,
+workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
 keys: keys,
 };
 const signedDownloadURL = yield twirpClient.GetCachedBlob(getSignedDownloadURLRequest);
@@ -3439,8 +3443,12 @@ function saveCachev1(paths, key, options, enableCrossOsArchive = false) {
 }
 function saveCachev2(paths, key, options, enableCrossOsArchive = false) {
 return __awaiter(this, void 0, void 0, function* () {
+// BackendIds are retrieved form the signed JWT
+const backendIds = (0, util_1.getBackendIdsFromToken)();
 const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient();
 const getSignedUploadURL = {
+workflowRunBackendId: backendIds.workflowRunBackendId,
+workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
 organization: "github",
 keys: [key],
 };
@@ -3636,12 +3644,13 @@ const timestamp_1 = __nccwpck_require__(98983);
 class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 constructor() {
 super("github.actions.results.api.v1.GetCachedBlobRequest", [
-{ no: 1, name: "owner", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
 ]);
 }
 create(value) {
-const message = { owner: "", keys: [] };
+const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", keys: [] };
 globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
 if (value !== undefined)
 (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3652,10 +3661,13 @@ class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 while (reader.pos < end) {
 let [fieldNo, wireType] = reader.tag();
 switch (fieldNo) {
-case /* string owner */ 1:
-message.owner = reader.string();
+case /* string workflow_run_backend_id */ 1:
+message.workflowRunBackendId = reader.string();
 break;
-case /* repeated string keys */ 2:
+case /* string workflow_job_run_backend_id */ 2:
+message.workflowJobRunBackendId = reader.string();
+break;
+case /* repeated string keys */ 3:
 message.keys.push(reader.string());
 break;
 default:
@@ -3670,12 +3682,15 @@ class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 return message;
 }
 internalBinaryWrite(message, writer, options) {
-/* string owner = 1; */
-if (message.owner !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.owner);
-/* repeated string keys = 2; */
+/* string workflow_run_backend_id = 1; */
+if (message.workflowRunBackendId !== "")
+writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+/* string workflow_job_run_backend_id = 2; */
+if (message.workflowJobRunBackendId !== "")
+writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+/* repeated string keys = 3; */
 for (let i = 0; i < message.keys.length; i++)
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
+writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
 let u = options.writeUnknownFields;
 if (u !== false)
 (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -3819,12 +3834,14 @@ exports.GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type();
 class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 constructor() {
 super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [
-{ no: 1, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 4, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
 ]);
 }
 create(value) {
-const message = { organization: "", keys: [] };
+const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", organization: "", keys: [] };
 globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
 if (value !== undefined)
 (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3835,10 +3852,16 @@ class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 while (reader.pos < end) {
 let [fieldNo, wireType] = reader.tag();
 switch (fieldNo) {
-case /* string organization */ 1:
+case /* string workflow_run_backend_id */ 1:
+message.workflowRunBackendId = reader.string();
+break;
+case /* string workflow_job_run_backend_id */ 2:
+message.workflowJobRunBackendId = reader.string();
+break;
+case /* string organization */ 3:
 message.organization = reader.string();
 break;
-case /* repeated string keys */ 2:
+case /* repeated string keys */ 4:
 message.keys.push(reader.string());
 break;
 default:
@@ -3853,12 +3876,18 @@ class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 return message;
 }
 internalBinaryWrite(message, writer, options) {
-/* string organization = 1; */
+/* string workflow_run_backend_id = 1; */
+if (message.workflowRunBackendId !== "")
+writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+/* string workflow_job_run_backend_id = 2; */
+if (message.workflowJobRunBackendId !== "")
+writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+/* string organization = 3; */
 if (message.organization !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.organization);
-/* repeated string keys = 2; */
+writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.organization);
+/* repeated string keys = 4; */
 for (let i = 0; i < message.keys.length; i++)
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
+writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
 let u = options.writeUnknownFields;
 if (u !== false)
 (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);

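For reference, the descriptor changes above give the two request messages the following shape (a TypeScript sketch derived directly from the { no, name, kind, T } entries in this diff; wire names stay snake_case, the generated properties are camelCase):

// Shapes implied by the updated github.actions.results.api.v1 descriptors.
interface GetCachedBlobRequest {
    workflowRunBackendId: string;    // field 1 (replaces "owner")
    workflowJobRunBackendId: string; // field 2
    keys: string[];                  // field 3 (was field 2)
}

interface GetCacheBlobUploadURLRequest {
    workflowRunBackendId: string;    // field 1
    workflowJobRunBackendId: string; // field 2
    organization: string;            // field 3 (was field 1)
    keys: string[];                  // field 4 (was field 2)
}
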
dist/restore/index.js (vendored): 71 lines changed

@@ -3183,6 +3183,7 @@ const upload_cache_1 = __nccwpck_require__(47107);
 const download_cache_1 = __nccwpck_require__(9049);
 const upload_zip_specification_1 = __nccwpck_require__(77643);
 const zip_1 = __nccwpck_require__(26699);
+const util_1 = __nccwpck_require__(49196);
 class ValidationError extends Error {
 constructor(message) {
 super(message);
@@ -3322,9 +3323,12 @@ function restoreCachev2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
 checkKey(key);
 }
 try {
+// BackendIds are retrieved form the signed JWT
+const backendIds = (0, util_1.getBackendIdsFromToken)();
 const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient();
 const getSignedDownloadURLRequest = {
-owner: "github",
+workflowRunBackendId: backendIds.workflowRunBackendId,
+workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
 keys: keys,
 };
 const signedDownloadURL = yield twirpClient.GetCachedBlob(getSignedDownloadURLRequest);
@@ -3439,8 +3443,12 @@ function saveCachev1(paths, key, options, enableCrossOsArchive = false) {
 }
 function saveCachev2(paths, key, options, enableCrossOsArchive = false) {
 return __awaiter(this, void 0, void 0, function* () {
+// BackendIds are retrieved form the signed JWT
+const backendIds = (0, util_1.getBackendIdsFromToken)();
 const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient();
 const getSignedUploadURL = {
+workflowRunBackendId: backendIds.workflowRunBackendId,
+workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
 organization: "github",
 keys: [key],
 };
@@ -3636,12 +3644,13 @@ const timestamp_1 = __nccwpck_require__(98983);
 class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 constructor() {
 super("github.actions.results.api.v1.GetCachedBlobRequest", [
-{ no: 1, name: "owner", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
 ]);
 }
 create(value) {
-const message = { owner: "", keys: [] };
+const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", keys: [] };
 globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
 if (value !== undefined)
 (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3652,10 +3661,13 @@ class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 while (reader.pos < end) {
 let [fieldNo, wireType] = reader.tag();
 switch (fieldNo) {
-case /* string owner */ 1:
-message.owner = reader.string();
+case /* string workflow_run_backend_id */ 1:
+message.workflowRunBackendId = reader.string();
 break;
-case /* repeated string keys */ 2:
+case /* string workflow_job_run_backend_id */ 2:
+message.workflowJobRunBackendId = reader.string();
+break;
+case /* repeated string keys */ 3:
 message.keys.push(reader.string());
 break;
 default:
@@ -3670,12 +3682,15 @@ class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 return message;
 }
 internalBinaryWrite(message, writer, options) {
-/* string owner = 1; */
-if (message.owner !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.owner);
-/* repeated string keys = 2; */
+/* string workflow_run_backend_id = 1; */
+if (message.workflowRunBackendId !== "")
+writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+/* string workflow_job_run_backend_id = 2; */
+if (message.workflowJobRunBackendId !== "")
+writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+/* repeated string keys = 3; */
 for (let i = 0; i < message.keys.length; i++)
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
+writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
 let u = options.writeUnknownFields;
 if (u !== false)
 (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -3819,12 +3834,14 @@ exports.GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type();
 class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 constructor() {
 super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [
-{ no: 1, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 4, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
 ]);
 }
 create(value) {
-const message = { organization: "", keys: [] };
+const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", organization: "", keys: [] };
 globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
 if (value !== undefined)
 (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3835,10 +3852,16 @@ class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 while (reader.pos < end) {
 let [fieldNo, wireType] = reader.tag();
 switch (fieldNo) {
-case /* string organization */ 1:
+case /* string workflow_run_backend_id */ 1:
+message.workflowRunBackendId = reader.string();
+break;
+case /* string workflow_job_run_backend_id */ 2:
+message.workflowJobRunBackendId = reader.string();
+break;
+case /* string organization */ 3:
 message.organization = reader.string();
 break;
-case /* repeated string keys */ 2:
+case /* repeated string keys */ 4:
 message.keys.push(reader.string());
 break;
 default:
@@ -3853,12 +3876,18 @@ class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 return message;
 }
 internalBinaryWrite(message, writer, options) {
-/* string organization = 1; */
+/* string workflow_run_backend_id = 1; */
+if (message.workflowRunBackendId !== "")
+writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+/* string workflow_job_run_backend_id = 2; */
+if (message.workflowJobRunBackendId !== "")
+writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+/* string organization = 3; */
 if (message.organization !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.organization);
-/* repeated string keys = 2; */
+writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.organization);
+/* repeated string keys = 4; */
 for (let i = 0; i < message.keys.length; i++)
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
+writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
 let u = options.writeUnknownFields;
 if (u !== false)
 (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);

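The restore path above ties these pieces together: restoreCachev2 reads the backend IDs, builds the download request, and asks the Twirp client for a signed URL. A condensed TypeScript sketch of that flow follows; the declares stand in for the bundled util_1 and cacheTwirpClient modules, and the rest mirrors the diff.

// Condensed from restoreCachev2 in this diff; not a drop-in implementation.
declare function getBackendIdsFromToken(): {
    workflowRunBackendId: string;
    workflowJobRunBackendId: string;
};
declare const cacheTwirpClient: {
    internalBlobCacheTwirpClient(): {
        GetCachedBlob(request: object): Promise<unknown>;
    };
};

async function getSignedDownloadURL(keys: string[]): Promise<unknown> {
    // Backend IDs come from the signed runtime JWT (see the sketch near the top).
    const backendIds = getBackendIdsFromToken();
    const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient();
    const getSignedDownloadURLRequest = {
        workflowRunBackendId: backendIds.workflowRunBackendId,
        workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
        keys // primary key first, then any restore keys
    };
    return twirpClient.GetCachedBlob(getSignedDownloadURLRequest);
}
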
dist/save-only/index.js (vendored): 71 lines changed

@@ -3183,6 +3183,7 @@ const upload_cache_1 = __nccwpck_require__(47107);
 const download_cache_1 = __nccwpck_require__(9049);
 const upload_zip_specification_1 = __nccwpck_require__(77643);
 const zip_1 = __nccwpck_require__(26699);
+const util_1 = __nccwpck_require__(49196);
 class ValidationError extends Error {
 constructor(message) {
 super(message);
@@ -3322,9 +3323,12 @@ function restoreCachev2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
 checkKey(key);
 }
 try {
+// BackendIds are retrieved form the signed JWT
+const backendIds = (0, util_1.getBackendIdsFromToken)();
 const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient();
 const getSignedDownloadURLRequest = {
-owner: "github",
+workflowRunBackendId: backendIds.workflowRunBackendId,
+workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
 keys: keys,
 };
 const signedDownloadURL = yield twirpClient.GetCachedBlob(getSignedDownloadURLRequest);
@@ -3439,8 +3443,12 @@ function saveCachev1(paths, key, options, enableCrossOsArchive = false) {
 }
 function saveCachev2(paths, key, options, enableCrossOsArchive = false) {
 return __awaiter(this, void 0, void 0, function* () {
+// BackendIds are retrieved form the signed JWT
+const backendIds = (0, util_1.getBackendIdsFromToken)();
 const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient();
 const getSignedUploadURL = {
+workflowRunBackendId: backendIds.workflowRunBackendId,
+workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
 organization: "github",
 keys: [key],
 };
@@ -3636,12 +3644,13 @@ const timestamp_1 = __nccwpck_require__(98983);
 class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 constructor() {
 super("github.actions.results.api.v1.GetCachedBlobRequest", [
-{ no: 1, name: "owner", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
 ]);
 }
 create(value) {
-const message = { owner: "", keys: [] };
+const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", keys: [] };
 globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
 if (value !== undefined)
 (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3652,10 +3661,13 @@ class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 while (reader.pos < end) {
 let [fieldNo, wireType] = reader.tag();
 switch (fieldNo) {
-case /* string owner */ 1:
-message.owner = reader.string();
+case /* string workflow_run_backend_id */ 1:
+message.workflowRunBackendId = reader.string();
 break;
-case /* repeated string keys */ 2:
+case /* string workflow_job_run_backend_id */ 2:
+message.workflowJobRunBackendId = reader.string();
+break;
+case /* repeated string keys */ 3:
 message.keys.push(reader.string());
 break;
 default:
@@ -3670,12 +3682,15 @@ class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 return message;
 }
 internalBinaryWrite(message, writer, options) {
-/* string owner = 1; */
-if (message.owner !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.owner);
-/* repeated string keys = 2; */
+/* string workflow_run_backend_id = 1; */
+if (message.workflowRunBackendId !== "")
+writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+/* string workflow_job_run_backend_id = 2; */
+if (message.workflowJobRunBackendId !== "")
+writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+/* repeated string keys = 3; */
 for (let i = 0; i < message.keys.length; i++)
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
+writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
 let u = options.writeUnknownFields;
 if (u !== false)
 (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -3819,12 +3834,14 @@ exports.GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type();
 class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 constructor() {
 super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [
-{ no: 1, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 4, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
 ]);
 }
 create(value) {
-const message = { organization: "", keys: [] };
+const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", organization: "", keys: [] };
 globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
 if (value !== undefined)
 (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3835,10 +3852,16 @@ class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 while (reader.pos < end) {
 let [fieldNo, wireType] = reader.tag();
 switch (fieldNo) {
-case /* string organization */ 1:
+case /* string workflow_run_backend_id */ 1:
+message.workflowRunBackendId = reader.string();
+break;
+case /* string workflow_job_run_backend_id */ 2:
+message.workflowJobRunBackendId = reader.string();
+break;
+case /* string organization */ 3:
 message.organization = reader.string();
 break;
-case /* repeated string keys */ 2:
+case /* repeated string keys */ 4:
 message.keys.push(reader.string());
 break;
 default:
@@ -3853,12 +3876,18 @@ class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 return message;
 }
 internalBinaryWrite(message, writer, options) {
-/* string organization = 1; */
+/* string workflow_run_backend_id = 1; */
+if (message.workflowRunBackendId !== "")
+writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+/* string workflow_job_run_backend_id = 2; */
+if (message.workflowJobRunBackendId !== "")
+writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+/* string organization = 3; */
 if (message.organization !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.organization);
-/* repeated string keys = 2; */
+writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.organization);
+/* repeated string keys = 4; */
 for (let i = 0; i < message.keys.length; i++)
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
+writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
 let u = options.writeUnknownFields;
 if (u !== false)
 (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);

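The save path is symmetrical: saveCachev2 builds a GetCacheBlobUploadURLRequest carrying the same two backend IDs plus the organization and the single key. A matching sketch follows, under the same assumptions as the restore sketch above; note that the RPC name GetCacheBlobUploadURL is inferred from the request type and does not appear in this diff.

// Condensed from saveCachev2 in this diff; client and RPC names are stand-ins.
declare function getBackendIdsFromToken(): {
    workflowRunBackendId: string;
    workflowJobRunBackendId: string;
};
declare const cacheTwirpClient: {
    internalBlobCacheTwirpClient(): {
        GetCacheBlobUploadURL(request: object): Promise<unknown>;
    };
};

async function getSignedUploadURL(key: string): Promise<unknown> {
    const backendIds = getBackendIdsFromToken();
    const getSignedUploadURLRequest = {
        workflowRunBackendId: backendIds.workflowRunBackendId,
        workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
        organization: "github",
        keys: [key]
    };
    return cacheTwirpClient
        .internalBlobCacheTwirpClient()
        .GetCacheBlobUploadURL(getSignedUploadURLRequest);
}
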
dist/save/index.js (vendored): 71 lines changed

@@ -3183,6 +3183,7 @@ const upload_cache_1 = __nccwpck_require__(47107);
 const download_cache_1 = __nccwpck_require__(9049);
 const upload_zip_specification_1 = __nccwpck_require__(77643);
 const zip_1 = __nccwpck_require__(26699);
+const util_1 = __nccwpck_require__(49196);
 class ValidationError extends Error {
 constructor(message) {
 super(message);
@@ -3322,9 +3323,12 @@ function restoreCachev2(paths, primaryKey, restoreKeys, options, enableCrossOsAr
 checkKey(key);
 }
 try {
+// BackendIds are retrieved form the signed JWT
+const backendIds = (0, util_1.getBackendIdsFromToken)();
 const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient();
 const getSignedDownloadURLRequest = {
-owner: "github",
+workflowRunBackendId: backendIds.workflowRunBackendId,
+workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
 keys: keys,
 };
 const signedDownloadURL = yield twirpClient.GetCachedBlob(getSignedDownloadURLRequest);
@@ -3439,8 +3443,12 @@ function saveCachev1(paths, key, options, enableCrossOsArchive = false) {
 }
 function saveCachev2(paths, key, options, enableCrossOsArchive = false) {
 return __awaiter(this, void 0, void 0, function* () {
+// BackendIds are retrieved form the signed JWT
+const backendIds = (0, util_1.getBackendIdsFromToken)();
 const twirpClient = cacheTwirpClient.internalBlobCacheTwirpClient();
 const getSignedUploadURL = {
+workflowRunBackendId: backendIds.workflowRunBackendId,
+workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
 organization: "github",
 keys: [key],
 };
@@ -3636,12 +3644,13 @@ const timestamp_1 = __nccwpck_require__(98983);
 class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 constructor() {
 super("github.actions.results.api.v1.GetCachedBlobRequest", [
-{ no: 1, name: "owner", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
 ]);
 }
 create(value) {
-const message = { owner: "", keys: [] };
+const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", keys: [] };
 globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
 if (value !== undefined)
 (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3652,10 +3661,13 @@ class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 while (reader.pos < end) {
 let [fieldNo, wireType] = reader.tag();
 switch (fieldNo) {
-case /* string owner */ 1:
-message.owner = reader.string();
+case /* string workflow_run_backend_id */ 1:
+message.workflowRunBackendId = reader.string();
 break;
-case /* repeated string keys */ 2:
+case /* string workflow_job_run_backend_id */ 2:
+message.workflowJobRunBackendId = reader.string();
+break;
+case /* repeated string keys */ 3:
 message.keys.push(reader.string());
 break;
 default:
@@ -3670,12 +3682,15 @@ class GetCachedBlobRequest$Type extends runtime_5.MessageType {
 return message;
 }
 internalBinaryWrite(message, writer, options) {
-/* string owner = 1; */
-if (message.owner !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.owner);
-/* repeated string keys = 2; */
+/* string workflow_run_backend_id = 1; */
+if (message.workflowRunBackendId !== "")
+writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+/* string workflow_job_run_backend_id = 2; */
+if (message.workflowJobRunBackendId !== "")
+writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+/* repeated string keys = 3; */
 for (let i = 0; i < message.keys.length; i++)
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
+writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
 let u = options.writeUnknownFields;
 if (u !== false)
 (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -3819,12 +3834,14 @@ exports.GetCachedBlobResponse_Blob = new GetCachedBlobResponse_Blob$Type();
 class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 constructor() {
 super("github.actions.results.api.v1.GetCacheBlobUploadURLRequest", [
-{ no: 1, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-{ no: 2, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
+{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 3, name: "organization", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+{ no: 4, name: "keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
 ]);
 }
 create(value) {
-const message = { organization: "", keys: [] };
+const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", organization: "", keys: [] };
 globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
 if (value !== undefined)
 (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3835,10 +3852,16 @@ class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 while (reader.pos < end) {
 let [fieldNo, wireType] = reader.tag();
 switch (fieldNo) {
-case /* string organization */ 1:
+case /* string workflow_run_backend_id */ 1:
+message.workflowRunBackendId = reader.string();
+break;
+case /* string workflow_job_run_backend_id */ 2:
+message.workflowJobRunBackendId = reader.string();
+break;
+case /* string organization */ 3:
 message.organization = reader.string();
 break;
-case /* repeated string keys */ 2:
+case /* repeated string keys */ 4:
 message.keys.push(reader.string());
 break;
 default:
@@ -3853,12 +3876,18 @@ class GetCacheBlobUploadURLRequest$Type extends runtime_5.MessageType {
 return message;
 }
 internalBinaryWrite(message, writer, options) {
-/* string organization = 1; */
+/* string workflow_run_backend_id = 1; */
+if (message.workflowRunBackendId !== "")
+writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
+/* string workflow_job_run_backend_id = 2; */
+if (message.workflowJobRunBackendId !== "")
+writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
+/* string organization = 3; */
 if (message.organization !== "")
-writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.organization);
-/* repeated string keys = 2; */
+writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.organization);
+/* repeated string keys = 4; */
 for (let i = 0; i < message.keys.length; i++)
-writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
+writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.keys[i]);
 let u = options.writeUnknownFields;
 if (u !== false)
 (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);