Mirror of https://git.mirrors.martin98.com/https://github.com/actions/toolkit (synced 2025-07-31 02:12:00 +08:00)
Update proto artifact interface, retrieve artifact digests, return indicator of mismatch failure

commit d5c8a0fa27 (parent ec9716b3cc)
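Taken together, the change threads a content digest through the download path: Artifact.digest carries the hash recorded at upload, DownloadArtifactOptions.expectedHash opts in to verification, and DownloadArtifactResponse.digestMismatch reports the result (a mismatch sets the flag rather than throwing). A minimal consumer sketch, assuming the DefaultArtifactClient surface of @actions/artifact; the name handling and error policy here are illustrative, not part of this commit:

import {DefaultArtifactClient} from '@actions/artifact'

async function downloadVerified(name: string): Promise<string | undefined> {
  const client = new DefaultArtifactClient()

  // getArtifact now surfaces the upload-time digest (see the Artifact interface changes below)
  const {artifact} = await client.getArtifact(name)

  // Digests compare as strings of the form 'sha256:<hex>'; expectedHash opts in to the check
  const {downloadPath, digestMismatch} = await client.downloadArtifact(artifact.id, {
    expectedHash: artifact.digest // may be undefined for artifacts uploaded before v4
  })

  if (digestMismatch) {
    // caller's choice: this commit only reports the mismatch, it does not throw
    throw new Error(`Artifact '${name}' failed digest verification`)
  }
  return downloadPath
}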
@@ -319,14 +319,6 @@ describe('download-artifact', () => {
 
     const mockGet = jest.fn(async () => {
       return new Promise((resolve, reject) => {
-        // Resolve with a 200 status code immediately
-        resolve({
-          message: msg,
-          readBody: async () => {
-            return Promise.resolve(`{"ok": true}`)
-          }
-        })
-
         // Reject with an error after 31 seconds
         setTimeout(() => {
           reject(new Error('Request timeout'))
@@ -1,5 +1,4 @@
 import * as github from '@actions/github'
-import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
 import type {RestEndpointMethodTypes} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types'
 import {
   listArtifactsInternal,
@@ -10,13 +9,13 @@ import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
 import * as util from '../src/internal/shared/util'
 import {noopLogs} from './common'
 import {Artifact} from '../src/internal/shared/interfaces'
+import {RequestInterface} from '@octokit/types'
 
-type MockedListWorkflowRunArtifacts = jest.MockedFunction<
-  RestEndpointMethods['actions']['listWorkflowRunArtifacts']
->
+type MockedRequest = jest.MockedFunction<RequestInterface<object>>
 
 jest.mock('@actions/github', () => ({
   getOctokit: jest.fn().mockReturnValue({
+    request: jest.fn(),
     rest: {
       actions: {
         listWorkflowRunArtifacts: jest.fn()
@@ -81,10 +80,10 @@ describe('list-artifact', () => {
 
   describe('public', () => {
    it('should return a list of artifacts', async () => {
-      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
-        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
 
-      mockListArtifacts.mockResolvedValueOnce({
+      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
@@ -105,10 +104,10 @@ describe('list-artifact', () => {
    })
 
    it('should return the latest artifact when latest is specified', async () => {
-      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
-        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
 
-      mockListArtifacts.mockResolvedValueOnce({
+      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
@@ -129,10 +128,10 @@ describe('list-artifact', () => {
    })
 
    it('can return empty artifacts', async () => {
-      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
-        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
 
-      mockListArtifacts.mockResolvedValueOnce({
+      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
@@ -156,10 +155,10 @@ describe('list-artifact', () => {
    })
 
    it('should fail if non-200 response', async () => {
-      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
-        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
 
-      mockListArtifacts.mockRejectedValue(new Error('boom'))
+      mockRequest.mockRejectedValueOnce(new Error('boom'))
 
      await expect(
        listArtifactsPublic(
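The test changes above track the switch from the typed rest.actions.listWorkflowRunArtifacts helper to the generic octokit.request route, whose looser payload typing can carry the new digest field. A condensed sketch of that mocking pattern; the payload values are illustrative:

import * as github from '@actions/github'
import type {RequestInterface} from '@octokit/types'

type MockedRequest = jest.MockedFunction<RequestInterface<object>>

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({request: jest.fn()})
}))

it('exposes the digest from the raw REST payload', async () => {
  const mockRequest = github.getOctokit('token').request as MockedRequest
  mockRequest.mockResolvedValueOnce({
    status: 200,
    headers: {},
    url: '',
    data: {
      total_count: 1,
      artifacts: [
        {
          id: 1,
          name: 'my-artifact',
          size_in_bytes: 456,
          created_at: '2023-12-12T12:00:00Z',
          digest: 'sha256:...' // illustrative; not in the published Octokit types at time of writing
        }
      ]
    }
  })
  // ...invoke listArtifactsPublic(...) and assert on the returned artifact's digest
})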
@@ -15,6 +15,66 @@ import { MessageType } from "@protobuf-ts/runtime";
 import { Int64Value } from "../../../google/protobuf/wrappers";
 import { StringValue } from "../../../google/protobuf/wrappers";
 import { Timestamp } from "../../../google/protobuf/timestamp";
+/**
+ * @generated from protobuf message github.actions.results.api.v1.MigrateArtifactRequest
+ */
+export interface MigrateArtifactRequest {
+    /**
+     * @generated from protobuf field: string workflow_run_backend_id = 1;
+     */
+    workflowRunBackendId: string;
+    /**
+     * @generated from protobuf field: string name = 2;
+     */
+    name: string;
+    /**
+     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 3;
+     */
+    expiresAt?: Timestamp;
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.MigrateArtifactResponse
+ */
+export interface MigrateArtifactResponse {
+    /**
+     * @generated from protobuf field: bool ok = 1;
+     */
+    ok: boolean;
+    /**
+     * @generated from protobuf field: string signed_upload_url = 2;
+     */
+    signedUploadUrl: string;
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
+ */
+export interface FinalizeMigratedArtifactRequest {
+    /**
+     * @generated from protobuf field: string workflow_run_backend_id = 1;
+     */
+    workflowRunBackendId: string;
+    /**
+     * @generated from protobuf field: string name = 2;
+     */
+    name: string;
+    /**
+     * @generated from protobuf field: int64 size = 3;
+     */
+    size: string;
+}
+/**
+ * @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
+ */
+export interface FinalizeMigratedArtifactResponse {
+    /**
+     * @generated from protobuf field: bool ok = 1;
+     */
+    ok: boolean;
+    /**
+     * @generated from protobuf field: int64 artifact_id = 2;
+     */
+    artifactId: string;
+}
 /**
  * @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
  */
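The message interfaces above get companion MessageType implementations further down in this file, each exposing a typed create factory plus binary read/write. A small sketch of constructing one of the new requests; the import path mirrors the tests above and the IDs are placeholders, so treat it as illustrative:

import {MigrateArtifactRequest, Timestamp} from '../src/generated'

// Note: int64 fields in these messages (e.g. FinalizeMigratedArtifactRequest.size)
// surface as strings in the generated TypeScript.
const request = MigrateArtifactRequest.create({
  workflowRunBackendId: 'workflow-run-backend-id', // placeholder
  name: 'my-artifact',
  expiresAt: Timestamp.fromDate(new Date(Date.now() + 24 * 60 * 60 * 1000))
})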
@@ -169,6 +229,12 @@ export interface ListArtifactsResponse_MonolithArtifact {
      * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
      */
     createdAt?: Timestamp;
+    /**
+     * The SHA-256 digest of the artifact, calculated on upload for upload-artifact v4 & newer
+     *
+     * @generated from protobuf field: google.protobuf.StringValue digest = 7;
+     */
+    digest?: StringValue;
 }
 /**
  * @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
@@ -227,6 +293,236 @@ export interface DeleteArtifactResponse {
     artifactId: string;
 }
 // @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactRequest$Type extends MessageType<MigrateArtifactRequest> {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "expires_at", kind: "message", T: () => Timestamp }
+        ]);
+    }
+    create(value?: PartialMessage<MigrateArtifactRequest>): MigrateArtifactRequest {
+        const message = { workflowRunBackendId: "", name: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<MigrateArtifactRequest>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactRequest): MigrateArtifactRequest {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 3:
+                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: MigrateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.name);
+        /* google.protobuf.Timestamp expires_at = 3; */
+        if (message.expiresAt)
+            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
+ */
+export const MigrateArtifactRequest = new MigrateArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class MigrateArtifactResponse$Type extends MessageType<MigrateArtifactResponse> {
+    constructor() {
+        super("github.actions.results.api.v1.MigrateArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    create(value?: PartialMessage<MigrateArtifactResponse>): MigrateArtifactResponse {
+        const message = { ok: false, signedUploadUrl: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<MigrateArtifactResponse>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactResponse): MigrateArtifactResponse {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* string signed_upload_url */ 2:
+                    message.signedUploadUrl = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: MigrateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, WireType.Varint).bool(message.ok);
+        /* string signed_upload_url = 2; */
+        if (message.signedUploadUrl !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
+ */
+export const MigrateArtifactResponse = new MigrateArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactRequest$Type extends MessageType<FinalizeMigratedArtifactRequest> {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
+            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value?: PartialMessage<FinalizeMigratedArtifactRequest>): FinalizeMigratedArtifactRequest {
+        const message = { workflowRunBackendId: "", name: "", size: "0" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<FinalizeMigratedArtifactRequest>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactRequest): FinalizeMigratedArtifactRequest {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string workflow_run_backend_id */ 1:
+                    message.workflowRunBackendId = reader.string();
+                    break;
+                case /* string name */ 2:
+                    message.name = reader.string();
+                    break;
+                case /* int64 size */ 3:
+                    message.size = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: FinalizeMigratedArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string workflow_run_backend_id = 1; */
+        if (message.workflowRunBackendId !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
+        /* string name = 2; */
+        if (message.name !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.name);
+        /* int64 size = 3; */
+        if (message.size !== "0")
+            writer.tag(3, WireType.Varint).int64(message.size);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
+ */
+export const FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FinalizeMigratedArtifactResponse$Type extends MessageType<FinalizeMigratedArtifactResponse> {
+    constructor() {
+        super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
+            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    create(value?: PartialMessage<FinalizeMigratedArtifactResponse>): FinalizeMigratedArtifactResponse {
+        const message = { ok: false, artifactId: "0" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<FinalizeMigratedArtifactResponse>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactResponse): FinalizeMigratedArtifactResponse {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool ok */ 1:
+                    message.ok = reader.bool();
+                    break;
+                case /* int64 artifact_id */ 2:
+                    message.artifactId = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: FinalizeMigratedArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* bool ok = 1; */
+        if (message.ok !== false)
+            writer.tag(1, WireType.Varint).bool(message.ok);
+        /* int64 artifact_id = 2; */
+        if (message.artifactId !== "0")
+            writer.tag(2, WireType.Varint).int64(message.artifactId);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
+ */
+export const FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
+// @generated message type with reflection information, may provide speed optimized methods
 class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
     constructor() {
         super("github.actions.results.api.v1.CreateArtifactRequest", [
@@ -608,7 +904,8 @@ class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifa
             { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
             { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
             { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => Timestamp }
+            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
+            { no: 7, name: "digest", kind: "message", T: () => StringValue }
         ]);
     }
     create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact {
@@ -641,6 +938,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifa
                 case /* google.protobuf.Timestamp created_at */ 6:
                     message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                     break;
+                case /* google.protobuf.StringValue digest */ 7:
+                    message.digest = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -671,6 +971,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifa
         /* google.protobuf.Timestamp created_at = 6; */
         if (message.createdAt)
             Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.StringValue digest = 7; */
+        if (message.digest)
+            StringValue.internalBinaryWrite(message.digest, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -912,5 +1215,7 @@ export const ArtifactService = new ServiceType("github.actions.results.api.v1.Ar
     { name: "FinalizeArtifact", options: {}, I: FinalizeArtifactRequest, O: FinalizeArtifactResponse },
     { name: "ListArtifacts", options: {}, I: ListArtifactsRequest, O: ListArtifactsResponse },
     { name: "GetSignedArtifactURL", options: {}, I: GetSignedArtifactURLRequest, O: GetSignedArtifactURLResponse },
-    { name: "DeleteArtifact", options: {}, I: DeleteArtifactRequest, O: DeleteArtifactResponse }
+    { name: "DeleteArtifact", options: {}, I: DeleteArtifactRequest, O: DeleteArtifactResponse },
+    { name: "MigrateArtifact", options: {}, I: MigrateArtifactRequest, O: MigrateArtifactResponse },
+    { name: "FinalizeMigratedArtifact", options: {}, I: FinalizeMigratedArtifactRequest, O: FinalizeMigratedArtifactResponse }
 ]);
@@ -1,11 +1,15 @@
 import fs from 'fs/promises'
+import * as crypto from 'crypto'
+import * as stream from 'stream'
+
 import * as github from '@actions/github'
 import * as core from '@actions/core'
 import * as httpClient from '@actions/http-client'
 import unzip from 'unzip-stream'
 import {
   DownloadArtifactOptions,
-  DownloadArtifactResponse
+  DownloadArtifactResponse,
+  StreamExtractResponse
 } from '../shared/interfaces'
 import {getUserAgentString} from '../shared/user-agent'
 import {getGitHubWorkspaceDir} from '../shared/config'
@@ -37,12 +41,14 @@ async function exists(path: string): Promise<boolean> {
   }
 }
 
-async function streamExtract(url: string, directory: string): Promise<void> {
+async function streamExtract(
+  url: string,
+  directory: string
+): Promise<StreamExtractResponse> {
   let retryCount = 0
   while (retryCount < 5) {
     try {
-      await streamExtractExternal(url, directory)
-      return
+      return await streamExtractExternal(url, directory)
     } catch (error) {
       retryCount++
       core.debug(
@@ -59,7 +65,7 @@ async function streamExtract(url: string, directory: string): Promise<void> {
 export async function streamExtractExternal(
   url: string,
   directory: string
-): Promise<void> {
+): Promise<StreamExtractResponse> {
   const client = new httpClient.HttpClient(getUserAgentString())
   const response = await client.get(url)
   if (response.message.statusCode !== 200) {
@@ -69,6 +75,7 @@ export async function streamExtractExternal(
   }
 
   const timeout = 30 * 1000 // 30 seconds
+  let sha256Digest: string | undefined = undefined
 
   return new Promise((resolve, reject) => {
     const timerFn = (): void => {
@@ -78,7 +85,14 @@ export async function streamExtractExternal(
     }
     const timer = setTimeout(timerFn, timeout)
 
-    response.message
+    const hashStream = crypto.createHash('sha256').setEncoding('hex')
+    const passThrough = new stream.PassThrough()
+
+    response.message.pipe(passThrough)
+    passThrough.pipe(hashStream)
+    const extractStream = passThrough
+
+    extractStream
       .on('data', () => {
         timer.refresh()
       })
@@ -92,7 +106,14 @@ export async function streamExtractExternal(
       .pipe(unzip.Extract({path: directory}))
       .on('close', () => {
         clearTimeout(timer)
-        resolve()
+        if (hashStream) {
+          hashStream.end()
+          sha256Digest = hashStream.read() as string
+          core.debug(
+            `SHA256 digest of downloaded artifact zip is ${sha256Digest}`
+          )
+        }
+        resolve({sha256Digest: `sha256:${sha256Digest}`})
       })
       .on('error', (error: Error) => {
         reject(error)
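The pattern above tees the HTTP response through a PassThrough so one branch feeds a SHA-256 hash while the other feeds the unzip extractor, and the digest is read once extraction closes. A standalone sketch of the same tee-and-hash technique, with illustrative names:

import * as crypto from 'crypto'
import * as stream from 'stream'

// Resolves with the hex digest once the sink finishes consuming the stream.
function hashWhileConsuming(
  source: stream.Readable,
  sink: stream.Writable
): Promise<string> {
  const hashStream = crypto.createHash('sha256').setEncoding('hex')
  const passThrough = new stream.PassThrough()

  source.pipe(passThrough)
  passThrough.pipe(hashStream) // first consumer: hashing
  passThrough.pipe(sink)       // second consumer: the real work

  return new Promise((resolve, reject) => {
    sink
      .on('close', () => {
        hashStream.end()
        resolve(hashStream.read() as string)
      })
      .on('error', reject)
  })
}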
@@ -111,6 +132,8 @@ export async function downloadArtifactPublic(
 
   const api = github.getOctokit(token)
 
+  let digestMismatch = false
+
   core.info(
     `Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`
   )
@@ -140,13 +163,20 @@ export async function downloadArtifactPublic(
 
   try {
     core.info(`Starting download of artifact to: ${downloadPath}`)
-    await streamExtract(location, downloadPath)
+    const extractResponse = await streamExtract(location, downloadPath)
     core.info(`Artifact download completed successfully.`)
+    if (options?.expectedHash) {
+      if (options?.expectedHash !== extractResponse.sha256Digest) {
+        digestMismatch = true
+        core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
+        core.debug(`Expected digest: ${options.expectedHash}`)
+      }
+    }
   } catch (error) {
     throw new Error(`Unable to download and extract artifact: ${error.message}`)
   }
 
-  return {downloadPath}
+  return {downloadPath, digestMismatch}
 }
 
 export async function downloadArtifactInternal(
@@ -157,6 +187,8 @@ export async function downloadArtifactInternal(
 
   const artifactClient = internalArtifactTwirpClient()
 
+  let digestMismatch = false
+
   const {workflowRunBackendId, workflowJobRunBackendId} =
     getBackendIdsFromToken()
 
@@ -192,13 +224,20 @@ export async function downloadArtifactInternal(
 
   try {
     core.info(`Starting download of artifact to: ${downloadPath}`)
-    await streamExtract(signedUrl, downloadPath)
+    const extractResponse = await streamExtract(signedUrl, downloadPath)
     core.info(`Artifact download completed successfully.`)
+    if (options?.expectedHash) {
+      if (options?.expectedHash !== extractResponse.sha256Digest) {
+        digestMismatch = true
+        core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
+        core.debug(`Expected digest: ${options.expectedHash}`)
+      }
+    }
   } catch (error) {
     throw new Error(`Unable to download and extract artifact: ${error.message}`)
   }
 
-  return {downloadPath}
+  return {downloadPath, digestMismatch}
 }
 
 async function resolveOrCreateDirectory(
@@ -68,7 +68,10 @@ export async function getArtifactPublic(
       name: artifact.name,
       id: artifact.id,
       size: artifact.size_in_bytes,
-      createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+      createdAt: artifact.created_at
+        ? new Date(artifact.created_at)
+        : undefined,
+      digest: artifact.digest
     }
   }
 }
@@ -115,7 +118,8 @@ export async function getArtifactInternal(
       size: Number(artifact.size),
       createdAt: artifact.createdAt
         ? Timestamp.toDate(artifact.createdAt)
-        : undefined
+        : undefined,
+      digest: artifact.digest?.value
     }
   }
 }
@@ -41,14 +41,17 @@ export async function listArtifactsPublic(
   const github = getOctokit(token, opts, retry, requestLog)
 
   let currentPageNumber = 1
-  const {data: listArtifactResponse} =
-    await github.rest.actions.listWorkflowRunArtifacts({
+  const {data: listArtifactResponse} = await github.request(
+    'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
+    {
       owner: repositoryOwner,
       repo: repositoryName,
      run_id: workflowRunId,
      per_page: paginationCount,
      page: currentPageNumber
-    })
+    }
+  )
 
   let numberOfPages = Math.ceil(
     listArtifactResponse.total_count / paginationCount
@@ -67,7 +70,10 @@ export async function listArtifactsPublic(
       name: artifact.name,
       id: artifact.id,
       size: artifact.size_in_bytes,
-      createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+      createdAt: artifact.created_at
+        ? new Date(artifact.created_at)
+        : undefined,
+      digest: (artifact as ArtifactResponse).digest
     })
   }
 
@@ -80,14 +86,16 @@ export async function listArtifactsPublic(
     currentPageNumber++
     debug(`Fetching page ${currentPageNumber} of artifact list`)
 
-    const {data: listArtifactResponse} =
-      await github.rest.actions.listWorkflowRunArtifacts({
+    const {data: listArtifactResponse} = await github.request(
+      'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
+      {
        owner: repositoryOwner,
        repo: repositoryName,
        run_id: workflowRunId,
        per_page: paginationCount,
        page: currentPageNumber
-      })
+      }
+    )
 
     for (const artifact of listArtifactResponse.artifacts) {
       artifacts.push({
@@ -96,7 +104,8 @@ export async function listArtifactsPublic(
         size: artifact.size_in_bytes,
         createdAt: artifact.created_at
           ? new Date(artifact.created_at)
-          : undefined
+          : undefined,
+        digest: (artifact as ArtifactResponse).digest
       })
     }
   }
@@ -132,7 +141,8 @@ export async function listArtifactsInternal(
     size: Number(artifact.size),
     createdAt: artifact.createdAt
       ? Timestamp.toDate(artifact.createdAt)
-      : undefined
+      : undefined,
+    digest: artifact.digest?.value
   }))
 
   if (latest) {
@@ -146,6 +156,18 @@ export async function listArtifactsInternal(
   }
 }
 
+/**
+ * This exists so that we don't have to use 'any' when receiving the artifact list from the GitHub API.
+ * The digest field is not present in OpenAPI/types at time of writing, which necessitates this change.
+ */
+interface ArtifactResponse {
+  name: string
+  id: number
+  size_in_bytes: number
+  created_at?: string
+  digest?: string
+}
+
 /**
  * Filters a list of artifacts to only include the latest artifact for each name
  * @param artifacts The artifacts to filter
@@ -91,6 +91,11 @@ export interface DownloadArtifactResponse {
    * The path where the artifact was downloaded to
    */
   downloadPath?: string
+
+  /**
+   * Returns true if the digest of the downloaded artifact does not match the expected hash
+   */
+  digestMismatch?: boolean
 }
 
 /**
@@ -101,6 +106,19 @@ export interface DownloadArtifactOptions {
    * Denotes where the artifact will be downloaded to. If not specified then the artifact is download to GITHUB_WORKSPACE
    */
   path?: string
+
+  /**
+   * The hash that was computed for the artifact during upload. Don't provide this unless you want to verify the hash.
+   * If the hash doesn't match, the download will fail.
+   */
+  expectedHash?: string
+}
+
+export interface StreamExtractResponse {
+  /**
+   * The SHA256 hash of the downloaded file
+   */
+  sha256Digest?: string
 }
 
 /**
@@ -126,6 +144,11 @@ export interface Artifact {
    * The time when the artifact was created
    */
   createdAt?: Date
+
+  /**
+   * The digest of the artifact, computed at time of upload.
+   */
+  digest?: string
 }
 
 // FindOptions are for fetching Artifact(s) out of the scope of the current run.