Cloud runner develop v0.1 (#395)
* Correct AWS logs link
* Better AWS CLI commands and better cleanup for AWS
* Improved garbage collection CLI options
* Only allow ephemeral runners when the cloud runner integration tests flag is set, to avoid unexpected hangup
* Fix issue #393
* Extract follow log stream service
* Consolidate into one pipeline file
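The central refactor is extracting the per-line log handling shared by the AWS and Kubernetes runners into a single `FollowLogStreamService`. A minimal TypeScript sketch of that service, reconstructed from the compiled `dist/index.js` shown below (the logic and identifiers follow the bundled output; the relative import paths are illustrative, since only the compiled bundle appears in this commit view):

```typescript
import * as core from '@actions/core';
import CloudRunnerLogger from './cloud-runner-logger';
import CloudRunner from './cloud-runner';
import { CloudRunnerStatics } from './cloud-runner-statics';

export class FollowLogStreamService {
  // Inspects one log line and folds the result back into the caller's state.
  public static handleIteration(message: string, shouldReadLogs: boolean, shouldCleanup: boolean, output: string) {
    if (message.includes(`---${CloudRunner.buildParameters.logId}`)) {
      CloudRunnerLogger.log('End of log transmission received');
      shouldReadLogs = false;
    } else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
      core.warning('LIBRARY NOT FOUND!');
      core.setOutput('library-found', 'false');
    } else if (message.includes('Build succeeded')) {
      core.setOutput('build-result', 'success');
    } else if (message.includes('Build fail')) {
      core.setOutput('build-result', 'failed');
      core.setFailed('unity build failed');
      core.error('BUILD FAILED!');
    } else if (CloudRunner.buildParameters.cloudRunnerIntegrationTests && message.includes(': Listening for Jobs')) {
      // Per the commit message, ephemeral runners only stop the watcher when the
      // cloud runner integration tests flag is enabled, avoiding unexpected hangup.
      core.setOutput('cloud runner stop watching', 'true');
      shouldReadLogs = false;
      shouldCleanup = false;
      core.warning('cloud runner stop watching');
    }
    message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
    if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
      output += message;
    }
    CloudRunnerLogger.log(message);
    return { shouldReadLogs, shouldCleanup, output };
  }
}
```

Both runners now call this one method per log line, as the compiled hunks below show.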
dist/index.js (generated, vendored)
@@ -231,7 +231,7 @@ class BuildParameters {
// Todo - Don't use process.env directly, that's what the input model class is for.
// ---
let unitySerial = '';
if (!process.env.UNITY_SERIAL && input_1.default.githubInputEnabled && cli_1.Cli.options === undefined) {
if (!process.env.UNITY_SERIAL && input_1.default.githubInputEnabled) {
// No serial was present, so it is a personal license that we need to convert
if (!process.env.UNITY_LICENSE) {
throw new Error(`Missing Unity License File and no Serial was found. If this
@@ -1200,8 +1200,8 @@ const zlib = __importStar(__nccwpck_require__(59796));
const cloud_runner_logger_1 = __importDefault(__nccwpck_require__(22855));
const __1 = __nccwpck_require__(41359);
const cloud_runner_1 = __importDefault(__nccwpck_require__(79144));
const cloud_runner_statics_1 = __nccwpck_require__(90828);
const cloud_runner_build_command_process_1 = __nccwpck_require__(71899);
const follow_log_stream_service_1 = __nccwpck_require__(64121);
class AWSTaskRunner {
static runTask(taskDef, ECS, CF, environment, buildGuid, commands) {
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;
@@ -1293,8 +1293,8 @@ class AWSTaskRunner {
const kinesis = new AWS.Kinesis();
const stream = yield AWSTaskRunner.getLogStream(kinesis, kinesisStreamName);
let iterator = yield AWSTaskRunner.getLogIterator(kinesis, stream);
const logBaseUrl = `https://${__1.Input.region}.console.aws.amazon.com/cloudwatch/home?region=${CF.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
cloud_runner_logger_1.default.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
const logBaseUrl = `https://${__1.Input.region}.console.aws.amazon.com/cloudwatch/home?region=${__1.Input.region}#logsV2:log-groups/log-group/${cloud_runner_1.default.buildParameters.awsBaseStackName}-${cloud_runner_1.default.buildParameters.buildGuid}`;
cloud_runner_logger_1.default.log(`You view the log stream on AWS Cloud Watch: ${logBaseUrl}`);
let shouldReadLogs = true;
let shouldCleanup = true;
let timestamp = 0;
@@ -1343,34 +1343,8 @@ class AWSTaskRunner {
const json = JSON.parse(zlib.gunzipSync(Buffer.from(records.Records[index].Data, 'base64')).toString('utf8'));
if (json.messageType === 'DATA_MESSAGE') {
for (let logEventsIndex = 0; logEventsIndex < json.logEvents.length; logEventsIndex++) {
let message = json.logEvents[logEventsIndex].message;
if (json.logEvents[logEventsIndex].message.includes(`---${cloud_runner_1.default.buildParameters.logId}`)) {
cloud_runner_logger_1.default.log('End of log transmission received');
shouldReadLogs = false;
}
else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
core.warning('LIBRARY NOT FOUND!');
core.setOutput('library-found', 'false');
}
else if (message.includes('Build succeeded')) {
core.setOutput('build-result', 'success');
}
else if (message.includes('Build fail')) {
core.setOutput('build-result', 'failed');
core.setFailed('unity build failed');
core.error('BUILD FAILED!');
}
else if (message.includes(': Listening for Jobs')) {
core.setOutput('cloud runner stop watching', 'true');
shouldReadLogs = false;
shouldCleanup = false;
core.warning('cloud runner stop watching');
}
message = `[${cloud_runner_statics_1.CloudRunnerStatics.logPrefix}] ${message}`;
if (cloud_runner_1.default.buildParameters.cloudRunnerIntegrationTests) {
output += message;
}
cloud_runner_logger_1.default.log(message);
const message = json.logEvents[logEventsIndex].message;
({ shouldReadLogs, shouldCleanup, output } = follow_log_stream_service_1.FollowLogStreamService.handleIteration(message, shouldReadLogs, shouldCleanup, output));
}
}
}
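With the service extracted, the AWS runner's Kinesis loop reduces to decoding each record and delegating every log event to the shared handler. A sketch of that loop body in TypeScript, following the compiled hunk above (the `handleRecord` wrapper and its `state` parameter are illustrative, not names from the source):

```typescript
import * as zlib from 'zlib';
import { FollowLogStreamService } from './follow-log-stream-service';

// Decode one Kinesis record (base64 + gzip) and feed each CloudWatch log event
// through the shared handler, which owns all message matching and outputs.
function handleRecord(data: string, state: { shouldReadLogs: boolean; shouldCleanup: boolean; output: string }) {
  const json = JSON.parse(zlib.gunzipSync(Buffer.from(data, 'base64')).toString('utf8'));
  if (json.messageType !== 'DATA_MESSAGE') return;
  for (const logEvent of json.logEvents) {
    ({
      shouldReadLogs: state.shouldReadLogs,
      shouldCleanup: state.shouldCleanup,
      output: state.output,
    } = FollowLogStreamService.handleIteration(logEvent.message, state.shouldReadLogs, state.shouldCleanup, state.output));
  }
}
```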
@@ -1413,8 +1387,9 @@ exports.BaseStackFormation = void 0;
class BaseStackFormation {
}
exports.BaseStackFormation = BaseStackFormation;
BaseStackFormation.baseStackDecription = `Game-CI base stack`;
BaseStackFormation.formation = `AWSTemplateFormatVersion: '2010-09-09'
Description: Game-CI base stack
Description: ${BaseStackFormation.baseStackDecription}
Parameters:
EnvironmentName:
Type: String
@@ -1816,11 +1791,9 @@ exports.TaskDefinitionFormation = void 0;
class TaskDefinitionFormation {
}
exports.TaskDefinitionFormation = TaskDefinitionFormation;
TaskDefinitionFormation.description = `Game CI Cloud Runner Task Stack`;
TaskDefinitionFormation.formation = `AWSTemplateFormatVersion: 2010-09-09
Description: >-
AWS Fargate cluster that can span public and private subnets. Supports public
facing load balancers, private internal load balancers, and both internal and
external service discovery namespaces.
Description: ${TaskDefinitionFormation.description}
Parameters:
EnvironmentName:
Type: String
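Both formations now interpolate a static description constant into the CloudFormation template instead of hard-coding the string; the cleanup commands later in this diff filter stacks by `TemplateDescription` against the same constant, so the two cannot drift apart. A condensed TypeScript sketch of the pattern (the template is truncated here; the misspelled `baseStackDecription` name is kept as it appears in the compiled output):

```typescript
export class BaseStackFormation {
  // Single source of truth: the cleanup CLI filters ListStacks results by
  // TemplateDescription === BaseStackFormation.baseStackDecription.
  public static readonly baseStackDecription = 'Game-CI base stack';

  public static readonly formation = `AWSTemplateFormatVersion: '2010-09-09'
Description: ${BaseStackFormation.baseStackDecription}
Parameters:
  EnvironmentName:
    Type: String`;
}
```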
@@ -2005,59 +1978,13 @@ const aws_sdk_1 = __importDefault(__nccwpck_require__(71786));
const cli_functions_repository_1 = __nccwpck_require__(85301);
const input_1 = __importDefault(__nccwpck_require__(91933));
const cloud_runner_logger_1 = __importDefault(__nccwpck_require__(22855));
const base_stack_formation_1 = __nccwpck_require__(29643);
class AwsCliCommands {
static awsListStacks(perResultCallback) {
var _a;
static awsListAll() {
return __awaiter(this, void 0, void 0, function* () {
process.env.AWS_REGION = input_1.default.region;
const CF = new aws_sdk_1.default.CloudFormation();
const stacks = ((_a = (yield CF.listStacks().promise()).StackSummaries) === null || _a === void 0 ? void 0 : _a.filter((_x) => _x.StackStatus !== 'DELETE_COMPLETE')) || [];
cloud_runner_logger_1.default.log(`DescribeStacksRequest ${stacks.length}`);
for (const element of stacks) {
cloud_runner_logger_1.default.log(JSON.stringify(element, undefined, 4));
cloud_runner_logger_1.default.log(`${element.StackName}`);
if (perResultCallback)
yield perResultCallback(element);
}
if (stacks === undefined) {
return;
}
});
}
static awsListTasks(perResultCallback) {
return __awaiter(this, void 0, void 0, function* () {
process.env.AWS_REGION = input_1.default.region;
cloud_runner_logger_1.default.log(`ECS Clusters`);
const ecs = new aws_sdk_1.default.ECS();
const clusters = (yield ecs.listClusters().promise()).clusterArns || [];
for (const element of clusters) {
const input = {
cluster: element,
};
const list = (yield ecs.listTasks(input).promise()).taskArns || [];
if (list.length > 0) {
const describeInput = { tasks: list, cluster: element };
const describeList = (yield ecs.describeTasks(describeInput).promise()).tasks || [];
if (describeList === []) {
continue;
}
cloud_runner_logger_1.default.log(`DescribeTasksRequest ${describeList.length}`);
for (const taskElement of describeList) {
if (taskElement === undefined) {
continue;
}
taskElement.overrides = {};
taskElement.attachments = [];
cloud_runner_logger_1.default.log(JSON.stringify(taskElement, undefined, 4));
if (taskElement.createdAt === undefined) {
cloud_runner_logger_1.default.log(`Skipping ${taskElement.taskDefinitionArn} no createdAt date`);
continue;
}
if (perResultCallback)
yield perResultCallback(taskElement, element);
}
}
}
yield AwsCliCommands.awsListStacks(undefined, true);
yield AwsCliCommands.awsListTasks();
yield AwsCliCommands.awsListLogGroups(undefined, true);
});
}
static garbageCollectAws() {
@@ -2072,32 +1999,149 @@ class AwsCliCommands {
}
static garbageCollectAwsAllOlderThanOneDay() {
return __awaiter(this, void 0, void 0, function* () {
yield AwsCliCommands.cleanup(true);
yield AwsCliCommands.cleanup(true, true);
});
}
static cleanup(deleteResources = false) {
static isOlderThan1day(date) {
const ageDate = new Date(date.getTime() - Date.now());
return ageDate.getDay() > 0;
}
static awsListStacks(perResultCallback = false, verbose = false) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
process.env.AWS_REGION = input_1.default.region;
const CF = new aws_sdk_1.default.CloudFormation();
const stacks = ((_a = (yield CF.listStacks().promise()).StackSummaries) === null || _a === void 0 ? void 0 : _a.filter((_x) => _x.StackStatus !== 'DELETE_COMPLETE')) || [];
cloud_runner_logger_1.default.log(`Stacks ${stacks.length}`);
for (const element of stacks) {
const ageDate = new Date(element.CreationTime.getTime() - Date.now());
if (verbose)
cloud_runner_logger_1.default.log(`Task Stack ${element.StackName} - Age D${ageDate.getDay()} H${ageDate.getHours()} M${ageDate.getMinutes()}`);
if (perResultCallback)
yield perResultCallback(element);
}
const baseStacks = ((_b = (yield CF.listStacks().promise()).StackSummaries) === null || _b === void 0 ? void 0 : _b.filter((_x) => _x.StackStatus !== 'DELETE_COMPLETE' && _x.TemplateDescription === base_stack_formation_1.BaseStackFormation.baseStackDecription)) || [];
cloud_runner_logger_1.default.log(`Base Stacks ${baseStacks.length}`);
for (const element of baseStacks) {
const ageDate = new Date(element.CreationTime.getTime() - Date.now());
if (verbose)
cloud_runner_logger_1.default.log(`Base Stack ${element.StackName} - Age D${ageDate.getHours()} H${ageDate.getHours()} M${ageDate.getMinutes()}`);
if (perResultCallback)
yield perResultCallback(element);
}
if (stacks === undefined) {
return;
}
});
}
static awsListTasks(perResultCallback = false) {
return __awaiter(this, void 0, void 0, function* () {
process.env.AWS_REGION = input_1.default.region;
const ecs = new aws_sdk_1.default.ECS();
const clusters = (yield ecs.listClusters().promise()).clusterArns || [];
cloud_runner_logger_1.default.log(`Clusters ${clusters.length}`);
for (const element of clusters) {
const input = {
cluster: element,
};
const list = (yield ecs.listTasks(input).promise()).taskArns || [];
if (list.length > 0) {
const describeInput = { tasks: list, cluster: element };
const describeList = (yield ecs.describeTasks(describeInput).promise()).tasks || [];
if (describeList === []) {
continue;
}
cloud_runner_logger_1.default.log(`Tasks ${describeList.length}`);
for (const taskElement of describeList) {
if (taskElement === undefined) {
continue;
}
taskElement.overrides = {};
taskElement.attachments = [];
if (taskElement.createdAt === undefined) {
cloud_runner_logger_1.default.log(`Skipping ${taskElement.taskDefinitionArn} no createdAt date`);
continue;
}
if (perResultCallback)
yield perResultCallback(taskElement, element);
}
}
}
});
}
static awsListLogGroups(perResultCallback = false, verbose = false) {
return __awaiter(this, void 0, void 0, function* () {
process.env.AWS_REGION = input_1.default.region;
const ecs = new aws_sdk_1.default.CloudWatchLogs();
let logStreamInput = {
/* logGroupNamePrefix: 'game-ci' */
};
let logGroupsDescribe = yield ecs.describeLogGroups(logStreamInput).promise();
const logGroups = logGroupsDescribe.logGroups || [];
while (logGroupsDescribe.nextToken) {
logStreamInput = { /* logGroupNamePrefix: 'game-ci',*/ nextToken: logGroupsDescribe.nextToken };
logGroupsDescribe = yield ecs.describeLogGroups(logStreamInput).promise();
logGroups.push(...((logGroupsDescribe === null || logGroupsDescribe === void 0 ? void 0 : logGroupsDescribe.logGroups) || []));
}
cloud_runner_logger_1.default.log(`Log Groups ${logGroups.length}`);
for (const element of logGroups) {
if (element.creationTime === undefined) {
cloud_runner_logger_1.default.log(`Skipping ${element.logGroupName} no createdAt date`);
continue;
}
const ageDate = new Date(new Date(element.creationTime).getTime() - Date.now());
if (verbose)
cloud_runner_logger_1.default.log(`Log Group Name ${element.logGroupName} - Age D${ageDate.getDay()} H${ageDate.getHours()} M${ageDate.getMinutes()} - 1d old ${AwsCliCommands.isOlderThan1day(new Date(element.creationTime))}`);
if (perResultCallback)
yield perResultCallback(element, element);
}
});
}
static cleanup(deleteResources = false, OneDayOlderOnly = false) {
return __awaiter(this, void 0, void 0, function* () {
process.env.AWS_REGION = input_1.default.region;
const CF = new aws_sdk_1.default.CloudFormation();
const ecs = new aws_sdk_1.default.ECS();
const cwl = new aws_sdk_1.default.CloudWatchLogs();
yield AwsCliCommands.awsListStacks((element) => __awaiter(this, void 0, void 0, function* () {
if (deleteResources) {
if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(element.CreationTime))) {
if (element.StackName === 'game-ci' || element.TemplateDescription === 'Game-CI base stack') {
cloud_runner_logger_1.default.log(`Skipping ${element.StackName} ignore list`);
return;
}
cloud_runner_logger_1.default.log(`Deleting ${element.logGroupName}`);
const deleteStackInput = { StackName: element.StackName };
yield CF.deleteStack(deleteStackInput).promise();
}
}));
yield AwsCliCommands.awsListTasks((taskElement, element) => __awaiter(this, void 0, void 0, function* () {
if (deleteResources) {
var _a;
if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(taskElement.CreatedAt))) {
cloud_runner_logger_1.default.log(`Stopping task ${(_a = taskElement.containers) === null || _a === void 0 ? void 0 : _a[0].name}`);
yield ecs.stopTask({ task: taskElement.taskArn || '', cluster: element }).promise();
}
}));
yield AwsCliCommands.awsListLogGroups((element) => __awaiter(this, void 0, void 0, function* () {
if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(new Date(element.createdAt)))) {
cloud_runner_logger_1.default.log(`Deleting ${element.logGroupName}`);
yield cwl.deleteLogGroup({ logGroupName: element.logGroupName || '' }).promise();
}
}));
});
}
}
__decorate([
cli_functions_repository_1.CliFunction(`aws-list-all`, `List all resources`)
], AwsCliCommands, "awsListAll", null);
__decorate([
cli_functions_repository_1.CliFunction(`aws-garbage-collect`, `garbage collect aws resources not in use !WIP!`)
], AwsCliCommands, "garbageCollectAws", null);
__decorate([
cli_functions_repository_1.CliFunction(`aws-garbage-collect-all`, `garbage collect aws resources regardless of whether they are in use`)
], AwsCliCommands, "garbageCollectAwsAll", null);
__decorate([
cli_functions_repository_1.CliFunction(`aws-garbage-collect-all-1d-older`, `garbage collect aws resources created more than 1d ago (ignore if they are in use)`)
], AwsCliCommands, "garbageCollectAwsAllOlderThanOneDay", null);
__decorate([
cli_functions_repository_1.CliFunction(`aws-list-stacks`, `List stacks`)
], AwsCliCommands, "awsListStacks", null);
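The new `aws-list-log-groups` path pages through `describeLogGroups` with `nextToken` instead of reading a single page. A standalone TypeScript sketch of that pagination, using the same AWS SDK v2 API that appears in the compiled code above (the `listAllLogGroups` helper name is illustrative):

```typescript
import AWS from 'aws-sdk';

// Collect every CloudWatch log group in the region by following nextToken until exhausted.
async function listAllLogGroups(): Promise<AWS.CloudWatchLogs.LogGroup[]> {
  const cwl = new AWS.CloudWatchLogs();
  const logGroups: AWS.CloudWatchLogs.LogGroup[] = [];
  let response = await cwl.describeLogGroups({}).promise();
  logGroups.push(...(response.logGroups || []));
  while (response.nextToken) {
    response = await cwl.describeLogGroups({ nextToken: response.nextToken }).promise();
    logGroups.push(...(response.logGroups || []));
  }
  return logGroups;
}
```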
@@ -2105,14 +2149,8 @@ __decorate([
cli_functions_repository_1.CliFunction(`aws-list-tasks`, `List tasks`)
], AwsCliCommands, "awsListTasks", null);
__decorate([
cli_functions_repository_1.CliFunction(`aws-garbage-collect`, `garbage collect aws`)
], AwsCliCommands, "garbageCollectAws", null);
__decorate([
cli_functions_repository_1.CliFunction(`aws-garbage-collect-all`, `garbage collect aws`)
], AwsCliCommands, "garbageCollectAwsAll", null);
__decorate([
cli_functions_repository_1.CliFunction(`aws-garbage-collect-all-1d-older`, `garbage collect aws`)
], AwsCliCommands, "garbageCollectAwsAllOlderThanOneDay", null);
cli_functions_repository_1.CliFunction(`aws-list-log-groups`, `List tasks`)
], AwsCliCommands, "awsListLogGroups", null);
exports.AwsCliCommands = AwsCliCommands;
@@ -2347,7 +2385,7 @@ class Kubernetes {
try {
yield kubernetes_task_runner_1.default.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
cloud_runner_logger_1.default.log('Pod running, streaming logs');
output = yield kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, 'main', this.namespace, cloud_runner_logger_1.default.log);
output = yield kubernetes_task_runner_1.default.runTask(this.kubeConfig, this.kubeClient, this.jobName, this.podName, 'main', this.namespace);
break;
}
catch (error) {
@@ -2882,22 +2920,21 @@ const cloud_runner_logger_1 = __importDefault(__nccwpck_require__(22855));
const core = __importStar(__nccwpck_require__(42186));
const cloud_runner_statics_1 = __nccwpck_require__(90828);
const async_wait_until_1 = __importDefault(__nccwpck_require__(41299));
const cloud_runner_1 = __importDefault(__nccwpck_require__(79144));
const follow_log_stream_service_1 = __nccwpck_require__(64121);
class KubernetesTaskRunner {
static runTask(kubeConfig, kubeClient, jobName, podName, containerName, namespace, logCallback) {
static runTask(kubeConfig, kubeClient, jobName, podName, containerName, namespace) {
return __awaiter(this, void 0, void 0, function* () {
cloud_runner_logger_1.default.log(`Streaming logs from pod: ${podName} container: ${containerName} namespace: ${namespace}`);
const stream = new stream_1.Writable();
let output = '';
let didStreamAnyLogs = false;
let shouldReadLogs = true;
let shouldCleanup = true;
stream._write = (chunk, encoding, next) => {
didStreamAnyLogs = true;
let message = chunk.toString().trimRight(`\n`);
message = `[${cloud_runner_statics_1.CloudRunnerStatics.logPrefix}] ${message}`;
if (cloud_runner_1.default.buildParameters.cloudRunnerIntegrationTests) {
output += message;
}
logCallback(message);
({ shouldReadLogs, shouldCleanup, output } = follow_log_stream_service_1.FollowLogStreamService.handleIteration(message, shouldReadLogs, shouldCleanup, output));
next();
};
const logOptions = {
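On the Kubernetes side, the runner drops its `logCallback` parameter and routes every streamed chunk through the same shared handler. A TypeScript sketch of the Writable sink, following the compiled hunk above (the `createLogSink` factory and its `state` parameter are illustrative; `trimEnd()` stands in for the compiled `trimRight` call):

```typescript
import { Writable } from 'stream';
import { FollowLogStreamService } from './follow-log-stream-service';

// Build the sink handed to the Kubernetes log API: every chunk becomes one
// handleIteration call, so AWS and Kubernetes share identical log semantics.
function createLogSink(state: { shouldReadLogs: boolean; shouldCleanup: boolean; output: string }): Writable {
  const stream = new Writable();
  stream._write = (chunk, _encoding, next) => {
    const message = chunk.toString().trimEnd();
    ({
      shouldReadLogs: state.shouldReadLogs,
      shouldCleanup: state.shouldCleanup,
      output: state.output,
    } = FollowLogStreamService.handleIteration(message, state.shouldReadLogs, state.shouldCleanup, state.output));
    next();
  };
  return stream;
}
```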
@@ -3869,6 +3906,76 @@ class DependencyOverrideService {
exports["default"] = DependencyOverrideService;


/***/ }),

/***/ 64121:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.FollowLogStreamService = void 0;
const cloud_runner_logger_1 = __importDefault(__nccwpck_require__(22855));
const core = __importStar(__nccwpck_require__(42186));
const cloud_runner_1 = __importDefault(__nccwpck_require__(79144));
const cloud_runner_statics_1 = __nccwpck_require__(90828);
class FollowLogStreamService {
static handleIteration(message, shouldReadLogs, shouldCleanup, output) {
if (message.includes(`---${cloud_runner_1.default.buildParameters.logId}`)) {
cloud_runner_logger_1.default.log('End of log transmission received');
shouldReadLogs = false;
}
else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
core.warning('LIBRARY NOT FOUND!');
core.setOutput('library-found', 'false');
}
else if (message.includes('Build succeeded')) {
core.setOutput('build-result', 'success');
}
else if (message.includes('Build fail')) {
core.setOutput('build-result', 'failed');
core.setFailed('unity build failed');
core.error('BUILD FAILED!');
}
else if (cloud_runner_1.default.buildParameters.cloudRunnerIntegrationTests && message.includes(': Listening for Jobs')) {
core.setOutput('cloud runner stop watching', 'true');
shouldReadLogs = false;
shouldCleanup = false;
core.warning('cloud runner stop watching');
}
message = `[${cloud_runner_statics_1.CloudRunnerStatics.logPrefix}] ${message}`;
if (cloud_runner_1.default.buildParameters.cloudRunnerIntegrationTests) {
output += message;
}
cloud_runner_logger_1.default.log(message);
return { shouldReadLogs, shouldCleanup, output };
}
}
exports.FollowLogStreamService = FollowLogStreamService;


/***/ }),

/***/ 8915: