Cloud Runner v2 (#310)
@@ -42,15 +42,13 @@ describe('BuildParameters', () => {
|
||||
it('returns the android version code with provided input', async () => {
|
||||
const mockValue = '42';
|
||||
jest.spyOn(Input, 'androidVersionCode', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ androidVersionCode: mockValue }),
|
||||
);
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidVersionCode: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the android version code from version by default', async () => {
|
||||
const mockValue = '';
|
||||
jest.spyOn(Input, 'androidVersionCode', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidVersionCode: 1003037 }));
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidVersionCode: 1003037 }));
|
||||
});
|
||||
|
||||
it('determines the android sdk manager parameters only once', async () => {
|
||||
@@ -61,19 +59,19 @@ describe('BuildParameters', () => {
|
||||
it('returns the platform', async () => {
|
||||
const mockValue = 'somePlatform';
|
||||
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ platform: mockValue }));
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ platform: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the project path', async () => {
|
||||
const mockValue = 'path/to/project';
|
||||
jest.spyOn(Input, 'projectPath', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ projectPath: mockValue }));
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ projectPath: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the build name', async () => {
|
||||
const mockValue = 'someBuildName';
|
||||
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildName: mockValue }));
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildName: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the build path', async () => {
|
||||
@@ -82,15 +80,13 @@ describe('BuildParameters', () => {
|
||||
const expectedBuildPath = `${mockPath}/${mockPlatform}`;
|
||||
jest.spyOn(Input, 'buildsPath', 'get').mockReturnValue(mockPath);
|
||||
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(mockPlatform);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ buildPath: expectedBuildPath }),
|
||||
);
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildPath: expectedBuildPath }));
|
||||
});
|
||||
|
||||
it('returns the build file', async () => {
|
||||
const mockValue = 'someBuildName';
|
||||
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildFile: mockValue }));
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildFile: mockValue }));
|
||||
});
|
||||
|
||||
test.each([Platform.types.StandaloneWindows, Platform.types.StandaloneWindows64])(
|
||||
@@ -98,7 +94,7 @@ describe('BuildParameters', () => {
|
||||
async (targetPlatform) => {
|
||||
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(targetPlatform);
|
||||
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(targetPlatform);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ buildFile: `${targetPlatform}.exe` }),
|
||||
);
|
||||
},
|
||||
@@ -108,7 +104,7 @@ describe('BuildParameters', () => {
|
||||
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(targetPlatform);
|
||||
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(targetPlatform);
|
||||
jest.spyOn(Input, 'androidAppBundle', 'get').mockReturnValue(false);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ buildFile: `${targetPlatform}.apk` }),
|
||||
);
|
||||
});
|
||||
@@ -117,7 +113,7 @@ describe('BuildParameters', () => {
|
||||
jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(targetPlatform);
|
||||
jest.spyOn(Input, 'buildName', 'get').mockReturnValue(targetPlatform);
|
||||
jest.spyOn(Input, 'androidAppBundle', 'get').mockReturnValue(true);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ buildFile: `${targetPlatform}.aab` }),
|
||||
);
|
||||
});
|
||||
@@ -125,53 +121,43 @@ describe('BuildParameters', () => {
|
||||
it('returns the build method', async () => {
|
||||
const mockValue = 'Namespace.ClassName.BuildMethod';
|
||||
jest.spyOn(Input, 'buildMethod', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildMethod: mockValue }));
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildMethod: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the android keystore name', async () => {
|
||||
const mockValue = 'keystore.keystore';
|
||||
jest.spyOn(Input, 'androidKeystoreName', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ androidKeystoreName: mockValue }),
|
||||
);
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeystoreName: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the android keystore base64-encoded content', async () => {
|
||||
const mockValue = 'secret';
|
||||
jest.spyOn(Input, 'androidKeystoreBase64', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ androidKeystoreBase64: mockValue }),
|
||||
);
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeystoreBase64: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the android keystore pass', async () => {
|
||||
const mockValue = 'secret';
|
||||
jest.spyOn(Input, 'androidKeystorePass', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ androidKeystorePass: mockValue }),
|
||||
);
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeystorePass: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the android keyalias name', async () => {
|
||||
const mockValue = 'secret';
|
||||
jest.spyOn(Input, 'androidKeyaliasName', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ androidKeyaliasName: mockValue }),
|
||||
);
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeyaliasName: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the android keyalias pass', async () => {
|
||||
const mockValue = 'secret';
|
||||
jest.spyOn(Input, 'androidKeyaliasPass', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ androidKeyaliasPass: mockValue }),
|
||||
);
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeyaliasPass: mockValue }));
|
||||
});
|
||||
|
||||
it('returns the android target sdk version', async () => {
|
||||
const mockValue = 'AndroidApiLevelAuto';
|
||||
jest.spyOn(Input, 'androidTargetSdkVersion', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect(BuildParameters.create()).resolves.toEqual(
|
||||
expect.objectContaining({ androidTargetSdkVersion: mockValue }),
|
||||
);
|
||||
});
|
||||
@@ -179,7 +165,7 @@ describe('BuildParameters', () => {
|
||||
it('returns the custom parameters', async () => {
|
||||
const mockValue = '-profile SomeProfile -someBoolean -someValue exampleValue';
|
||||
jest.spyOn(Input, 'customParameters', 'get').mockReturnValue(mockValue);
|
||||
await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ customParameters: mockValue }));
|
||||
expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ customParameters: mockValue }));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import { customAlphabet } from 'nanoid';
|
||||
import * as core from '@actions/core';
|
||||
import AndroidVersioning from './android-versioning';
|
||||
import CloudRunnerConstants from './cloud-runner/services/cloud-runner-constants';
|
||||
import CloudRunnerNamespace from './cloud-runner/services/cloud-runner-namespace';
|
||||
import Input from './input';
|
||||
import Platform from './platform';
|
||||
import UnityVersioning from './unity-versioning';
|
||||
@@ -27,17 +30,29 @@ class BuildParameters {
|
||||
public androidSdkManagerParameters!: string;
|
||||
public customParameters!: string;
|
||||
public sshAgent!: string;
|
||||
public cloudRunnerCluster!: string;
|
||||
public awsBaseStackName!: string;
|
||||
public gitPrivateToken!: string;
|
||||
public remoteBuildCluster!: string;
|
||||
public awsStackName!: string;
|
||||
public kubeConfig!: string;
|
||||
public githubToken!: string;
|
||||
public remoteBuildMemory!: string;
|
||||
public remoteBuildCpu!: string;
|
||||
public cloudRunnerMemory!: string;
|
||||
public cloudRunnerCpu!: string;
|
||||
public kubeVolumeSize!: string;
|
||||
public kubeVolume!: string;
|
||||
public chownFilesTo!: string;
|
||||
|
||||
public postBuildSteps!: string;
|
||||
public preBuildSteps!: string;
|
||||
public customJob!: string;
|
||||
public runNumber!: string;
|
||||
public branch!: string;
|
||||
public githubRepo!: string;
|
||||
public gitSha!: string;
|
||||
public logId!: string;
|
||||
public buildGuid!: string;
|
||||
|
||||
static async create(): Promise<BuildParameters> {
|
||||
const buildFile = this.parseBuildFile(Input.buildName, Input.targetPlatform, Input.androidAppBundle);
|
||||
|
||||
@@ -87,16 +102,27 @@ class BuildParameters {
|
||||
androidSdkManagerParameters,
|
||||
customParameters: Input.customParameters,
|
||||
sshAgent: Input.sshAgent,
|
||||
gitPrivateToken: Input.gitPrivateToken,
|
||||
gitPrivateToken: await Input.gitPrivateToken(),
|
||||
chownFilesTo: Input.chownFilesTo,
|
||||
remoteBuildCluster: Input.remoteBuildCluster,
|
||||
awsStackName: Input.awsStackName,
|
||||
cloudRunnerCluster: Input.cloudRunnerCluster,
|
||||
awsBaseStackName: Input.awsBaseStackName,
|
||||
kubeConfig: Input.kubeConfig,
|
||||
githubToken: Input.githubToken,
|
||||
remoteBuildMemory: Input.remoteBuildMemory,
|
||||
remoteBuildCpu: Input.remoteBuildCpu,
|
||||
githubToken: await Input.githubToken(),
|
||||
cloudRunnerMemory: Input.cloudRunnerMemory,
|
||||
cloudRunnerCpu: Input.cloudRunnerCpu,
|
||||
kubeVolumeSize: Input.kubeVolumeSize,
|
||||
kubeVolume: Input.kubeVolume,
|
||||
postBuildSteps: Input.postBuildSteps,
|
||||
preBuildSteps: Input.preBuildSteps,
|
||||
customJob: Input.customJob,
|
||||
runNumber: Input.runNumber,
|
||||
branch: await Input.branch(),
|
||||
githubRepo: await Input.githubRepo(),
|
||||
remoteBuildCluster: Input.cloudRunnerCluster,
|
||||
awsStackName: Input.awsBaseStackName,
|
||||
gitSha: Input.gitSha,
|
||||
logId: customAlphabet(CloudRunnerConstants.alphabet, 9)(),
|
||||
buildGuid: CloudRunnerNamespace.generateBuildName(Input.runNumber, Input.targetPlatform),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
src/model/cli/cli-decorator.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
const targets = new Array();
export function CliFunction(key: string, description: string) {
  return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) {
    targets.push({
      target,
      propertyKey,
      descriptor,
      key,
      description,
    });
  };
}
export function GetCliFunctions(key) {
  return targets.find((x) => x.key === key);
}
export function GetAllCliModes() {
  return targets.map((x) => {
    return {
      key: x.key,
      description: x.description,
    };
  });
}
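As an aside, a minimal sketch of how this registration is consumed (the class name, mode key, and message below are illustrative; the lookup mirrors what CLI.RunCli in cli.ts does with options.mode, and it assumes experimentalDecorators is enabled, as the decorated CLI class in this commit requires):

import { CliFunction, GetCliFunctions } from './cli-decorator';

class ExampleCommands {
  // Decorating the static method pushes its target, property key, mode key and description into `targets`.
  @CliFunction(`example`, `prints a greeting`)
  static async example() {
    console.log('hello from a CLI mode');
  }
}

// Resolve the registered entry by its key and invoke the decorated static method.
const entry = GetCliFunctions('example');
entry.target[entry.propertyKey]();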
src/model/cli/cli.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
import { Command } from 'commander-ts';
|
||||
import { BuildParameters, CloudRunner, ImageTag, Input } from '..';
|
||||
import * as core from '@actions/core';
|
||||
import { ActionYamlReader } from '../input-readers/action-yaml';
|
||||
import CloudRunnerLogger from '../cloud-runner/services/cloud-runner-logger';
|
||||
import { CliFunction, GetAllCliModes, GetCliFunctions } from './cli-decorator';
|
||||
import { RemoteClientLogger } from './remote-client/remote-client-services/remote-client-logger';
|
||||
import { CloudRunnerState } from '../cloud-runner/state/cloud-runner-state';
|
||||
import { SetupCloudRunnerRepository } from './remote-client/setup-cloud-runner-repository';
|
||||
import * as SDK from 'aws-sdk';
|
||||
|
||||
export class CLI {
|
||||
static async RunCli(options: any): Promise<void> {
|
||||
Input.githubInputEnabled = false;
|
||||
|
||||
const results = GetCliFunctions(options.mode);
|
||||
|
||||
if (results === undefined || results.length === 0) {
|
||||
throw new Error('no CLI mode found');
|
||||
}
|
||||
|
||||
CloudRunnerLogger.log(`Entrypoint: ${results.key}`);
|
||||
|
||||
options.versioning = 'None';
|
||||
Input.cliOptions = options;
|
||||
return await results.target[results.propertyKey]();
|
||||
}
|
||||
static isCliMode(options: any) {
|
||||
return options.mode !== undefined && options.mode !== '';
|
||||
}
|
||||
|
||||
public static SetupCli() {
|
||||
const program = new Command();
|
||||
program.version('0.0.1');
|
||||
const properties = Object.getOwnPropertyNames(Input);
|
||||
core.info(`\n`);
|
||||
core.info(`INPUT:`);
|
||||
const actionYamlReader: ActionYamlReader = new ActionYamlReader();
|
||||
for (const element of properties) {
|
||||
program.option(`--${element} <${element}>`, actionYamlReader.GetActionYamlValue(element));
|
||||
if (Input[element] !== undefined && Input[element] !== '' && typeof Input[element] !== `function`) {
|
||||
core.info(`${element} ${Input[element]}`);
|
||||
}
|
||||
}
|
||||
core.info(`\n`);
|
||||
program.option(
|
||||
'-m, --mode <mode>',
|
||||
GetAllCliModes()
|
||||
.map((x) => `${x.key} (${x.description})`)
|
||||
.join(` | `),
|
||||
);
|
||||
program.parse(process.argv);
|
||||
|
||||
return program.opts();
|
||||
}
|
||||
|
||||
@CliFunction(`cli`, `runs a cloud runner build`)
|
||||
public static async CLIBuild(): Promise<string> {
|
||||
const buildParameter = await BuildParameters.create();
|
||||
const baseImage = new ImageTag(buildParameter);
|
||||
return await CloudRunner.run(buildParameter, baseImage.toString());
|
||||
}
|
||||
|
||||
@CliFunction(`remote-cli`, `sets up a repository, usually before a game-ci build`)
|
||||
static async runRemoteClientJob() {
|
||||
const buildParameter = JSON.parse(process.env.BUILD_PARAMETERS || '{}');
|
||||
RemoteClientLogger.log(`Build Params:
|
||||
${JSON.stringify(buildParameter, undefined, 4)}
|
||||
`);
|
||||
CloudRunnerState.setup(buildParameter);
|
||||
await SetupCloudRunnerRepository.run();
|
||||
}
|
||||
|
||||
@CliFunction(`cach-push`, `push to cache`)
|
||||
static async cachePush() {}
|
||||
|
||||
@CliFunction(`cach-pull`, `pull from cache`)
|
||||
static async cachePull() {}
|
||||
|
||||
@CliFunction(`garbage-collect-aws`, `garbage collect aws`)
|
||||
static async garbageCollectAws() {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const CF = new SDK.CloudFormation();
|
||||
|
||||
const stacks = await CF.listStacks().promise();
|
||||
CloudRunnerLogger.log(JSON.stringify(stacks, undefined, 4));
|
||||
}
|
||||
}
|
||||
src/model/cli/remote-client/remote-client-services/caching.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
import { assert } from 'console';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { Input } from '../../..';
|
||||
import CloudRunnerLogger from '../../../cloud-runner/services/cloud-runner-logger';
|
||||
import { CloudRunnerState } from '../../../cloud-runner/state/cloud-runner-state';
|
||||
import { CloudRunnerSystem } from './cloud-runner-system';
|
||||
import { LFSHashing } from './lfs-hashing';
|
||||
import { RemoteClientLogger } from './remote-client-logger';
|
||||
|
||||
export class Caching {
|
||||
public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheKey: string) {
|
||||
const startPath = process.cwd();
|
||||
try {
|
||||
if (!fs.existsSync(cacheFolder)) {
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
|
||||
}
|
||||
process.chdir(path.resolve(sourceFolder, '..'));
|
||||
|
||||
if (Input.cloudRunnerTests) {
|
||||
CloudRunnerLogger.log(
|
||||
`Hashed cache folder ${await LFSHashing.hashAllFiles(sourceFolder)} ${sourceFolder} ${path.basename(
|
||||
sourceFolder,
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`ls ${path.basename(sourceFolder)}`);
|
||||
}
|
||||
await CloudRunnerSystem.Run(`zip ${cacheKey}.zip ${path.basename(sourceFolder)}`);
|
||||
assert(fs.existsSync(`${cacheKey}.zip`), 'cache zip exists');
|
||||
assert(fs.existsSync(path.basename(sourceFolder)), 'source folder exists');
|
||||
await CloudRunnerSystem.Run(`mv ${cacheKey}.zip ${cacheFolder}`);
|
||||
RemoteClientLogger.log(`moved ${cacheKey}.zip to ${cacheFolder}`);
|
||||
assert(fs.existsSync(`${path.join(cacheFolder, cacheKey)}.zip`), 'cache zip exists inside cache folder');
|
||||
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`ls ${cacheFolder}`);
|
||||
}
|
||||
} catch (error) {
|
||||
process.chdir(`${startPath}`);
|
||||
throw error;
|
||||
}
|
||||
process.chdir(`${startPath}`);
|
||||
}
|
||||
public static async PullFromCache(cacheFolder: string, destinationFolder: string, cacheKey: string = ``) {
|
||||
const startPath = process.cwd();
|
||||
RemoteClientLogger.log(`Caching for ${path.basename(destinationFolder)}`);
|
||||
try {
|
||||
if (!fs.existsSync(cacheFolder)) {
|
||||
fs.mkdirSync(cacheFolder);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(destinationFolder)) {
|
||||
fs.mkdirSync(destinationFolder);
|
||||
}
|
||||
|
||||
const latestInBranch = await (await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`))
|
||||
.replace(/\n/g, ``)
|
||||
.replace('.zip', '');
|
||||
|
||||
process.chdir(cacheFolder);
|
||||
|
||||
const cacheSelection = cacheKey !== `` && fs.existsSync(`${cacheKey}.zip`) ? cacheKey : latestInBranch;
|
||||
await CloudRunnerLogger.log(`cache key ${cacheKey} selection ${cacheSelection}`);
|
||||
|
||||
if (fs.existsSync(`${cacheSelection}.zip`)) {
|
||||
const resultsFolder = `results${CloudRunnerState.buildParams.buildGuid}`;
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`tree ${destinationFolder}`);
|
||||
}
|
||||
RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.zip`);
|
||||
assert(`${fs.existsSync(destinationFolder)}`);
|
||||
assert(`${fs.existsSync(`${cacheSelection}.zip`)}`);
|
||||
const fullResultsFolder = path.join(cacheFolder, resultsFolder);
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
|
||||
}
|
||||
await CloudRunnerSystem.Run(`unzip ${cacheSelection}.zip -d ${path.basename(resultsFolder)}`);
|
||||
RemoteClientLogger.log(`cache item extracted to ${fullResultsFolder}`);
|
||||
assert(`${fs.existsSync(fullResultsFolder)}`);
|
||||
const destinationParentFolder = path.resolve(destinationFolder, '..');
|
||||
if (fs.existsSync(destinationFolder)) {
|
||||
fs.rmSync(destinationFolder, { recursive: true, force: true });
|
||||
}
|
||||
await CloudRunnerSystem.Run(
|
||||
`mv "${fullResultsFolder}/${path.basename(destinationFolder)}" "${destinationParentFolder}"`,
|
||||
);
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`tree ${destinationParentFolder}`);
|
||||
}
|
||||
} else {
|
||||
RemoteClientLogger.logWarning(`cache item ${cacheKey} doesn't exist ${destinationFolder}`);
|
||||
if (cacheSelection !== ``) {
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
|
||||
}
|
||||
RemoteClientLogger.logWarning(`cache item ${cacheKey}.zip doesn't exist ${destinationFolder}`);
|
||||
throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
process.chdir(`${startPath}`);
|
||||
throw error;
|
||||
}
|
||||
process.chdir(`${startPath}`);
|
||||
}
|
||||
|
||||
public static handleCachePurging() {
|
||||
if (process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined) {
|
||||
RemoteClientLogger.log(`purging ${CloudRunnerState.purgeRemoteCaching}`);
|
||||
fs.rmdirSync(CloudRunnerState.cacheFolder, { recursive: true });
|
||||
}
|
||||
}
|
||||
}
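A minimal usage sketch of the cache round trip above (the helper name, cache folder, source folder, and key are all illustrative, and CloudRunnerState.setup must already have been called so buildParams is populated):

import { Caching } from './remote-client-services/caching';

export async function roundTripLibraryCache() {
  // Zip ./Library and move example-key.zip into the cache folder.
  await Caching.PushToCache(`/data/cache/Library`, `./Library`, `example-key`);
  // Restore it later; when the key is missing, the newest zip in the cache folder is used instead.
  await Caching.PullFromCache(`/data/cache/Library`, `./Library`, `example-key`);
}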
@@ -0,0 +1,37 @@
import { exec } from 'child_process';
import { RemoteClientLogger } from './remote-client-logger';

export class CloudRunnerSystem {
  public static async Run(command: string, suppressError = false) {
    for (const element of command.split(`\n`)) {
      RemoteClientLogger.log(element);
    }
    return await new Promise<string>((promise) => {
      let output = '';
      const child = exec(command, (error, stdout, stderr) => {
        if (error && !suppressError) {
          throw error;
        }
        if (stderr) {
          const diagnosticOutput = `${stderr.toString()}`;
          RemoteClientLogger.logCliDiagnostic(diagnosticOutput);
          output += diagnosticOutput;
          return;
        }
        const outputChunk = `${stdout}`;
        output += outputChunk;
      });
      child.on('close', function (code) {
        RemoteClientLogger.log(`[Exit code ${code}]`);
        if (code !== 0 && !suppressError) {
          throw new Error(output);
        }
        const outputLines = output.split(`\n`);
        for (const element of outputLines) {
          RemoteClientLogger.log(element);
        }
        promise(output);
      });
    });
  }
}
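A short usage sketch for the wrapper above (the commands and helper name are illustrative): Run resolves with the captured output once the child process closes, and with suppressError a non-zero exit code is logged rather than thrown.

import { CloudRunnerSystem } from './cloud-runner-system';

export async function listWorkspace() {
  // Resolves with the captured stdout when the command exits with code 0.
  const listing = await CloudRunnerSystem.Run(`ls -lh`);
  // A failing command still resolves with whatever output was captured, because errors are suppressed.
  await CloudRunnerSystem.Run(`tree does-not-exist`, true);
  return listing;
}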
@@ -0,0 +1,42 @@
import path from 'path';
import { CloudRunnerState } from '../../../cloud-runner/state/cloud-runner-state';
import { CloudRunnerSystem } from './cloud-runner-system';
import fs from 'fs';
import { assert } from 'console';
import { Input } from '../../..';
import { RemoteClientLogger } from './remote-client-logger';

export class LFSHashing {
  public static async createLFSHashFiles() {
    try {
      await CloudRunnerSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
      await CloudRunnerSystem.Run(`md5sum .lfs-assets-guid > .lfs-assets-guid-sum`);
      assert(fs.existsSync(`.lfs-assets-guid-sum`));
      assert(fs.existsSync(`.lfs-assets-guid`));
      const lfsHashes = {
        lfsGuid: fs
          .readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8')
          .replace(/\n/g, ``),
        lfsGuidSum: fs
          .readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid-sum`)}`, 'utf8')
          .replace(/\n/g, ``),
      };
      if (Input.cloudRunnerTests) {
        RemoteClientLogger.log(lfsHashes.lfsGuid);
        RemoteClientLogger.log(lfsHashes.lfsGuidSum);
      }
      return lfsHashes;
    } catch (error) {
      throw error;
    }
  }
  public static async hashAllFiles(folder: string) {
    const startPath = process.cwd();
    process.chdir(folder);
    const result = await (await CloudRunnerSystem.Run(`find -type f -exec md5sum "{}" + | sort | md5sum`))
      .replace(/\n/g, '')
      .split(` `)[0];
    process.chdir(startPath);
    return result;
  }
}
@@ -0,0 +1,19 @@
import CloudRunnerLogger from '../../../cloud-runner/services/cloud-runner-logger';

export class RemoteClientLogger {
  public static log(message: string) {
    CloudRunnerLogger.log(`[Client] ${message}`);
  }

  public static logCliError(message: string) {
    CloudRunnerLogger.log(`[Client][Error] ${message}`);
  }

  public static logCliDiagnostic(message: string) {
    CloudRunnerLogger.log(`[Client][Diagnostic] ${message}`);
  }

  public static logWarning(message) {
    CloudRunnerLogger.logWarning(message);
  }
}
src/model/cli/remote-client/setup-cloud-runner-repository.ts (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
import fs from 'fs';
|
||||
import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
|
||||
import { Caching } from './remote-client-services/caching';
|
||||
import { LFSHashing } from './remote-client-services/lfs-hashing';
|
||||
import { CloudRunnerSystem } from './remote-client-services/cloud-runner-system';
|
||||
import { Input } from '../..';
|
||||
import { RemoteClientLogger } from './remote-client-services/remote-client-logger';
|
||||
import path from 'path';
|
||||
import { assert } from 'console';
|
||||
|
||||
export class SetupCloudRunnerRepository {
|
||||
public static async run() {
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.buildPathFull}`);
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.repoPathFull}`);
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.cacheFolderFull}`);
|
||||
|
||||
process.chdir(CloudRunnerState.repoPathFull);
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`ls -lh`);
|
||||
await CloudRunnerSystem.Run(`tree`);
|
||||
}
|
||||
await SetupCloudRunnerRepository.cloneRepoWithoutLFSFiles();
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`ls -lh`);
|
||||
await CloudRunnerSystem.Run(`tree`);
|
||||
}
|
||||
const lfsHashes = await LFSHashing.createLFSHashFiles();
|
||||
if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
|
||||
RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
|
||||
}
|
||||
await Caching.PullFromCache(
|
||||
CloudRunnerState.lfsCacheFolderFull,
|
||||
CloudRunnerState.lfsDirectoryFull,
|
||||
`${lfsHashes.lfsGuid}`,
|
||||
);
|
||||
await SetupCloudRunnerRepository.pullLatestLFS();
|
||||
await Caching.PushToCache(
|
||||
CloudRunnerState.lfsCacheFolderFull,
|
||||
CloudRunnerState.lfsDirectoryFull,
|
||||
`${lfsHashes.lfsGuid}`,
|
||||
);
|
||||
await Caching.PullFromCache(CloudRunnerState.libraryCacheFolderFull, CloudRunnerState.libraryFolderFull);
|
||||
Caching.handleCachePurging();
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private static async cloneRepoWithoutLFSFiles() {
|
||||
try {
|
||||
process.chdir(`${CloudRunnerState.repoPathFull}`);
|
||||
RemoteClientLogger.log(`Initializing source repository for cloning with caching of LFS files`);
|
||||
await CloudRunnerSystem.Run(`git config --global advice.detachedHead false`);
|
||||
RemoteClientLogger.log(`Cloning the repository being built:`);
|
||||
await CloudRunnerSystem.Run(`git lfs install --skip-smudge`);
|
||||
await CloudRunnerSystem.Run(
|
||||
`git clone ${CloudRunnerState.targetBuildRepoUrl} ${path.resolve(
|
||||
`..`,
|
||||
path.basename(CloudRunnerState.repoPathFull),
|
||||
)}`,
|
||||
);
|
||||
assert(fs.existsSync(`.git`));
|
||||
RemoteClientLogger.log(`${CloudRunnerState.buildParams.branch}`);
|
||||
await CloudRunnerSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
|
||||
assert(fs.existsSync(path.join(`.git`, `lfs`)), 'LFS folder should not exist before caching');
|
||||
RemoteClientLogger.log(`Checked out ${process.env.GITHUB_SHA}`);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private static async pullLatestLFS() {
|
||||
await CloudRunnerSystem.Run(`ls -lh ${CloudRunnerState.lfsDirectoryFull}/..`);
|
||||
process.chdir(CloudRunnerState.repoPathFull);
|
||||
await CloudRunnerSystem.Run(`git lfs pull`);
|
||||
RemoteClientLogger.log(`pulled latest LFS files`);
|
||||
assert(fs.existsSync(CloudRunnerState.lfsDirectoryFull));
|
||||
await CloudRunnerSystem.Run(`ls -lh ${CloudRunnerState.lfsDirectoryFull}/..`);
|
||||
}
|
||||
}
|
||||
src/model/cloud-runner/aws/aws-base-stack.ts (new file, 106 lines)
@@ -0,0 +1,106 @@
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import * as core from '@actions/core';
|
||||
import * as SDK from 'aws-sdk';
|
||||
import * as fs from 'fs';
|
||||
import path from 'path';
|
||||
const crypto = require('crypto');
|
||||
|
||||
export class AWSBaseStack {
|
||||
constructor(baseStackName: string) {
|
||||
this.baseStackName = baseStackName;
|
||||
}
|
||||
private baseStackName: string;
|
||||
|
||||
async setupBaseStack(CF: SDK.CloudFormation) {
|
||||
const baseStackName = this.baseStackName;
|
||||
|
||||
const baseStack = fs.readFileSync(path.join(__dirname, 'cloud-formations', 'base-setup.yml'), 'utf8');
|
||||
|
||||
// Cloud Formation Input
|
||||
const describeStackInput: SDK.CloudFormation.DescribeStacksInput = {
|
||||
StackName: baseStackName,
|
||||
};
|
||||
const parametersWithoutHash: SDK.CloudFormation.Parameter[] = [
|
||||
{ ParameterKey: 'EnvironmentName', ParameterValue: baseStackName },
|
||||
];
|
||||
const parametersHash = crypto
|
||||
.createHash('md5')
|
||||
.update(baseStack + JSON.stringify(parametersWithoutHash))
|
||||
.digest('hex');
|
||||
const parameters: SDK.CloudFormation.Parameter[] = [
|
||||
...parametersWithoutHash,
|
||||
...[{ ParameterKey: 'Version', ParameterValue: parametersHash }],
|
||||
];
|
||||
const updateInput: SDK.CloudFormation.UpdateStackInput = {
|
||||
StackName: baseStackName,
|
||||
TemplateBody: baseStack,
|
||||
Parameters: parameters,
|
||||
Capabilities: ['CAPABILITY_IAM'],
|
||||
};
|
||||
const createStackInput: SDK.CloudFormation.CreateStackInput = {
|
||||
StackName: baseStackName,
|
||||
TemplateBody: baseStack,
|
||||
Parameters: parameters,
|
||||
Capabilities: ['CAPABILITY_IAM'],
|
||||
};
|
||||
|
||||
const stacks = await CF.listStacks({
|
||||
StackStatusFilter: ['UPDATE_COMPLETE', 'CREATE_COMPLETE', 'ROLLBACK_COMPLETE'],
|
||||
}).promise();
|
||||
const stackNames = stacks.StackSummaries?.map((x) => x.StackName) || [];
|
||||
const stackExists: Boolean = stackNames.includes(baseStackName) || false;
|
||||
const describeStack = async () => {
|
||||
return await CF.describeStacks(describeStackInput).promise();
|
||||
};
|
||||
try {
|
||||
if (!stackExists) {
|
||||
CloudRunnerLogger.log(`${baseStackName} stack does not exist (${JSON.stringify(stackNames)})`);
|
||||
await CF.createStack(createStackInput).promise();
|
||||
CloudRunnerLogger.log(`created stack (version: ${parametersHash})`);
|
||||
}
|
||||
const CFState = await describeStack();
|
||||
let stack = CFState.Stacks?.[0];
|
||||
if (!stack) {
|
||||
throw new Error(`Base stack doesn't exist, even after creation, stackExists check: ${stackExists}`);
|
||||
}
|
||||
const stackVersion = stack.Parameters?.find((x) => x.ParameterKey === 'Version')?.ParameterValue;
|
||||
|
||||
if (stack.StackStatus === 'CREATE_IN_PROGRESS') {
|
||||
await CF.waitFor('stackCreateComplete', describeStackInput).promise();
|
||||
}
|
||||
|
||||
if (stackExists) {
|
||||
CloudRunnerLogger.log(`Base stack exists (version: ${stackVersion}, local version: ${parametersHash})`);
|
||||
if (parametersHash !== stackVersion) {
|
||||
CloudRunnerLogger.log(`Attempting update of base stack`);
|
||||
try {
|
||||
await CF.updateStack(updateInput).promise();
|
||||
} catch (error: any) {
|
||||
if (error['message'].includes('No updates are to be performed')) {
|
||||
CloudRunnerLogger.log(`No updates are to be performed`);
|
||||
} else {
|
||||
CloudRunnerLogger.log(`Update Failed (Stack name: ${baseStackName})`);
|
||||
CloudRunnerLogger.log(error['message']);
|
||||
}
|
||||
CloudRunnerLogger.log(`Continuing...`);
|
||||
}
|
||||
} else {
|
||||
CloudRunnerLogger.log(`No update required`);
|
||||
}
|
||||
stack = (await describeStack()).Stacks?.[0];
|
||||
if (!stack) {
|
||||
throw new Error(
|
||||
`Base stack doesn't exist, even after updating and creation, stackExists check: ${stackExists}`,
|
||||
);
|
||||
}
|
||||
if (stack.StackStatus === 'UPDATE_IN_PROGRESS') {
|
||||
await CF.waitFor('stackUpdateComplete', describeStackInput).promise();
|
||||
}
|
||||
}
|
||||
CloudRunnerLogger.log('base stack is now ready');
|
||||
} catch (error) {
|
||||
core.error(JSON.stringify(await describeStack(), undefined, 4));
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
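For context, a hedged sketch of how this class could be driven (the helper name, stack name, and region below are placeholders): setupBaseStack creates the shared stack on first use and only issues an update when the hashed template-plus-parameters 'Version' differs from the deployed one.

import * as SDK from 'aws-sdk';
import { AWSBaseStack } from './aws-base-stack';

export async function ensureBaseStack() {
  process.env.AWS_REGION = process.env.AWS_REGION || 'eu-west-2'; // placeholder region
  const cloudFormation = new SDK.CloudFormation();
  // Creates or updates the long-lived base stack that the per-build job stacks attach to.
  await new AWSBaseStack('example-base-stack').setupBaseStack(cloudFormation);
}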
src/model/cloud-runner/aws/aws-error.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import * as SDK from 'aws-sdk';
import * as core from '@actions/core';
import { Input } from '../..';

export class AWSError {
  static async handleStackCreationFailure(error: any, CF: SDK.CloudFormation, taskDefStackName: string) {
    CloudRunnerLogger.log('aws error: ');
    core.error(JSON.stringify(error, undefined, 4));
    if (Input.cloudRunnerTests) {
      CloudRunnerLogger.log('Getting events and resources for task stack');
      const events = (await CF.describeStackEvents({ StackName: taskDefStackName }).promise()).StackEvents;
      CloudRunnerLogger.log(JSON.stringify(events, undefined, 4));
    }
  }
}
src/model/cloud-runner/aws/aws-job-stack.ts (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
import * as SDK from 'aws-sdk';
|
||||
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import { AWSTemplates } from './aws-templates';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { AWSError } from './aws-error';
|
||||
|
||||
export class AWSJobStack {
|
||||
private baseStackName: string;
|
||||
constructor(baseStackName: string) {
|
||||
this.baseStackName = baseStackName;
|
||||
}
|
||||
|
||||
public async setupCloudFormations(
|
||||
CF: SDK.CloudFormation,
|
||||
buildGuid: string,
|
||||
image: string,
|
||||
entrypoint: string[],
|
||||
commands: string,
|
||||
mountdir: string,
|
||||
workingdir: string,
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<CloudRunnerAWSTaskDef> {
|
||||
const taskDefStackName = `${this.baseStackName}-${buildGuid}`;
|
||||
let taskDefCloudFormation = AWSTemplates.readTaskCloudFormationTemplate();
|
||||
for (const secret of secrets) {
|
||||
secret.ParameterKey = `${buildGuid.replace(/[^\dA-Za-z]/g, '')}${secret.ParameterKey.replace(
|
||||
/[^\dA-Za-z]/g,
|
||||
'',
|
||||
)}`;
|
||||
if (typeof secret.ParameterValue == 'number') {
|
||||
secret.ParameterValue = `${secret.ParameterValue}`;
|
||||
}
|
||||
if (!secret.ParameterValue || secret.ParameterValue === '') {
|
||||
secrets = secrets.filter((x) => x !== secret);
|
||||
continue;
|
||||
}
|
||||
taskDefCloudFormation = AWSTemplates.insertAtTemplate(
|
||||
taskDefCloudFormation,
|
||||
'p1 - input',
|
||||
AWSTemplates.getParameterTemplate(secret.ParameterKey),
|
||||
);
|
||||
taskDefCloudFormation = AWSTemplates.insertAtTemplate(
|
||||
taskDefCloudFormation,
|
||||
'p2 - secret',
|
||||
AWSTemplates.getSecretTemplate(`${secret.ParameterKey}`),
|
||||
);
|
||||
taskDefCloudFormation = AWSTemplates.insertAtTemplate(
|
||||
taskDefCloudFormation,
|
||||
'p3 - container def',
|
||||
AWSTemplates.getSecretDefinitionTemplate(secret.EnvironmentVariable, secret.ParameterKey),
|
||||
);
|
||||
}
|
||||
const secretsMappedToCloudFormationParameters = secrets.map((x) => {
|
||||
return { ParameterKey: x.ParameterKey.replace(/[^\dA-Za-z]/g, ''), ParameterValue: x.ParameterValue };
|
||||
});
|
||||
const parameters = [
|
||||
{
|
||||
ParameterKey: 'EnvironmentName',
|
||||
ParameterValue: this.baseStackName,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'ImageUrl',
|
||||
ParameterValue: image,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'ServiceName',
|
||||
ParameterValue: taskDefStackName,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'Command',
|
||||
ParameterValue: 'echo "this template should be overwritten when running a task"',
|
||||
},
|
||||
{
|
||||
ParameterKey: 'EntryPoint',
|
||||
ParameterValue: entrypoint.join(','),
|
||||
},
|
||||
{
|
||||
ParameterKey: 'WorkingDirectory',
|
||||
ParameterValue: workingdir,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'EFSMountDirectory',
|
||||
ParameterValue: mountdir,
|
||||
},
|
||||
...secretsMappedToCloudFormationParameters,
|
||||
];
|
||||
|
||||
let previousStackExists = true;
|
||||
while (previousStackExists) {
|
||||
previousStackExists = false;
|
||||
const stacks = await CF.listStacks().promise();
|
||||
if (!stacks.StackSummaries) {
|
||||
throw new Error('Faild to get stacks');
|
||||
}
|
||||
for (let index = 0; index < stacks.StackSummaries.length; index++) {
|
||||
const element = stacks.StackSummaries[index];
|
||||
if (element.StackName === taskDefStackName && element.StackStatus !== 'DELETE_COMPLETE') {
|
||||
previousStackExists = true;
|
||||
CloudRunnerLogger.log(`Previous stack still exists: ${JSON.stringify(element)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await CF.createStack({
|
||||
StackName: taskDefStackName,
|
||||
TemplateBody: taskDefCloudFormation,
|
||||
Capabilities: ['CAPABILITY_IAM'],
|
||||
Parameters: parameters,
|
||||
}).promise();
|
||||
CloudRunnerLogger.log('Creating cloud runner job');
|
||||
await CF.waitFor('stackCreateComplete', { StackName: taskDefStackName }).promise();
|
||||
} catch (error) {
|
||||
await AWSError.handleStackCreationFailure(
|
||||
error,
|
||||
CF,
|
||||
taskDefStackName,
|
||||
//taskDefCloudFormation,
|
||||
//parameters,
|
||||
//secrets,
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
|
||||
const taskDefResources = (
|
||||
await CF.describeStackResources({
|
||||
StackName: taskDefStackName,
|
||||
}).promise()
|
||||
).StackResources;
|
||||
|
||||
const baseResources = (await CF.describeStackResources({ StackName: this.baseStackName }).promise()).StackResources;
|
||||
|
||||
return {
|
||||
taskDefStackName,
|
||||
taskDefCloudFormation,
|
||||
taskDefResources,
|
||||
baseResources,
|
||||
};
|
||||
}
|
||||
}
|
||||
src/model/cloud-runner/aws/aws-task-runner.ts (new file, 228 lines)
@@ -0,0 +1,228 @@
|
||||
import * as AWS from 'aws-sdk';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import * as core from '@actions/core';
|
||||
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
|
||||
import * as zlib from 'zlib';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { Input } from '../..';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import { CloudRunnerStatics } from '../cloud-runner-statics';
|
||||
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
|
||||
|
||||
class AWSTaskRunner {
|
||||
static async runTask(
|
||||
taskDef: CloudRunnerAWSTaskDef,
|
||||
ECS: AWS.ECS,
|
||||
CF: AWS.CloudFormation,
|
||||
environment: CloudRunnerEnvironmentVariable[],
|
||||
buildGuid: string,
|
||||
commands: string,
|
||||
) {
|
||||
const cluster = taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ECSCluster')?.PhysicalResourceId || '';
|
||||
const taskDefinition =
|
||||
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'TaskDefinition')?.PhysicalResourceId || '';
|
||||
const SubnetOne =
|
||||
taskDef.baseResources?.find((x) => x.LogicalResourceId === 'PublicSubnetOne')?.PhysicalResourceId || '';
|
||||
const SubnetTwo =
|
||||
taskDef.baseResources?.find((x) => x.LogicalResourceId === 'PublicSubnetTwo')?.PhysicalResourceId || '';
|
||||
const ContainerSecurityGroup =
|
||||
taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ContainerSecurityGroup')?.PhysicalResourceId || '';
|
||||
const streamName =
|
||||
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'KinesisStream')?.PhysicalResourceId || '';
|
||||
|
||||
const task = await ECS.runTask({
|
||||
cluster,
|
||||
taskDefinition,
|
||||
platformVersion: '1.4.0',
|
||||
overrides: {
|
||||
containerOverrides: [
|
||||
{
|
||||
name: taskDef.taskDefStackName,
|
||||
environment,
|
||||
command: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(commands, CloudRunnerState.buildParams)],
|
||||
},
|
||||
],
|
||||
},
|
||||
launchType: 'FARGATE',
|
||||
networkConfiguration: {
|
||||
awsvpcConfiguration: {
|
||||
subnets: [SubnetOne, SubnetTwo],
|
||||
assignPublicIp: 'ENABLED',
|
||||
securityGroups: [ContainerSecurityGroup],
|
||||
},
|
||||
},
|
||||
}).promise();
|
||||
|
||||
CloudRunnerLogger.log('Cloud runner job is starting');
|
||||
const taskArn = task.tasks?.[0].taskArn || '';
|
||||
|
||||
try {
|
||||
await ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
|
||||
} catch (error_) {
|
||||
const error = error_ as Error;
|
||||
await new Promise((resolve) => setTimeout(resolve, 3000));
|
||||
CloudRunnerLogger.log(
|
||||
`Cloud runner job has ended ${
|
||||
(await AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].lastStatus
|
||||
}`,
|
||||
);
|
||||
|
||||
core.setFailed(error);
|
||||
core.error(error);
|
||||
}
|
||||
CloudRunnerLogger.log(`Cloud runner job is running`);
|
||||
|
||||
const output = await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
|
||||
const exitCode = (await AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].exitCode;
|
||||
CloudRunnerLogger.log(`Cloud runner job exit code ${exitCode}`);
|
||||
if (exitCode !== 0 && exitCode !== undefined) {
|
||||
core.error(
|
||||
`job failed with exit code ${exitCode} ${JSON.stringify(
|
||||
await ECS.describeTasks({ tasks: [taskArn], cluster }).promise(),
|
||||
undefined,
|
||||
4,
|
||||
)}`,
|
||||
);
|
||||
throw new Error(`job failed with exit code ${exitCode}`);
|
||||
} else {
|
||||
CloudRunnerLogger.log(`Cloud runner job has finished successfully`);
|
||||
return output;
|
||||
}
|
||||
}
|
||||
|
||||
static async describeTasks(ECS: AWS.ECS, clusterName: string, taskArn: string) {
|
||||
const tasks = await ECS.describeTasks({
|
||||
cluster: clusterName,
|
||||
tasks: [taskArn],
|
||||
}).promise();
|
||||
if (tasks.tasks?.[0]) {
|
||||
return tasks.tasks?.[0];
|
||||
} else {
|
||||
throw new Error('No task found');
|
||||
}
|
||||
}
|
||||
|
||||
static async streamLogsUntilTaskStops(
|
||||
ECS: AWS.ECS,
|
||||
CF: AWS.CloudFormation,
|
||||
taskDef: CloudRunnerAWSTaskDef,
|
||||
clusterName: string,
|
||||
taskArn: string,
|
||||
kinesisStreamName: string,
|
||||
) {
|
||||
const kinesis = new AWS.Kinesis();
|
||||
const stream = await AWSTaskRunner.getLogStream(kinesis, kinesisStreamName);
|
||||
let iterator = await AWSTaskRunner.getLogIterator(kinesis, stream);
|
||||
|
||||
CloudRunnerLogger.log(
|
||||
`Cloud runner job status is ${(await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn))?.lastStatus}`,
|
||||
);
|
||||
|
||||
const logBaseUrl = `https://${Input.region}.console.aws.amazon.com/cloudwatch/home?region=${CF.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
|
||||
CloudRunnerLogger.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
|
||||
let shouldReadLogs = true;
|
||||
let timestamp: number = 0;
|
||||
let output = '';
|
||||
while (shouldReadLogs) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 1500));
|
||||
const taskData = await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
|
||||
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
|
||||
({ iterator, shouldReadLogs, output } = await AWSTaskRunner.handleLogStreamIteration(
|
||||
kinesis,
|
||||
iterator,
|
||||
shouldReadLogs,
|
||||
taskDef,
|
||||
output,
|
||||
));
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
private static async handleLogStreamIteration(
|
||||
kinesis: AWS.Kinesis,
|
||||
iterator: string,
|
||||
shouldReadLogs: boolean,
|
||||
taskDef: CloudRunnerAWSTaskDef,
|
||||
output: string,
|
||||
) {
|
||||
const records = await kinesis
|
||||
.getRecords({
|
||||
ShardIterator: iterator,
|
||||
})
|
||||
.promise();
|
||||
iterator = records.NextShardIterator || '';
|
||||
({ shouldReadLogs, output } = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs, output));
|
||||
return { iterator, shouldReadLogs, output };
|
||||
}
|
||||
|
||||
private static checkStreamingShouldContinue(taskData: AWS.ECS.Task, timestamp: number, shouldReadLogs: boolean) {
|
||||
if (taskData?.lastStatus !== 'RUNNING') {
|
||||
if (timestamp === 0) {
|
||||
CloudRunnerLogger.log('## Cloud runner job stopped, streaming end of logs');
|
||||
timestamp = Date.now();
|
||||
}
|
||||
if (timestamp !== 0 && Date.now() - timestamp > 30000) {
|
||||
CloudRunnerLogger.log('## Cloud runner status is not RUNNING for 30 seconds, last query for logs');
|
||||
shouldReadLogs = false;
|
||||
}
|
||||
CloudRunnerLogger.log(`## Status of job: ${taskData.lastStatus}`);
|
||||
}
|
||||
return { timestamp, shouldReadLogs };
|
||||
}
|
||||
|
||||
private static logRecords(
|
||||
records,
|
||||
iterator: string,
|
||||
taskDef: CloudRunnerAWSTaskDef,
|
||||
shouldReadLogs: boolean,
|
||||
output: string,
|
||||
) {
|
||||
if (records.Records.length > 0 && iterator) {
|
||||
for (let index = 0; index < records.Records.length; index++) {
|
||||
const json = JSON.parse(
|
||||
zlib.gunzipSync(Buffer.from(records.Records[index].Data as string, 'base64')).toString('utf8'),
|
||||
);
|
||||
if (json.messageType === 'DATA_MESSAGE') {
|
||||
for (let logEventsIndex = 0; logEventsIndex < json.logEvents.length; logEventsIndex++) {
|
||||
let message = json.logEvents[logEventsIndex].message;
|
||||
if (json.logEvents[logEventsIndex].message.includes(`---${CloudRunnerState.buildParams.logId}`)) {
|
||||
CloudRunnerLogger.log('End of log transmission received');
|
||||
shouldReadLogs = false;
|
||||
} else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
|
||||
core.warning('LIBRARY NOT FOUND!');
|
||||
}
|
||||
message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
|
||||
if (Input.cloudRunnerTests) {
|
||||
output += message;
|
||||
}
|
||||
CloudRunnerLogger.log(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return { shouldReadLogs, output };
|
||||
}
|
||||
|
||||
private static async getLogStream(kinesis: AWS.Kinesis, kinesisStreamName: string) {
|
||||
return await kinesis
|
||||
.describeStream({
|
||||
StreamName: kinesisStreamName,
|
||||
})
|
||||
.promise();
|
||||
}
|
||||
|
||||
private static async getLogIterator(kinesis: AWS.Kinesis, stream) {
|
||||
return (
|
||||
(
|
||||
await kinesis
|
||||
.getShardIterator({
|
||||
ShardIteratorType: 'TRIM_HORIZON',
|
||||
StreamName: stream.StreamDescription.StreamName,
|
||||
ShardId: stream.StreamDescription.Shards[0].ShardId,
|
||||
})
|
||||
.promise()
|
||||
).ShardIterator || ''
|
||||
);
|
||||
}
|
||||
}
|
||||
export default AWSTaskRunner;
|
||||
src/model/cloud-runner/aws/aws-templates.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
import * as fs from 'fs';
|
||||
|
||||
export class AWSTemplates {
|
||||
public static getParameterTemplate(p1) {
|
||||
return `
|
||||
${p1}:
|
||||
Type: String
|
||||
Default: ''
|
||||
`;
|
||||
}
|
||||
|
||||
public static getSecretTemplate(p1) {
|
||||
return `
|
||||
${p1}Secret:
|
||||
Type: AWS::SecretsManager::Secret
|
||||
Properties:
|
||||
Name: '${p1}'
|
||||
SecretString: !Ref ${p1}
|
||||
`;
|
||||
}
|
||||
|
||||
public static getSecretDefinitionTemplate(p1, p2) {
|
||||
return `
|
||||
- Name: '${p1}'
|
||||
ValueFrom: !Ref ${p2}Secret
|
||||
`;
|
||||
}
|
||||
|
||||
public static insertAtTemplate(template, insertionKey, insertion) {
|
||||
const index = template.search(insertionKey) + insertionKey.length + '\n'.length;
|
||||
template = [template.slice(0, index), insertion, template.slice(index)].join('');
|
||||
return template;
|
||||
}
|
||||
|
||||
public static readTaskCloudFormationTemplate(): string {
|
||||
return fs.readFileSync(`${__dirname}/cloud-formations/task-def-formation.yml`, 'utf8');
|
||||
}
|
||||
}
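A small illustration of the splicing helpers above (the function and parameter names are illustrative, and readTaskCloudFormationTemplate assumes cloud-formations/task-def-formation.yml ships next to the compiled file): insertAtTemplate inserts new text just after the line containing the given marker, which is how AWSJobStack adds one parameter, secret, and container entry per build secret.

import { AWSTemplates } from './aws-templates';

export function addExampleSecretParameter(): string {
  const template = AWSTemplates.readTaskCloudFormationTemplate();
  // Splice a new CloudFormation parameter in right after the 'p1 - input' marker that AWSJobStack also targets.
  return AWSTemplates.insertAtTemplate(template, 'p1 - input', AWSTemplates.getParameterTemplate('ExampleSecret'));
}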
src/model/cloud-runner/aws/cloud-formations/base-setup.yml (new file, 391 lines)
@@ -0,0 +1,391 @@
|
||||
AWSTemplateFormatVersion: '2010-09-09'
|
||||
Description: AWS Fargate cluster that can span public and private subnets. Supports
|
||||
public facing load balancers, private internal load balancers, and
|
||||
both internal and external service discovery namespaces.
|
||||
Parameters:
|
||||
EnvironmentName:
|
||||
Type: String
|
||||
Default: development
|
||||
Description: 'Your deployment environment: DEV, QA , PROD'
|
||||
Version:
|
||||
Type: String
|
||||
Description: 'hash of template'
|
||||
|
||||
# ContainerPort:
|
||||
# Type: Number
|
||||
# Default: 80
|
||||
# Description: What port number the application inside the docker container is binding to
|
||||
|
||||
Mappings:
|
||||
# Hard values for the subnet masks. These masks define
|
||||
# the range of internal IP addresses that can be assigned.
|
||||
# The VPC can have all IP's from 10.0.0.0 to 10.0.255.255
|
||||
# There are four subnets which cover the ranges:
|
||||
#
|
||||
# 10.0.0.0 - 10.0.0.255
|
||||
# 10.0.1.0 - 10.0.1.255
|
||||
# 10.0.2.0 - 10.0.2.255
|
||||
# 10.0.3.0 - 10.0.3.255
|
||||
|
||||
SubnetConfig:
|
||||
VPC:
|
||||
CIDR: '10.0.0.0/16'
|
||||
PublicOne:
|
||||
CIDR: '10.0.0.0/24'
|
||||
PublicTwo:
|
||||
CIDR: '10.0.1.0/24'
|
||||
|
||||
Resources:
|
||||
# VPC in which containers will be networked.
|
||||
# It has two public subnets, and two private subnets.
|
||||
# We distribute the subnets across the first two available subnets
|
||||
# for the region, for high availability.
|
||||
VPC:
|
||||
Type: AWS::EC2::VPC
|
||||
Properties:
|
||||
EnableDnsSupport: true
|
||||
EnableDnsHostnames: true
|
||||
CidrBlock: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
|
||||
|
||||
EFSServerSecurityGroup:
|
||||
Type: AWS::EC2::SecurityGroup
|
||||
Properties:
|
||||
GroupName: 'efs-server-endpoints'
|
||||
GroupDescription: Which client ip addrs are allowed to access EFS server
|
||||
VpcId: !Ref 'VPC'
|
||||
SecurityGroupIngress:
|
||||
- IpProtocol: tcp
|
||||
FromPort: 2049
|
||||
ToPort: 2049
|
||||
SourceSecurityGroupId: !Ref ContainerSecurityGroup
|
||||
#CidrIp: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
|
||||
# A security group for the containers we will run in Fargate.
|
||||
# Rules are added to this security group based on what ingress you
|
||||
# add for the cluster.
|
||||
ContainerSecurityGroup:
|
||||
Type: AWS::EC2::SecurityGroup
|
||||
Properties:
|
||||
GroupName: 'task security group'
|
||||
GroupDescription: Access to the Fargate containers
|
||||
VpcId: !Ref 'VPC'
|
||||
# SecurityGroupIngress:
|
||||
# - IpProtocol: tcp
|
||||
# FromPort: !Ref ContainerPort
|
||||
# ToPort: !Ref ContainerPort
|
||||
# CidrIp: 0.0.0.0/0
|
||||
SecurityGroupEgress:
|
||||
- IpProtocol: -1
|
||||
FromPort: 2049
|
||||
ToPort: 2049
|
||||
CidrIp: '0.0.0.0/0'
|
||||
|
||||
# Two public subnets, where containers can have public IP addresses
|
||||
PublicSubnetOne:
|
||||
Type: AWS::EC2::Subnet
|
||||
Properties:
|
||||
AvailabilityZone: !Select
|
||||
- 0
|
||||
- Fn::GetAZs: !Ref 'AWS::Region'
|
||||
VpcId: !Ref 'VPC'
|
||||
CidrBlock: !FindInMap ['SubnetConfig', 'PublicOne', 'CIDR']
|
||||
# MapPublicIpOnLaunch: true
|
||||
|
||||
PublicSubnetTwo:
|
||||
Type: AWS::EC2::Subnet
|
||||
Properties:
|
||||
AvailabilityZone: !Select
|
||||
- 1
|
||||
- Fn::GetAZs: !Ref 'AWS::Region'
|
||||
VpcId: !Ref 'VPC'
|
||||
CidrBlock: !FindInMap ['SubnetConfig', 'PublicTwo', 'CIDR']
|
||||
# MapPublicIpOnLaunch: true
|
||||
|
||||
# Setup networking resources for the public subnets. Containers
|
||||
# in the public subnets have public IP addresses and the routing table
|
||||
# sends network traffic via the internet gateway.
|
||||
InternetGateway:
|
||||
Type: AWS::EC2::InternetGateway
|
||||
GatewayAttachement:
|
||||
Type: AWS::EC2::VPCGatewayAttachment
|
||||
Properties:
|
||||
VpcId: !Ref 'VPC'
|
||||
InternetGatewayId: !Ref 'InternetGateway'
|
||||
|
||||
# Attaching an Internet Gateway to a route table makes it public.
|
||||
PublicRouteTable:
|
||||
Type: AWS::EC2::RouteTable
|
||||
Properties:
|
||||
VpcId: !Ref 'VPC'
|
||||
PublicRoute:
|
||||
Type: AWS::EC2::Route
|
||||
DependsOn: GatewayAttachement
|
||||
Properties:
|
||||
RouteTableId: !Ref 'PublicRouteTable'
|
||||
DestinationCidrBlock: '0.0.0.0/0'
|
||||
GatewayId: !Ref 'InternetGateway'
|
||||
|
||||
# Attaching a public route table makes a subnet public.
|
||||
PublicSubnetOneRouteTableAssociation:
|
||||
Type: AWS::EC2::SubnetRouteTableAssociation
|
||||
Properties:
|
||||
SubnetId: !Ref PublicSubnetOne
|
||||
RouteTableId: !Ref PublicRouteTable
|
||||
PublicSubnetTwoRouteTableAssociation:
|
||||
Type: AWS::EC2::SubnetRouteTableAssociation
|
||||
Properties:
|
||||
SubnetId: !Ref PublicSubnetTwo
|
||||
RouteTableId: !Ref PublicRouteTable
|
||||
|
||||
# ECS Resources
|
||||
ECSCluster:
|
||||
Type: AWS::ECS::Cluster
|
||||
|
||||
# A role used to allow AWS Autoscaling to inspect stats and adjust scaleable targets
|
||||
# on your AWS account
|
||||
AutoscalingRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [application-autoscaling.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: service-autoscaling
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- 'application-autoscaling:*'
|
||||
- 'cloudwatch:DescribeAlarms'
|
||||
- 'cloudwatch:PutMetricAlarm'
|
||||
- 'ecs:DescribeServices'
|
||||
- 'ecs:UpdateService'
|
||||
Resource: '*'
|
||||
|
||||
# This is an IAM role which authorizes ECS to manage resources on your
|
||||
# account on your behalf, such as updating your load balancer with the
|
||||
# details of where your containers are, so that traffic can reach your
|
||||
# containers.
|
||||
ECSRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [ecs.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: ecs-service
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
# Rules which allow ECS to attach network interfaces to instances
|
||||
# on your behalf in order for awsvpc networking mode to work right
|
||||
- 'ec2:AttachNetworkInterface'
|
||||
- 'ec2:CreateNetworkInterface'
|
||||
- 'ec2:CreateNetworkInterfacePermission'
|
||||
- 'ec2:DeleteNetworkInterface'
|
||||
- 'ec2:DeleteNetworkInterfacePermission'
|
||||
- 'ec2:Describe*'
|
||||
- 'ec2:DetachNetworkInterface'
|
||||
|
||||
# Rules which allow ECS to update load balancers on your behalf
|
||||
# with the information about how to send traffic to your containers
|
||||
- 'elasticloadbalancing:DeregisterInstancesFromLoadBalancer'
|
||||
- 'elasticloadbalancing:DeregisterTargets'
|
||||
- 'elasticloadbalancing:Describe*'
|
||||
- 'elasticloadbalancing:RegisterInstancesWithLoadBalancer'
|
||||
- 'elasticloadbalancing:RegisterTargets'
|
||||
Resource: '*'
|
||||
|
||||
# This is a role which is used by the ECS tasks themselves.
|
||||
ECSTaskExecutionRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [ecs-tasks.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: AmazonECSTaskExecutionRolePolicy
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
# Allow the use of Secrets Manager
|
||||
- 'secretsmanager:GetSecretValue'
|
||||
- 'kms:Decrypt'
|
||||
|
||||
# Allow the ECS Tasks to download images from ECR
|
||||
- 'ecr:GetAuthorizationToken'
|
||||
- 'ecr:BatchCheckLayerAvailability'
|
||||
- 'ecr:GetDownloadUrlForLayer'
|
||||
- 'ecr:BatchGetImage'
|
||||
|
||||
# Allow the ECS tasks to upload logs to CloudWatch
|
||||
- 'logs:CreateLogStream'
|
||||
- 'logs:PutLogEvents'
|
||||
Resource: '*'
|
||||
|
||||
DeleteCFNLambdaExecutionRole:
|
||||
Type: 'AWS::IAM::Role'
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: 'Allow'
|
||||
Principal:
|
||||
Service: ['lambda.amazonaws.com']
|
||||
Action: 'sts:AssumeRole'
|
||||
Path: '/'
|
||||
Policies:
|
||||
- PolicyName: DeleteCFNLambdaExecutionRole
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: 'Allow'
|
||||
Action:
|
||||
- 'logs:CreateLogGroup'
|
||||
- 'logs:CreateLogStream'
|
||||
- 'logs:PutLogEvents'
|
||||
Resource: 'arn:aws:logs:*:*:*'
|
||||
- Effect: 'Allow'
|
||||
Action:
|
||||
- 'cloudformation:DeleteStack'
|
||||
- 'kinesis:DeleteStream'
|
||||
- 'secretsmanager:DeleteSecret'
|
||||
- 'kinesis:DescribeStreamSummary'
|
||||
- 'logs:DeleteLogGroup'
|
||||
- 'logs:DeleteSubscriptionFilter'
|
||||
- 'ecs:DeregisterTaskDefinition'
|
||||
- 'lambda:DeleteFunction'
|
||||
- 'lambda:InvokeFunction'
|
||||
- 'events:RemoveTargets'
|
||||
- 'events:DeleteRule'
|
||||
- 'lambda:RemovePermission'
|
||||
Resource: '*'
|
||||
|
||||
### CloudWatch Logs to Kinesis role
|
||||
CloudWatchIAMRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [logs.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: service-autoscaling
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- 'kinesis:PutRecord'
|
||||
Resource: '*'
|
||||
|
||||
#####################EFS#####################
|
||||
EfsFileStorage:
|
||||
Type: 'AWS::EFS::FileSystem'
|
||||
Properties:
|
||||
BackupPolicy:
|
||||
Status: ENABLED
|
||||
PerformanceMode: maxIO
|
||||
Encrypted: false
|
||||
|
||||
FileSystemPolicy:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: 'Allow'
|
||||
Action:
|
||||
- 'elasticfilesystem:ClientMount'
|
||||
- 'elasticfilesystem:ClientWrite'
|
||||
- 'elasticfilesystem:ClientRootAccess'
|
||||
Principal:
|
||||
AWS: '*'
|
||||
|
||||
MountTargetResource1:
|
||||
Type: AWS::EFS::MountTarget
|
||||
Properties:
|
||||
FileSystemId: !Ref EfsFileStorage
|
||||
SubnetId: !Ref PublicSubnetOne
|
||||
SecurityGroups:
|
||||
- !Ref EFSServerSecurityGroup
|
||||
|
||||
MountTargetResource2:
|
||||
Type: AWS::EFS::MountTarget
|
||||
Properties:
|
||||
FileSystemId: !Ref EfsFileStorage
|
||||
SubnetId: !Ref PublicSubnetTwo
|
||||
SecurityGroups:
|
||||
- !Ref EFSServerSecurityGroup
|
||||
|
||||
Outputs:
|
||||
EfsFileStorageId:
|
||||
Description: 'The ID of the EFS file system.'
|
||||
Value: !Ref EfsFileStorage
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:EfsFileStorageId
|
||||
ClusterName:
|
||||
Description: The name of the ECS cluster
|
||||
Value: !Ref 'ECSCluster'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ClusterName
|
||||
AutoscalingRole:
|
||||
Description: The ARN of the role used for autoscaling
|
||||
Value: !GetAtt 'AutoscalingRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:AutoscalingRole
|
||||
ECSRole:
|
||||
Description: The ARN of the ECS role
|
||||
Value: !GetAtt 'ECSRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ECSRole
|
||||
ECSTaskExecutionRole:
|
||||
Description: The ARN of the ECS task execution role
|
||||
Value: !GetAtt 'ECSTaskExecutionRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ECSTaskExecutionRole
|
||||
|
||||
DeleteCFNLambdaExecutionRole:
|
||||
Description: Lambda execution role for cleaning up cloud formations
|
||||
Value: !GetAtt 'DeleteCFNLambdaExecutionRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:DeleteCFNLambdaExecutionRole
|
||||
|
||||
CloudWatchIAMRole:
|
||||
Description: The ARN of the CloudWatch role for subscription filter
|
||||
Value: !GetAtt 'CloudWatchIAMRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:CloudWatchIAMRole
|
||||
VpcId:
|
||||
Description: The ID of the VPC that this stack is deployed in
|
||||
Value: !Ref 'VPC'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:VpcId
|
||||
PublicSubnetOne:
|
||||
Description: Public subnet one
|
||||
Value: !Ref 'PublicSubnetOne'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:PublicSubnetOne
|
||||
PublicSubnetTwo:
|
||||
Description: Public subnet two
|
||||
Value: !Ref 'PublicSubnetTwo'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:PublicSubnetTwo
|
||||
|
||||
ContainerSecurityGroup:
|
||||
Description: A security group used to allow Fargate containers to receive traffic
|
||||
Value: !Ref 'ContainerSecurityGroup'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ContainerSecurityGroup
|
||||
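The export names above (EnvironmentName:EfsFileStorageId, EnvironmentName:ClusterName, and so on) are what the task-definition template below resolves through Fn::ImportValue at deploy time. As a rough sketch only — the helper name and region handling are assumptions, not part of this commit — the same exports could also be read programmatically with the aws-sdk v2 CloudFormation client:

import * as SDK from 'aws-sdk';

// Hypothetical helper (not part of this commit): looks up a value exported by the
// base stack, e.g. 'development:EfsFileStorageId'.
async function readStackExport(environmentName: string, exportName: string): Promise<string | undefined> {
  const CF = new SDK.CloudFormation();
  let nextToken: string | undefined;
  do {
    const page = await CF.listExports({ NextToken: nextToken }).promise();
    const match = (page.Exports || []).find((x) => x.Name === `${environmentName}:${exportName}`);
    if (match) {
      return match.Value;
    }
    nextToken = page.NextToken;
  } while (nextToken);
  return undefined;
}

// Usage sketch: const efsId = await readStackExport('development', 'EfsFileStorageId');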
@@ -0,0 +1,221 @@
|
||||
AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: >-
|
||||
AWS Fargate cluster that can span public and private subnets. Supports public
|
||||
facing load balancers, private internal load balancers, and both internal and
|
||||
external service discovery namespaces.
|
||||
Parameters:
|
||||
EnvironmentName:
|
||||
Type: String
|
||||
Default: development
|
||||
Description: 'Your deployment environment: DEV, QA, PROD'
|
||||
ServiceName:
|
||||
Type: String
|
||||
Default: example
|
||||
Description: A name for the service
|
||||
ImageUrl:
|
||||
Type: String
|
||||
Default: nginx
|
||||
Description: >-
|
||||
The url of a docker image that contains the application process that will
|
||||
handle the traffic for this service
|
||||
ContainerPort:
|
||||
Type: Number
|
||||
Default: 80
|
||||
Description: What port number the application inside the docker container is binding to
|
||||
ContainerCpu:
|
||||
Type: Number
|
||||
Default: 1024
|
||||
Description: How much CPU to give the container. 1024 is 1 CPU
|
||||
ContainerMemory:
|
||||
Type: Number
|
||||
Default: 2048
|
||||
Description: How much memory in megabytes to give the container
|
||||
BUILDGUID:
|
||||
Type: String
|
||||
Default: ''
|
||||
Command:
|
||||
Type: String
|
||||
Default: 'ls'
|
||||
EntryPoint:
|
||||
Type: String
|
||||
Default: '/bin/sh'
|
||||
WorkingDirectory:
|
||||
Type: String
|
||||
Default: '/efsdata/'
|
||||
Role:
|
||||
Type: String
|
||||
Default: ''
|
||||
Description: >-
|
||||
(Optional) An IAM role to give the service's containers if the code within
|
||||
needs to access other AWS resources like S3 buckets, DynamoDB tables, etc
|
||||
EFSMountDirectory:
|
||||
Type: String
|
||||
Default: '/efsdata'
|
||||
# template secrets p1 - input
|
||||
Mappings:
|
||||
SubnetConfig:
|
||||
VPC:
|
||||
CIDR: 10.0.0.0/16
|
||||
PublicOne:
|
||||
CIDR: 10.0.0.0/24
|
||||
PublicTwo:
|
||||
CIDR: 10.0.1.0/24
|
||||
Conditions:
|
||||
HasCustomRole: !Not
|
||||
- !Equals
|
||||
- Ref: Role
|
||||
- ''
|
||||
Resources:
|
||||
LogGroup:
|
||||
Type: 'AWS::Logs::LogGroup'
|
||||
Properties:
|
||||
LogGroupName: !Ref ServiceName
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
SubscriptionFilter:
|
||||
Type: 'AWS::Logs::SubscriptionFilter'
|
||||
Properties:
|
||||
FilterPattern: ''
|
||||
RoleArn:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:CloudWatchIAMRole'
|
||||
LogGroupName: !Ref ServiceName
|
||||
DestinationArn:
|
||||
'Fn::GetAtt':
|
||||
- KinesisStream
|
||||
- Arn
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
DependsOn:
|
||||
- LogGroup
|
||||
- KinesisStream
|
||||
KinesisStream:
|
||||
Type: 'AWS::Kinesis::Stream'
|
||||
Properties:
|
||||
Name: !Ref ServiceName
|
||||
ShardCount: 1
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
|
||||
# template secrets p2 - secret
|
||||
|
||||
TaskDefinition:
|
||||
Type: 'AWS::ECS::TaskDefinition'
|
||||
Properties:
|
||||
Family: !Ref ServiceName
|
||||
Cpu: !Ref ContainerCpu
|
||||
Memory: !Ref ContainerMemory
|
||||
NetworkMode: awsvpc
|
||||
Volumes:
|
||||
- Name: efs-data
|
||||
EFSVolumeConfiguration:
|
||||
FilesystemId:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:EfsFileStorageId'
|
||||
TransitEncryption: ENABLED
|
||||
RequiresCompatibilities:
|
||||
- FARGATE
|
||||
ExecutionRoleArn:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:ECSTaskExecutionRole'
|
||||
TaskRoleArn:
|
||||
'Fn::If':
|
||||
- HasCustomRole
|
||||
- !Ref Role
|
||||
- !Ref 'AWS::NoValue'
|
||||
ContainerDefinitions:
|
||||
- Name: !Ref ServiceName
|
||||
Cpu: !Ref ContainerCpu
|
||||
Memory: !Ref ContainerMemory
|
||||
Image: !Ref ImageUrl
|
||||
EntryPoint:
|
||||
Fn::Split:
|
||||
- ','
|
||||
- !Ref EntryPoint
|
||||
Command:
|
||||
Fn::Split:
|
||||
- ','
|
||||
- !Ref Command
|
||||
WorkingDirectory: !Ref WorkingDirectory
|
||||
Environment:
|
||||
- Name: ALLOW_EMPTY_PASSWORD
|
||||
Value: 'yes'
|
||||
# template - env vars
|
||||
MountPoints:
|
||||
- SourceVolume: efs-data
|
||||
ContainerPath: !Ref EFSMountDirectory
|
||||
ReadOnly: false
|
||||
Secrets:
|
||||
# template secrets p3 - container def
|
||||
LogConfiguration:
|
||||
LogDriver: awslogs
|
||||
Options:
|
||||
awslogs-group: !Ref ServiceName
|
||||
awslogs-region: !Ref 'AWS::Region'
|
||||
awslogs-stream-prefix: !Ref ServiceName
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
|
||||
DependsOn:
|
||||
- LogGroup
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
dabb0116-abe0-48a6-a8af-cf9111c879a5:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 270
|
||||
'y': 90
|
||||
z: 1
|
||||
embeds: []
|
||||
dependson:
|
||||
- aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
c6f18447-b879-4696-8873-f981b2cedd2b:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 270
|
||||
'y': 210
|
||||
z: 1
|
||||
embeds: []
|
||||
7f809e91-9e5d-4678-98c1-c5085956c480:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 60
|
||||
'y': 300
|
||||
z: 1
|
||||
embeds: []
|
||||
dependson:
|
||||
- aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
- c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
aece53ae-b82d-4267-bc16-ed964b05db27:
|
||||
size:
|
||||
width: 150
|
||||
height: 150
|
||||
position:
|
||||
x: 60
|
||||
'y': 90
|
||||
z: 1
|
||||
embeds: []
|
||||
4d2da56c-3643-46b8-aaee-e46e19f95fcc:
|
||||
source:
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
target:
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
z: 11
|
||||
14eb957b-f094-4653-93c4-77b2f851953c:
|
||||
source:
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
target:
|
||||
id: c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
z: 12
|
||||
85c57444-e5bb-4230-bc85-e545cd4558f6:
|
||||
source:
|
||||
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
|
||||
target:
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
z: 13
|
||||
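One detail worth calling out in the task-definition template above: EntryPoint and Command arrive as single comma-separated strings and are turned into arrays with Fn::Split. A minimal sketch of supplying them when creating a stack from this template — the stack name, template body variable and parameter values are illustrative assumptions, not the action's actual wiring:

import * as SDK from 'aws-sdk';

// Sketch only: creates a stack from the task-definition template with a
// comma-separated Command that Fn::Split turns into ['-c', 'echo hello'].
async function createExampleTaskStack(templateBody: string) {
  const CF = new SDK.CloudFormation();
  const StackName = 'example-task-stack'; // hypothetical name
  await CF.createStack({
    StackName,
    TemplateBody: templateBody,
    Capabilities: ['CAPABILITY_IAM'],
    Parameters: [
      { ParameterKey: 'EnvironmentName', ParameterValue: 'development' },
      { ParameterKey: 'ImageUrl', ParameterValue: 'nginx' },
      { ParameterKey: 'EntryPoint', ParameterValue: '/bin/sh' },
      { ParameterKey: 'Command', ParameterValue: '-c,echo hello' },
    ],
  }).promise();
  await CF.waitFor('stackCreateComplete', { StackName }).promise();
}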
@@ -1,12 +1,9 @@
|
||||
import * as AWS from 'aws-sdk';
|
||||
|
||||
class RemoteBuilderTaskDef {
|
||||
class CloudRunnerAWSTaskDef {
|
||||
public taskDefStackName!: string;
|
||||
public taskDefCloudFormation!: string;
|
||||
public taskDefStackNameTTL!: string;
|
||||
public ttlCloudFormation!: string;
|
||||
public taskDefResources: AWS.CloudFormation.StackResources | undefined;
|
||||
public baseResources: AWS.CloudFormation.StackResources | undefined;
|
||||
public logid!: string;
|
||||
}
|
||||
export default RemoteBuilderTaskDef;
|
||||
export default CloudRunnerAWSTaskDef;
|
||||
98 src/model/cloud-runner/aws/index.ts Normal file
@@ -0,0 +1,98 @@
|
||||
import * as SDK from 'aws-sdk';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
|
||||
import AWSTaskRunner from './aws-task-runner';
|
||||
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { AWSJobStack } from './aws-job-stack';
|
||||
import { AWSBaseStack } from './aws-base-stack';
|
||||
import { Input } from '../..';
|
||||
|
||||
class AWSBuildEnvironment implements CloudRunnerProviderInterface {
|
||||
private baseStackName: string;
|
||||
|
||||
constructor(buildParameters: BuildParameters) {
|
||||
this.baseStackName = buildParameters.awsBaseStackName;
|
||||
}
|
||||
async cleanupSharedResources(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {}
|
||||
async setupSharedResources(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {}
|
||||
|
||||
async runTask(
|
||||
buildGuid: string,
|
||||
image: string,
|
||||
commands: string,
|
||||
mountdir: string,
|
||||
workingdir: string,
|
||||
environment: CloudRunnerEnvironmentVariable[],
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<string> {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const ECS = new SDK.ECS();
|
||||
const CF = new SDK.CloudFormation();
|
||||
CloudRunnerLogger.log(`AWS Region: ${CF.config.region}`);
|
||||
const entrypoint = ['/bin/sh'];
|
||||
const startTimeMs = Date.now();
|
||||
|
||||
await new AWSBaseStack(this.baseStackName).setupBaseStack(CF);
|
||||
const taskDef = await new AWSJobStack(this.baseStackName).setupCloudFormations(
|
||||
CF,
|
||||
buildGuid,
|
||||
image,
|
||||
entrypoint,
|
||||
commands,
|
||||
mountdir,
|
||||
workingdir,
|
||||
secrets,
|
||||
);
|
||||
|
||||
let postRunTaskTimeMs;
|
||||
let output = '';
|
||||
try {
|
||||
const postSetupStacksTimeMs = Date.now();
|
||||
CloudRunnerLogger.log(`Setup job time: ${Math.floor((postSetupStacksTimeMs - startTimeMs) / 1000)}s`);
|
||||
output = await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
|
||||
postRunTaskTimeMs = Date.now();
|
||||
CloudRunnerLogger.log(`Run job time: ${Math.floor((postRunTaskTimeMs - postSetupStacksTimeMs) / 1000)}s`);
|
||||
} finally {
|
||||
await this.cleanupResources(CF, taskDef);
|
||||
const postCleanupTimeMs = Date.now();
|
||||
if (postRunTaskTimeMs !== undefined)
|
||||
CloudRunnerLogger.log(`Cleanup job time: ${Math.floor((postCleanupTimeMs - postRunTaskTimeMs) / 1000)}s`);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
async cleanupResources(CF: SDK.CloudFormation, taskDef: CloudRunnerAWSTaskDef) {
|
||||
CloudRunnerLogger.log('Cleanup starting');
|
||||
await CF.deleteStack({
|
||||
StackName: taskDef.taskDefStackName,
|
||||
}).promise();
|
||||
|
||||
await CF.waitFor('stackDeleteComplete', {
|
||||
StackName: taskDef.taskDefStackName,
|
||||
}).promise();
|
||||
CloudRunnerLogger.log(`Deleted Stack: ${taskDef.taskDefStackName}`);
|
||||
CloudRunnerLogger.log('Cleanup complete');
|
||||
}
|
||||
}
|
||||
export default AWSBuildEnvironment;
|
||||
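For orientation, the provider above is normally driven through CloudRunner and the workflow composition root rather than called directly; the following is only an isolated sketch with made-up arguments to show the shape of a runTask call:

import BuildParameters from '../../build-parameters';
import AWSBuildEnvironment from './index';

// Illustrative only: argument values are invented and no workflow steps are involved.
async function runExampleAwsTask(buildParameters: BuildParameters): Promise<string> {
  const aws = new AWSBuildEnvironment(buildParameters);
  return aws.runTask(
    buildParameters.buildGuid,
    'alpine', // container image
    'echo "hello from cloud runner"', // commands
    'data', // mountdir
    '/data', // workingdir
    [], // environment variables
    [], // secrets
  );
}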
3 src/model/cloud-runner/cloud-runner-statics.ts Normal file
@@ -0,0 +1,3 @@
|
||||
export class CloudRunnerStatics {
|
||||
public static readonly logPrefix = `Cloud-Runner-System`;
|
||||
}
|
||||
50 src/model/cloud-runner/cloud-runner.test.ts Normal file
@@ -0,0 +1,50 @@
|
||||
import { BuildParameters, ImageTag } from '..';
|
||||
import CloudRunner from './cloud-runner';
|
||||
import Input from '../input';
|
||||
import { CloudRunnerStatics } from './cloud-runner-statics';
|
||||
import { TaskParameterSerializer } from './services/task-parameter-serializer';
|
||||
import UnityVersioning from '../unity-versioning';
|
||||
|
||||
describe('Cloud Runner', () => {
|
||||
it('responds', () => {});
|
||||
});
|
||||
describe('Cloud Runner', () => {
|
||||
const testSecretName = 'testSecretName';
|
||||
const testSecretValue = 'testSecretValue';
|
||||
if (Input.cloudRunnerTests) {
|
||||
it('All build parameters sent to cloud runner as env vars', async () => {
|
||||
Input.cliOptions = {
|
||||
versioning: 'None',
|
||||
projectPath: 'test-project',
|
||||
unityVersion: UnityVersioning.read('test-project'),
|
||||
customJob: `
|
||||
- name: 'step 1'
|
||||
image: 'alpine'
|
||||
commands: 'printenv'
|
||||
secrets:
|
||||
- name: '${testSecretName}'
|
||||
value: '${testSecretValue}'
|
||||
`,
|
||||
};
|
||||
Input.githubInputEnabled = false;
|
||||
const buildParameter = await BuildParameters.create();
|
||||
const baseImage = new ImageTag(buildParameter);
|
||||
const file = await CloudRunner.run(buildParameter, baseImage.toString());
|
||||
expect(file).toContain(JSON.stringify(buildParameter));
|
||||
expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
|
||||
const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
|
||||
const newLinePurgedFile = file
|
||||
.replace(/\s+/g, '')
|
||||
.replace(new RegExp(`\\[${CloudRunnerStatics.logPrefix}\\]`, 'g'), '');
|
||||
for (const element of environmentVariables) {
|
||||
if (element.value !== undefined && typeof element.value !== 'function') {
|
||||
if (typeof element.value === `string`) {
|
||||
element.value = element.value.replace(/\s+/g, '');
|
||||
}
|
||||
expect(newLinePurgedFile).toContain(`${element.name}=${element.value}`);
|
||||
}
|
||||
}
|
||||
Input.githubInputEnabled = true;
|
||||
}, 1000000);
|
||||
}
|
||||
});
|
||||
72 src/model/cloud-runner/cloud-runner.ts Normal file
@@ -0,0 +1,72 @@
|
||||
import AWSBuildPlatform from './aws';
|
||||
import { BuildParameters } from '..';
|
||||
import { CloudRunnerState } from './state/cloud-runner-state';
|
||||
import Kubernetes from './k8s';
|
||||
import CloudRunnerLogger from './services/cloud-runner-logger';
|
||||
import { CloudRunnerStepState } from './state/cloud-runner-step-state';
|
||||
import { WorkflowCompositionRoot } from './workflows/workflow-composition-root';
|
||||
import { CloudRunnerError } from './error/cloud-runner-error';
|
||||
import { TaskParameterSerializer } from './services/task-parameter-serializer';
|
||||
import * as core from '@actions/core';
|
||||
|
||||
class CloudRunner {
|
||||
private static setup(buildParameters: BuildParameters) {
|
||||
CloudRunnerLogger.setup();
|
||||
CloudRunnerState.setup(buildParameters);
|
||||
CloudRunner.setupBuildPlatform();
|
||||
const parameters = TaskParameterSerializer.readBuildEnvironmentVariables();
|
||||
for (const element of parameters) {
|
||||
core.setOutput(element.name, element.value);
|
||||
}
|
||||
}
|
||||
|
||||
private static setupBuildPlatform() {
|
||||
switch (CloudRunnerState.buildParams.cloudRunnerCluster) {
|
||||
case 'k8s':
|
||||
CloudRunnerLogger.log('Cloud Runner platform selected Kubernetes');
|
||||
CloudRunnerState.CloudRunnerProviderPlatform = new Kubernetes(CloudRunnerState.buildParams);
|
||||
break;
|
||||
default:
|
||||
case 'aws':
|
||||
CloudRunnerLogger.log('Cloud Runner platform selected AWS');
|
||||
CloudRunnerState.CloudRunnerProviderPlatform = new AWSBuildPlatform(CloudRunnerState.buildParams);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
static async run(buildParameters: BuildParameters, baseImage: string) {
|
||||
CloudRunner.setup(buildParameters);
|
||||
try {
|
||||
core.startGroup('Setup remote runner');
|
||||
await CloudRunnerState.CloudRunnerProviderPlatform.setupSharedResources(
|
||||
CloudRunnerState.buildParams.buildGuid,
|
||||
CloudRunnerState.buildParams,
|
||||
CloudRunnerState.branchName,
|
||||
CloudRunnerState.defaultSecrets,
|
||||
);
|
||||
core.endGroup();
|
||||
const output = await new WorkflowCompositionRoot().run(
|
||||
new CloudRunnerStepState(
|
||||
baseImage,
|
||||
TaskParameterSerializer.readBuildEnvironmentVariables(),
|
||||
CloudRunnerState.defaultSecrets,
|
||||
),
|
||||
);
|
||||
core.startGroup('Cleanup');
|
||||
await CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(
|
||||
CloudRunnerState.buildParams.buildGuid,
|
||||
CloudRunnerState.buildParams,
|
||||
CloudRunnerState.branchName,
|
||||
CloudRunnerState.defaultSecrets,
|
||||
);
|
||||
CloudRunnerLogger.log(`Cleanup complete`);
|
||||
core.endGroup();
|
||||
return output;
|
||||
} catch (error) {
|
||||
core.endGroup();
|
||||
await CloudRunnerError.handleException(error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
export default CloudRunner;
|
||||
16 src/model/cloud-runner/error/cloud-runner-error.ts Normal file
@@ -0,0 +1,16 @@
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import * as core from '@actions/core';
|
||||
|
||||
export class CloudRunnerError {
|
||||
public static async handleException(error: unknown) {
|
||||
CloudRunnerLogger.error(JSON.stringify(error, undefined, 4));
|
||||
core.setFailed('Cloud Runner failed');
|
||||
await CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(
|
||||
CloudRunnerState.buildParams.buildGuid,
|
||||
CloudRunnerState.buildParams,
|
||||
CloudRunnerState.branchName,
|
||||
CloudRunnerState.defaultSecrets,
|
||||
);
|
||||
}
|
||||
}
|
||||
197 src/model/cloud-runner/k8s/index.ts Normal file
@@ -0,0 +1,197 @@
|
||||
import * as k8s from '@kubernetes/client-node';
|
||||
import { BuildParameters, Output } from '../..';
|
||||
import * as core from '@actions/core';
|
||||
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import KubernetesStorage from './kubernetes-storage';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import KubernetesTaskRunner from './kubernetes-task-runner';
|
||||
import KubernetesSecret from './kubernetes-secret';
|
||||
import waitUntil from 'async-wait-until';
|
||||
import KubernetesJobSpecFactory from './kubernetes-job-spec-factory';
|
||||
import KubernetesServiceAccount from './kubernetes-service-account';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { CoreV1Api } from '@kubernetes/client-node';
|
||||
|
||||
class Kubernetes implements CloudRunnerProviderInterface {
|
||||
private kubeConfig: k8s.KubeConfig;
|
||||
private kubeClient: k8s.CoreV1Api;
|
||||
private kubeClientBatch: k8s.BatchV1Api;
|
||||
private buildGuid: string = '';
|
||||
private buildParameters: BuildParameters;
|
||||
private pvcName: string = '';
|
||||
private secretName: string = '';
|
||||
private jobName: string = '';
|
||||
private namespace: string;
|
||||
private podName: string = '';
|
||||
private containerName: string = '';
|
||||
private cleanupCronJobName: string = '';
|
||||
private serviceAccountName: string = '';
|
||||
|
||||
constructor(buildParameters: BuildParameters) {
|
||||
this.kubeConfig = new k8s.KubeConfig();
|
||||
this.kubeConfig.loadFromDefault();
|
||||
this.kubeClient = this.kubeConfig.makeApiClient(k8s.CoreV1Api);
|
||||
this.kubeClientBatch = this.kubeConfig.makeApiClient(k8s.BatchV1Api);
|
||||
CloudRunnerLogger.log('Loaded default Kubernetes configuration for this environment');
|
||||
|
||||
this.namespace = 'default';
|
||||
this.buildParameters = buildParameters;
|
||||
}
|
||||
public async setupSharedResources(
|
||||
buildGuid: string,
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {
|
||||
try {
|
||||
this.pvcName = `unity-builder-pvc-${buildGuid}`;
|
||||
this.cleanupCronJobName = `unity-builder-cronjob-${buildGuid}`;
|
||||
this.serviceAccountName = `service-account-${buildGuid}`;
|
||||
await KubernetesStorage.createPersistentVolumeClaim(
|
||||
buildParameters,
|
||||
this.pvcName,
|
||||
this.kubeClient,
|
||||
this.namespace,
|
||||
);
|
||||
|
||||
await KubernetesServiceAccount.createServiceAccount(this.serviceAccountName, this.namespace, this.kubeClient);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async runTask(
|
||||
buildGuid: string,
|
||||
image: string,
|
||||
commands: string,
|
||||
mountdir: string,
|
||||
workingdir: string,
|
||||
environment: CloudRunnerEnvironmentVariable[],
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<string> {
|
||||
try {
|
||||
// setup
|
||||
this.buildGuid = buildGuid;
|
||||
this.secretName = `build-credentials-${buildGuid}`;
|
||||
this.jobName = `unity-builder-job-${buildGuid}`;
|
||||
this.containerName = `main`;
|
||||
await KubernetesSecret.createSecret(secrets, this.secretName, this.namespace, this.kubeClient);
|
||||
const jobSpec = KubernetesJobSpecFactory.getJobSpec(
|
||||
commands,
|
||||
image,
|
||||
mountdir,
|
||||
workingdir,
|
||||
environment,
|
||||
secrets,
|
||||
this.buildGuid,
|
||||
this.buildParameters,
|
||||
this.secretName,
|
||||
this.pvcName,
|
||||
this.jobName,
|
||||
k8s,
|
||||
);
|
||||
|
||||
//run
|
||||
const jobResult = await this.kubeClientBatch.createNamespacedJob(this.namespace, jobSpec);
|
||||
CloudRunnerLogger.log(`Creating build job ${JSON.stringify(jobResult.body.metadata, undefined, 4)}`);
|
||||
|
||||
await new Promise((promise) => setTimeout(promise, 5000));
|
||||
CloudRunnerLogger.log('Job created');
|
||||
this.setPodNameAndContainerName(await Kubernetes.findPodFromJob(this.kubeClient, this.jobName, this.namespace));
|
||||
CloudRunnerLogger.log('Watching pod until running');
|
||||
let output = '';
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
try {
|
||||
await KubernetesTaskRunner.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
|
||||
CloudRunnerLogger.log('Pod running, streaming logs');
|
||||
output = await KubernetesTaskRunner.runTask(
|
||||
this.kubeConfig,
|
||||
this.kubeClient,
|
||||
this.jobName,
|
||||
this.podName,
|
||||
'main',
|
||||
this.namespace,
|
||||
CloudRunnerLogger.log,
|
||||
);
|
||||
break;
|
||||
} catch (error: any) {
|
||||
if (error.message.includes(`HTTP`)) {
|
||||
continue;
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
await this.cleanupTaskResources();
|
||||
return output;
|
||||
} catch (error) {
|
||||
CloudRunnerLogger.log('Running job failed');
|
||||
core.error(JSON.stringify(error, undefined, 4));
|
||||
await this.cleanupTaskResources();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
setPodNameAndContainerName(pod: k8s.V1Pod) {
|
||||
this.podName = pod.metadata?.name || '';
|
||||
this.containerName = pod.status?.containerStatuses?.[0].name || '';
|
||||
}
|
||||
|
||||
async cleanupTaskResources() {
|
||||
CloudRunnerLogger.log('cleaning up');
|
||||
try {
|
||||
await this.kubeClientBatch.deleteNamespacedJob(this.jobName, this.namespace);
|
||||
await this.kubeClient.deleteNamespacedPod(this.podName, this.namespace);
|
||||
await this.kubeClient.deleteNamespacedSecret(this.secretName, this.namespace);
|
||||
await new Promise((promise) => setTimeout(promise, 5000));
|
||||
} catch (error) {
|
||||
CloudRunnerLogger.log('Failed to cleanup, error:');
|
||||
core.error(JSON.stringify(error, undefined, 4));
|
||||
CloudRunnerLogger.log('Abandoning cleanup, build error:');
|
||||
throw error;
|
||||
}
|
||||
try {
|
||||
await waitUntil(
|
||||
async () => {
|
||||
const jobBody = (await this.kubeClientBatch.readNamespacedJob(this.jobName, this.namespace)).body;
|
||||
const podBody = (await this.kubeClient.readNamespacedPod(this.podName, this.namespace)).body;
|
||||
return (jobBody === null || jobBody.status?.active === 0) && podBody === null;
|
||||
},
|
||||
{
|
||||
timeout: 500000,
|
||||
intervalBetweenAttempts: 15000,
|
||||
},
|
||||
);
|
||||
// eslint-disable-next-line no-empty
|
||||
} catch {}
|
||||
}
|
||||
|
||||
async cleanupSharedResources(
|
||||
buildGuid: string,
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {
|
||||
CloudRunnerLogger.log(`deleting PVC`);
|
||||
await this.kubeClient.deleteNamespacedPersistentVolumeClaim(this.pvcName, this.namespace);
|
||||
await Output.setBuildVersion(buildParameters.buildVersion);
|
||||
// eslint-disable-next-line unicorn/no-process-exit
|
||||
process.exit();
|
||||
}
|
||||
|
||||
static async findPodFromJob(kubeClient: CoreV1Api, jobName: string, namespace: string) {
|
||||
const namespacedPods = await kubeClient.listNamespacedPod(namespace);
|
||||
const pod = namespacedPods.body.items.find((x) => x.metadata?.labels?.['job-name'] === jobName);
|
||||
if (pod === undefined) {
|
||||
throw new Error("pod with job-name label doesn't exist");
|
||||
}
|
||||
return pod;
|
||||
}
|
||||
}
|
||||
export default Kubernetes;
|
||||
161 src/model/cloud-runner/k8s/kubernetes-job-spec-factory.ts Normal file
@@ -0,0 +1,161 @@
|
||||
import { V1EnvVar, V1EnvVarSource, V1SecretKeySelector } from '@kubernetes/client-node';
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
|
||||
class KubernetesJobSpecFactory {
|
||||
static getJobSpec(
|
||||
command: string,
|
||||
image: string,
|
||||
mountdir: string,
|
||||
workingDirectory: string,
|
||||
environment: CloudRunnerEnvironmentVariable[],
|
||||
secrets: CloudRunnerSecret[],
|
||||
buildGuid: string,
|
||||
buildParameters: BuildParameters,
|
||||
secretName,
|
||||
pvcName,
|
||||
jobName,
|
||||
k8s,
|
||||
) {
|
||||
environment.push(
|
||||
...[
|
||||
{
|
||||
name: 'GITHUB_SHA',
|
||||
value: buildGuid,
|
||||
},
|
||||
{
|
||||
name: 'GITHUB_WORKSPACE',
|
||||
value: '/data/repo',
|
||||
},
|
||||
{
|
||||
name: 'PROJECT_PATH',
|
||||
value: buildParameters.projectPath,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_PATH',
|
||||
value: buildParameters.buildPath,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_FILE',
|
||||
value: buildParameters.buildFile,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_NAME',
|
||||
value: buildParameters.buildName,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_METHOD',
|
||||
value: buildParameters.buildMethod,
|
||||
},
|
||||
{
|
||||
name: 'CUSTOM_PARAMETERS',
|
||||
value: buildParameters.customParameters,
|
||||
},
|
||||
{
|
||||
name: 'CHOWN_FILES_TO',
|
||||
value: buildParameters.chownFilesTo,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_TARGET',
|
||||
value: buildParameters.platform,
|
||||
},
|
||||
{
|
||||
name: 'ANDROID_VERSION_CODE',
|
||||
value: buildParameters.androidVersionCode.toString(),
|
||||
},
|
||||
{
|
||||
name: 'ANDROID_KEYSTORE_NAME',
|
||||
value: buildParameters.androidKeystoreName,
|
||||
},
|
||||
{
|
||||
name: 'ANDROID_KEYALIAS_NAME',
|
||||
value: buildParameters.androidKeyaliasName,
|
||||
},
|
||||
],
|
||||
);
|
||||
const job = new k8s.V1Job();
|
||||
job.apiVersion = 'batch/v1';
|
||||
job.kind = 'Job';
|
||||
job.metadata = {
|
||||
name: jobName,
|
||||
labels: {
|
||||
app: 'unity-builder',
|
||||
buildGuid,
|
||||
},
|
||||
};
|
||||
job.spec = {
|
||||
backoffLimit: 0,
|
||||
template: {
|
||||
spec: {
|
||||
volumes: [
|
||||
{
|
||||
name: 'build-mount',
|
||||
persistentVolumeClaim: {
|
||||
claimName: pvcName,
|
||||
},
|
||||
},
|
||||
],
|
||||
containers: [
|
||||
{
|
||||
name: 'main',
|
||||
image,
|
||||
command: ['/bin/sh'],
|
||||
args: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(command, CloudRunnerState.buildParams)],
|
||||
|
||||
workingDir: `${workingDirectory}`,
|
||||
resources: {
|
||||
requests: {
|
||||
memory: buildParameters.cloudRunnerMemory,
|
||||
cpu: buildParameters.cloudRunnerCpu,
|
||||
},
|
||||
},
|
||||
env: [
|
||||
...environment.map((x) => {
|
||||
const environmentVariable = new V1EnvVar();
|
||||
environmentVariable.name = x.name;
|
||||
environmentVariable.value = x.value;
|
||||
return environmentVariable;
|
||||
}),
|
||||
...secrets.map((x) => {
|
||||
const secret = new V1EnvVarSource();
|
||||
secret.secretKeyRef = new V1SecretKeySelector();
|
||||
secret.secretKeyRef.key = x.ParameterKey;
|
||||
secret.secretKeyRef.name = secretName;
|
||||
const environmentVariable = new V1EnvVar();
|
||||
environmentVariable.name = x.EnvironmentVariable;
|
||||
environmentVariable.valueFrom = secret;
|
||||
return environmentVariable;
|
||||
}),
|
||||
],
|
||||
volumeMounts: [
|
||||
{
|
||||
name: 'build-mount',
|
||||
mountPath: `/${mountdir}`,
|
||||
},
|
||||
],
|
||||
lifecycle: {
|
||||
preStop: {
|
||||
exec: {
|
||||
command: [
|
||||
'/bin/bash',
|
||||
'-c',
|
||||
`cd /data/builder/action/steps;
|
||||
chmod +x /return_license.sh;
|
||||
/return_license.sh;`,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
restartPolicy: 'Never',
|
||||
},
|
||||
},
|
||||
};
|
||||
return job;
|
||||
}
|
||||
}
|
||||
export default KubernetesJobSpecFactory;
|
||||
28 src/model/cloud-runner/k8s/kubernetes-secret.ts Normal file
@@ -0,0 +1,28 @@
|
||||
import { CoreV1Api } from '@kubernetes/client-node';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import * as k8s from '@kubernetes/client-node';
|
||||
const base64 = require('base-64');
|
||||
|
||||
class KubernetesSecret {
|
||||
static async createSecret(
|
||||
secrets: CloudRunnerSecret[],
|
||||
secretName: string,
|
||||
namespace: string,
|
||||
kubeClient: CoreV1Api,
|
||||
) {
|
||||
const secret = new k8s.V1Secret();
|
||||
secret.apiVersion = 'v1';
|
||||
secret.kind = 'Secret';
|
||||
secret.type = 'Opaque';
|
||||
secret.metadata = {
|
||||
name: secretName,
|
||||
};
|
||||
secret.data = {};
|
||||
for (const buildSecret of secrets) {
|
||||
secret.data[buildSecret.ParameterKey] = base64.encode(buildSecret.ParameterValue);
|
||||
}
|
||||
return kubeClient.createNamespacedSecret(namespace, secret);
|
||||
}
|
||||
}
|
||||
|
||||
export default KubernetesSecret;
|
||||
17 src/model/cloud-runner/k8s/kubernetes-service-account.ts Normal file
@@ -0,0 +1,17 @@
|
||||
import { CoreV1Api } from '@kubernetes/client-node';
|
||||
import * as k8s from '@kubernetes/client-node';
|
||||
|
||||
class KubernetesServiceAccount {
|
||||
static async createServiceAccount(serviceAccountName: string, namespace: string, kubeClient: CoreV1Api) {
|
||||
const serviceAccount = new k8s.V1ServiceAccount();
|
||||
serviceAccount.apiVersion = 'v1';
|
||||
serviceAccount.kind = 'ServiceAccount';
|
||||
serviceAccount.metadata = {
|
||||
name: serviceAccountName,
|
||||
};
|
||||
serviceAccount.automountServiceAccountToken = false;
|
||||
return kubeClient.createNamespacedServiceAccount(namespace, serviceAccount);
|
||||
}
|
||||
}
|
||||
|
||||
export default KubernetesServiceAccount;
|
||||
114 src/model/cloud-runner/k8s/kubernetes-storage.ts Normal file
@@ -0,0 +1,114 @@
|
||||
import waitUntil from 'async-wait-until';
|
||||
import * as core from '@actions/core';
|
||||
import * as k8s from '@kubernetes/client-node';
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import YAML from 'yaml';
|
||||
|
||||
class KubernetesStorage {
|
||||
public static async createPersistentVolumeClaim(
|
||||
buildParameters: BuildParameters,
|
||||
pvcName: string,
|
||||
kubeClient: k8s.CoreV1Api,
|
||||
namespace: string,
|
||||
) {
|
||||
if (buildParameters.kubeVolume) {
|
||||
CloudRunnerLogger.log(buildParameters.kubeVolume);
|
||||
pvcName = buildParameters.kubeVolume;
|
||||
return;
|
||||
}
|
||||
const pvcList = (await kubeClient.listNamespacedPersistentVolumeClaim(namespace)).body.items.map(
|
||||
(x) => x.metadata?.name,
|
||||
);
|
||||
CloudRunnerLogger.log(`Current PVCs in namespace ${namespace}`);
|
||||
CloudRunnerLogger.log(JSON.stringify(pvcList, undefined, 4));
|
||||
if (pvcList.includes(pvcName)) {
|
||||
CloudRunnerLogger.log(`pvc ${pvcName} already exists`);
|
||||
core.setOutput('volume', pvcName);
|
||||
return;
|
||||
}
|
||||
CloudRunnerLogger.log(`Creating PVC ${pvcName} (does not exist)`);
|
||||
const result = await KubernetesStorage.createPVC(pvcName, buildParameters, kubeClient, namespace);
|
||||
await KubernetesStorage.handleResult(result, kubeClient, namespace, pvcName);
|
||||
}
|
||||
|
||||
public static async getPVCPhase(kubeClient: k8s.CoreV1Api, name: string, namespace: string) {
|
||||
try {
|
||||
return (await kubeClient.readNamespacedPersistentVolumeClaim(name, namespace)).body.status?.phase;
|
||||
} catch (error) {
|
||||
core.error('Failed to get PVC phase');
|
||||
core.error(JSON.stringify(error, undefined, 4));
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
public static async watchUntilPVCNotPending(kubeClient: k8s.CoreV1Api, name: string, namespace: string) {
|
||||
try {
|
||||
CloudRunnerLogger.log(`watch Until PVC Not Pending ${name} ${namespace}`);
|
||||
CloudRunnerLogger.log(`${await this.getPVCPhase(kubeClient, name, namespace)}`);
|
||||
await waitUntil(
|
||||
async () => {
|
||||
return (await this.getPVCPhase(kubeClient, name, namespace)) !== 'Pending';
|
||||
},
|
||||
{
|
||||
timeout: 500000,
|
||||
intervalBetweenAttempts: 15000,
|
||||
},
|
||||
);
|
||||
} catch (error: any) {
|
||||
core.error('Failed to watch PVC');
|
||||
core.error(error.toString());
|
||||
core.error(
|
||||
`PVC Body: ${JSON.stringify(
|
||||
(await kubeClient.readNamespacedPersistentVolumeClaim(name, namespace)).body,
|
||||
undefined,
|
||||
4,
|
||||
)}`,
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private static async createPVC(
|
||||
pvcName: string,
|
||||
buildParameters: BuildParameters,
|
||||
kubeClient: k8s.CoreV1Api,
|
||||
namespace: string,
|
||||
) {
|
||||
const pvc = new k8s.V1PersistentVolumeClaim();
|
||||
pvc.apiVersion = 'v1';
|
||||
pvc.kind = 'PersistentVolumeClaim';
|
||||
pvc.metadata = {
|
||||
name: pvcName,
|
||||
};
|
||||
pvc.spec = {
|
||||
accessModes: ['ReadWriteOnce'],
|
||||
storageClassName: process.env.K8s_STORAGE_CLASS || 'standard',
|
||||
resources: {
|
||||
requests: {
|
||||
storage: buildParameters.kubeVolumeSize,
|
||||
},
|
||||
},
|
||||
};
|
||||
if (process.env.K8s_STORAGE_PVC_SPEC) {
|
||||
YAML.parse(process.env.K8s_STORAGE_PVC_SPEC);
|
||||
}
|
||||
const result = await kubeClient.createNamespacedPersistentVolumeClaim(namespace, pvc);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static async handleResult(
|
||||
result: { response: import('http').IncomingMessage; body: k8s.V1PersistentVolumeClaim },
|
||||
kubeClient: k8s.CoreV1Api,
|
||||
namespace: string,
|
||||
pvcName: string,
|
||||
) {
|
||||
const name = result.body.metadata?.name || '';
|
||||
CloudRunnerLogger.log(`PVC ${name} created`);
|
||||
await this.watchUntilPVCNotPending(kubeClient, name, namespace);
|
||||
CloudRunnerLogger.log(`PVC ${name} is ready and not pending`);
|
||||
core.setOutput('volume', pvcName);
|
||||
}
|
||||
}
|
||||
|
||||
export default KubernetesStorage;
|
||||
104 src/model/cloud-runner/k8s/kubernetes-task-runner.ts Normal file
@@ -0,0 +1,104 @@
|
||||
import { CoreV1Api, KubeConfig, Log } from '@kubernetes/client-node';
|
||||
import { Writable } from 'stream';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import * as core from '@actions/core';
|
||||
import { CloudRunnerStatics } from '../cloud-runner-statics';
|
||||
import waitUntil from 'async-wait-until';
|
||||
import { Input } from '../..';
|
||||
|
||||
class KubernetesTaskRunner {
|
||||
static async runTask(
|
||||
kubeConfig: KubeConfig,
|
||||
kubeClient: CoreV1Api,
|
||||
jobName: string,
|
||||
podName: string,
|
||||
containerName: string,
|
||||
namespace: string,
|
||||
logCallback: any,
|
||||
) {
|
||||
CloudRunnerLogger.log(`Streaming logs from pod: ${podName} container: ${containerName} namespace: ${namespace}`);
|
||||
const stream = new Writable();
|
||||
let output = '';
|
||||
let didStreamAnyLogs: boolean = false;
|
||||
stream._write = (chunk, encoding, next) => {
|
||||
didStreamAnyLogs = true;
|
||||
let message = chunk.toString().trimRight(`\n`);
|
||||
message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
|
||||
if (Input.cloudRunnerTests) {
|
||||
output += message;
|
||||
}
|
||||
logCallback(message);
|
||||
next();
|
||||
};
|
||||
const logOptions = {
|
||||
follow: true,
|
||||
pretty: false,
|
||||
previous: false,
|
||||
};
|
||||
try {
|
||||
const resultError = await new Promise((resolve) =>
|
||||
new Log(kubeConfig).log(namespace, podName, containerName, stream, resolve, logOptions),
|
||||
);
|
||||
stream.destroy();
|
||||
if (resultError) {
|
||||
throw resultError;
|
||||
}
|
||||
if (!didStreamAnyLogs) {
|
||||
core.error('Failed to stream any logs, listing namespace events, check for an error with the container');
|
||||
core.error(
|
||||
JSON.stringify(
|
||||
{
|
||||
events: (await kubeClient.listNamespacedEvent(namespace)).body.items
|
||||
.filter((x) => {
|
||||
return x.involvedObject.name === podName || x.involvedObject.name === jobName;
|
||||
})
|
||||
.map((x) => {
|
||||
return {
|
||||
type: x.involvedObject.kind,
|
||||
name: x.involvedObject.name,
|
||||
message: x.message,
|
||||
};
|
||||
}),
|
||||
},
|
||||
undefined,
|
||||
4,
|
||||
),
|
||||
);
|
||||
throw new Error(`No logs streamed from k8s`);
|
||||
}
|
||||
} catch (error) {
|
||||
if (stream) {
|
||||
stream.destroy();
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
CloudRunnerLogger.log('end of log stream');
|
||||
return output;
|
||||
}
|
||||
|
||||
static async watchUntilPodRunning(kubeClient: CoreV1Api, podName: string, namespace: string) {
|
||||
let success: boolean = false;
|
||||
CloudRunnerLogger.log(`Watching ${podName} ${namespace}`);
|
||||
await waitUntil(
|
||||
async () => {
|
||||
const status = await kubeClient.readNamespacedPodStatus(podName, namespace);
|
||||
const phase = status?.body.status?.phase;
|
||||
success = phase === 'Running';
|
||||
CloudRunnerLogger.log(
|
||||
`${status.body.status?.phase} ${status.body.status?.conditions?.[0].reason || ''} ${
|
||||
status.body.status?.conditions?.[0].message || ''
|
||||
}`,
|
||||
);
|
||||
if (success || phase !== 'Pending') return true;
|
||||
return false;
|
||||
},
|
||||
{
|
||||
timeout: 2000000,
|
||||
intervalBetweenAttempts: 15000,
|
||||
},
|
||||
);
|
||||
return success;
|
||||
}
|
||||
}
|
||||
|
||||
export default KubernetesTaskRunner;
|
||||
@@ -0,0 +1,40 @@
|
||||
import { BuildParameters, Input } from '../..';
|
||||
import YAML from 'yaml';
|
||||
import CloudRunnerSecret from './cloud-runner-secret';
|
||||
|
||||
export class CloudRunnerBuildCommandProcessor {
|
||||
public static ProcessCommands(commands: string, buildParameters: BuildParameters): string {
|
||||
const hooks = CloudRunnerBuildCommandProcessor.getHooks().filter((x) => x.step.includes(`all`));
|
||||
|
||||
return `echo "---"
|
||||
echo "start cloud runner init"
|
||||
${Input.cloudRunnerTests ? '' : '#'} printenv
|
||||
echo "start cloud runner job"
|
||||
${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
|
||||
${commands}
|
||||
${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
|
||||
echo "end of cloud runner job
|
||||
---${buildParameters.logId}"
|
||||
`;
|
||||
}
|
||||
|
||||
public static getHooks(): Hook[] {
|
||||
const experimentHooks = process.env.EXPERIMENTAL_HOOKS;
|
||||
let output = new Array<Hook>();
|
||||
if (experimentHooks && experimentHooks !== '') {
|
||||
try {
|
||||
output = YAML.parse(experimentHooks);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
return output.filter((x) => x.step !== undefined && x.hook !== undefined && x.hook.length > 0);
|
||||
}
|
||||
}
|
||||
export class Hook {
|
||||
public commands;
|
||||
public secrets: CloudRunnerSecret[] = new Array<CloudRunnerSecret>();
|
||||
public name;
|
||||
public hook!: string[];
|
||||
public step!: string[];
|
||||
}
|
||||
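To make the hook shape above concrete: getHooks only keeps entries whose step and hook fields are defined and whose hook array is non-empty. A sketch with an entirely made-up hook — the name and command are illustrative, not defaults shipped with this commit:

import { CloudRunnerBuildCommandProcessor } from './cloud-runner-build-command-process';

// Hypothetical hook definition supplied via the EXPERIMENTAL_HOOKS environment variable.
process.env.EXPERIMENTAL_HOOKS = `
- name: print-node-version
  hook:
    - before
  step:
    - all
  commands: node --version
`;

const hooks = CloudRunnerBuildCommandProcessor.getHooks();
// hooks[0].commands === 'node --version'; ProcessCommands prepends it because
// its step includes 'all' and its hook includes 'before'.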
@@ -0,0 +1,4 @@
|
||||
class CloudRunnerConstants {
|
||||
static alphabet = '0123456789abcdefghijklmnopqrstuvwxyz';
|
||||
}
|
||||
export default CloudRunnerConstants;
|
||||
@@ -0,0 +1,5 @@
|
||||
class CloudRunnerEnvironmentVariable {
|
||||
public name!: string;
|
||||
public value!: string;
|
||||
}
|
||||
export default CloudRunnerEnvironmentVariable;
|
||||
47 src/model/cloud-runner/services/cloud-runner-logger.ts Normal file
@@ -0,0 +1,47 @@
|
||||
import * as core from '@actions/core';
|
||||
|
||||
class CloudRunnerLogger {
|
||||
private static timestamp: number;
|
||||
private static globalTimestamp: number;
|
||||
|
||||
public static setup() {
|
||||
this.timestamp = this.createTimestamp();
|
||||
this.globalTimestamp = this.timestamp;
|
||||
}
|
||||
|
||||
public static log(message: string) {
|
||||
core.info(message);
|
||||
}
|
||||
|
||||
public static logWarning(message: string) {
|
||||
core.warning(message);
|
||||
}
|
||||
|
||||
public static logLine(message: string) {
|
||||
core.info(`${message}\n`);
|
||||
}
|
||||
|
||||
public static error(message: string) {
|
||||
core.error(message);
|
||||
}
|
||||
|
||||
public static logWithTime(message: string) {
|
||||
const newTimestamp = this.createTimestamp();
|
||||
core.info(
|
||||
`${message} (Since previous: ${this.calculateTimeDiff(
|
||||
newTimestamp,
|
||||
this.timestamp,
|
||||
)}, Total time: ${this.calculateTimeDiff(newTimestamp, this.globalTimestamp)})`,
|
||||
);
|
||||
this.timestamp = newTimestamp;
|
||||
}
|
||||
|
||||
private static calculateTimeDiff(x: number, y: number) {
|
||||
return Math.floor((x - y) / 1000);
|
||||
}
|
||||
|
||||
private static createTimestamp() {
|
||||
return Date.now();
|
||||
}
|
||||
}
|
||||
export default CloudRunnerLogger;
|
||||
10 src/model/cloud-runner/services/cloud-runner-namespace.ts Normal file
@@ -0,0 +1,10 @@
|
||||
import { customAlphabet } from 'nanoid';
|
||||
import CloudRunnerConstants from './cloud-runner-constants';
|
||||
|
||||
class CloudRunnerNamespace {
|
||||
static generateBuildName(runNumber: string | number, platform: string) {
|
||||
const nanoid = customAlphabet(CloudRunnerConstants.alphabet, 4);
|
||||
return `${runNumber}-${platform.toLowerCase().replace('standalone', '')}-${nanoid()}`;
|
||||
}
|
||||
}
|
||||
export default CloudRunnerNamespace;
|
||||
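As a quick illustration of the name format produced by generateBuildName above (the 4-character suffix is random, so the exact value varies):

import CloudRunnerNamespace from './cloud-runner-namespace';

// Produces something like '42-windows64-a1b2'
const buildName = CloudRunnerNamespace.generateBuildName(42, 'StandaloneWindows64');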
@@ -0,0 +1,42 @@
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
|
||||
import CloudRunnerSecret from './cloud-runner-secret';
|
||||
|
||||
export interface CloudRunnerProviderInterface {
|
||||
cleanupSharedResources(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
);
|
||||
setupSharedResources(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
);
|
||||
runTask(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
image: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
commands: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
mountdir: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
workingdir: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
environment: CloudRunnerEnvironmentVariable[],
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<string>;
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
class RemoteBuilderSecret {
|
||||
class CloudRunnerSecret {
|
||||
public ParameterKey!: string;
|
||||
public EnvironmentVariable!: string;
|
||||
public ParameterValue!: string;
|
||||
}
|
||||
export default RemoteBuilderSecret;
|
||||
export default CloudRunnerSecret;
|
||||
85 src/model/cloud-runner/services/task-parameter-serializer.ts Normal file
@@ -0,0 +1,85 @@
|
||||
import { Input } from '../..';
|
||||
import ImageEnvironmentFactory from '../../image-environment-factory';
|
||||
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import { CloudRunnerBuildCommandProcessor } from './cloud-runner-build-command-process';
|
||||
|
||||
export class TaskParameterSerializer {
|
||||
public static readBuildEnvironmentVariables(): CloudRunnerEnvironmentVariable[] {
|
||||
TaskParameterSerializer.setupDefaultSecrets();
|
||||
return [
|
||||
{
|
||||
name: 'ContainerMemory',
|
||||
value: CloudRunnerState.buildParams.cloudRunnerMemory,
|
||||
},
|
||||
{
|
||||
name: 'ContainerCpu',
|
||||
value: CloudRunnerState.buildParams.cloudRunnerCpu,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_TARGET',
|
||||
value: CloudRunnerState.buildParams.platform,
|
||||
},
|
||||
...TaskParameterSerializer.serializeBuildParamsAndInput,
|
||||
];
|
||||
}
|
||||
private static get serializeBuildParamsAndInput() {
|
||||
let array = new Array();
|
||||
array = TaskParameterSerializer.readBuildParameters(array);
|
||||
array = TaskParameterSerializer.readInput(array);
|
||||
const configurableHooks = CloudRunnerBuildCommandProcessor.getHooks();
|
||||
const secrets = configurableHooks.map((x) => x.secrets).filter((x) => x !== undefined && x.length > 0);
|
||||
if (secrets.length > 0) {
|
||||
// eslint-disable-next-line unicorn/no-array-reduce
|
||||
array.push(secrets.reduce((x, y) => [...x, ...y]));
|
||||
}
|
||||
|
||||
array = array.filter(
|
||||
(x) => x.value !== undefined && x.name !== '0' && x.value !== '' && x.name !== 'prototype' && x.name !== 'length',
|
||||
);
|
||||
array = array.map((x) => {
|
||||
x.name = Input.ToEnvVarFormat(x.name);
|
||||
x.value = `${x.value}`;
|
||||
return x;
|
||||
});
|
||||
return array;
|
||||
}
|
||||
|
||||
private static readBuildParameters(array: any[]) {
|
||||
const keys = Object.keys(CloudRunnerState.buildParams);
|
||||
for (const element of keys) {
|
||||
array.push({
|
||||
name: element,
|
||||
value: CloudRunnerState.buildParams[element],
|
||||
});
|
||||
}
|
||||
array.push({ name: 'buildParameters', value: JSON.stringify(CloudRunnerState.buildParams) });
|
||||
return array;
|
||||
}
|
||||
|
||||
private static readInput(array: any[]) {
|
||||
const input = Object.getOwnPropertyNames(Input);
|
||||
for (const element of input) {
|
||||
if (typeof Input[element] !== 'function' && array.filter((x) => x.name === element).length === 0) {
|
||||
array.push({
|
||||
name: element,
|
||||
value: `${Input[element]}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
return array;
|
||||
}
|
||||
|
||||
private static setupDefaultSecrets() {
|
||||
if (CloudRunnerState.defaultSecrets === undefined)
|
||||
CloudRunnerState.defaultSecrets = ImageEnvironmentFactory.getEnvironmentVariables(
|
||||
CloudRunnerState.buildParams,
|
||||
).map((x) => {
|
||||
return {
|
||||
ParameterKey: x.name,
|
||||
EnvironmentVariable: x.name,
|
||||
ParameterValue: x.value,
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
81 src/model/cloud-runner/state/cloud-runner-state.ts Normal file
@@ -0,0 +1,81 @@
|
||||
import path from 'path';
|
||||
import { BuildParameters } from '../..';
|
||||
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
|
||||
export class CloudRunnerState {
|
||||
public static CloudRunnerProviderPlatform: CloudRunnerProviderInterface;
|
||||
public static buildParams: BuildParameters;
|
||||
public static defaultSecrets: CloudRunnerSecret[];
|
||||
public static readonly repositoryFolder = 'repo';
|
||||
|
||||
// Only paths that are not joined onto another "Full"-suffixed property need to start with an absolute /
|
||||
|
||||
public static get buildPathFull(): string {
|
||||
return path.join(`/`, CloudRunnerState.buildVolumeFolder, CloudRunnerState.buildParams.buildGuid);
|
||||
}
|
||||
|
||||
public static get cacheFolderFull(): string {
|
||||
return path.join(
|
||||
'/',
|
||||
CloudRunnerState.buildVolumeFolder,
|
||||
CloudRunnerState.cacheFolder,
|
||||
CloudRunnerState.branchName,
|
||||
);
|
||||
}
|
||||
|
||||
static setup(buildParameters: BuildParameters) {
|
||||
CloudRunnerState.buildParams = buildParameters;
|
||||
}
|
||||
|
||||
public static get branchName(): string {
|
||||
return CloudRunnerState.buildParams.branch;
|
||||
}
|
||||
public static get builderPathFull(): string {
|
||||
return path.join(CloudRunnerState.buildPathFull, `builder`);
|
||||
}
|
||||
|
||||
public static get repoPathFull(): string {
|
||||
return path.join(CloudRunnerState.buildPathFull, CloudRunnerState.repositoryFolder);
|
||||
}
|
||||
|
||||
public static get projectPathFull(): string {
|
||||
return path.join(CloudRunnerState.repoPathFull, CloudRunnerState.buildParams.projectPath);
|
||||
}
|
||||
|
||||
public static get libraryFolderFull(): string {
|
||||
return path.join(CloudRunnerState.projectPathFull, `Library`);
|
||||
}
|
||||
|
||||
public static get lfsDirectoryFull(): string {
|
||||
return path.join(CloudRunnerState.repoPathFull, `.git`, `lfs`);
|
||||
}
|
||||
|
||||
public static get purgeRemoteCaching(): boolean {
|
||||
return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
|
||||
}
|
||||
|
||||
public static get lfsCacheFolderFull() {
|
||||
return path.join(CloudRunnerState.cacheFolderFull, `lfs`);
|
||||
}
|
||||
|
||||
public static get libraryCacheFolderFull() {
|
||||
return path.join(CloudRunnerState.cacheFolderFull, `Library`);
|
||||
}
|
||||
|
||||
public static get unityBuilderRepoUrl(): string {
|
||||
return `https://${CloudRunnerState.buildParams.githubToken}@github.com/game-ci/unity-builder.git`;
|
||||
}
|
||||
|
||||
public static get targetBuildRepoUrl(): string {
|
||||
return `https://${CloudRunnerState.buildParams.githubToken}@github.com/${CloudRunnerState.buildParams.githubRepo}.git`;
|
||||
}
|
||||
|
||||
public static get buildVolumeFolder() {
|
||||
return 'data';
|
||||
}
|
||||
|
||||
public static get cacheFolder() {
|
||||
return 'cache';
|
||||
}
|
||||
}
|
||||
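To make the path comment concrete: the "Full" getters chain off the two absolute roots, so inside the task container everything lives under the build volume. With hypothetical values buildGuid = 'abc123', branch = 'main' and projectPath = 'unity-project':

// CloudRunnerState.buildPathFull   -> '/data/abc123'
// CloudRunnerState.repoPathFull    -> '/data/abc123/repo'
// CloudRunnerState.projectPathFull -> '/data/abc123/repo/unity-project'
// CloudRunnerState.cacheFolderFull -> '/data/cache/main'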
13
src/model/cloud-runner/state/cloud-runner-step-state.ts
Normal file
@@ -0,0 +1,13 @@
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerSecret from '../services/cloud-runner-secret';

export class CloudRunnerStepState {
  public image: string;
  public environment: CloudRunnerEnvironmentVariable[];
  public secrets: CloudRunnerSecret[];
  constructor(image: string, environmentVariables: CloudRunnerEnvironmentVariable[], secrets: CloudRunnerSecret[]) {
    this.image = image;
    this.environment = environmentVariables;
    this.secrets = secrets;
  }
}
77
src/model/cloud-runner/steps/build-step.ts
Normal file
@@ -0,0 +1,77 @@
import path from 'path';
import { Input } from '../..';
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { StepInterface } from './step-interface';

export class BuildStep implements StepInterface {
  async run(cloudRunnerStepState: CloudRunnerStepState) {
    return await BuildStep.BuildStep(
      cloudRunnerStepState.image,
      cloudRunnerStepState.environment,
      cloudRunnerStepState.secrets,
    );
  }

  private static async BuildStep(
    image: string,
    environmentVariables: CloudRunnerEnvironmentVariable[],
    secrets: CloudRunnerSecret[],
  ) {
    CloudRunnerLogger.logLine(` `);
    CloudRunnerLogger.logLine('Starting part 2/2 (build unity project)');
    const hooks = CloudRunnerBuildCommandProcessor.getHooks().filter((x) => x.step.includes(`setup`));
    return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
      CloudRunnerState.buildParams.buildGuid,
      image,
      `${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
      export GITHUB_WORKSPACE="${CloudRunnerState.repoPathFull}"
      cp -r "${path
        .join(CloudRunnerState.builderPathFull, 'dist', 'default-build-script')
        .replace(/\\/g, `/`)}" "/UnityBuilderAction"
      cp -r "${path
        .join(CloudRunnerState.builderPathFull, 'dist', 'platforms', 'ubuntu', 'entrypoint.sh')
        .replace(/\\/g, `/`)}" "/entrypoint.sh"
      cp -r "${path
        .join(CloudRunnerState.builderPathFull, 'dist', 'platforms', 'ubuntu', 'steps')
        .replace(/\\/g, `/`)}" "/steps"
      chmod -R +x "/entrypoint.sh"
      chmod -R +x "/steps"
      /entrypoint.sh
      apt-get update
      apt-get install -y -q zip tree
      cd "${CloudRunnerState.libraryFolderFull.replace(/\\/g, `/`)}/.."
      zip -r "lib-${CloudRunnerState.buildParams.buildGuid}.zip" "Library"
      mv "lib-${CloudRunnerState.buildParams.buildGuid}.zip" "${CloudRunnerState.cacheFolderFull.replace(
        /\\/g,
        `/`,
      )}/Library"
      cd "${CloudRunnerState.repoPathFull.replace(/\\/g, `/`)}"
      ${Input.cloudRunnerTests ? '' : '#'} tree -lh
      zip -r "build-${CloudRunnerState.buildParams.buildGuid}.zip" "build"
      ${Input.cloudRunnerTests ? '' : '#'} tree -lh
      ${Input.cloudRunnerTests ? '' : '#'} tree -lh "${CloudRunnerState.cacheFolderFull.replace(/\\/g, `/`)}"
      mv "build-${CloudRunnerState.buildParams.buildGuid}.zip" "${CloudRunnerState.cacheFolderFull.replace(
        /\\/g,
        `/`,
      )}"
      chmod +x ${path.join(CloudRunnerState.builderPathFull, 'dist', `index.js`).replace(/\\/g, `/`)}
      node ${path
        .join(CloudRunnerState.builderPathFull, 'dist', `index.js`)
        .replace(/\\/g, `/`)} -m cache-push "Library" "lib-${
        CloudRunnerState.buildParams.buildGuid
      }.zip" "${CloudRunnerState.cacheFolderFull.replace(/\\/g, `/`)}/Library"
      ${Input.cloudRunnerTests ? '' : '#'} tree -lh "${CloudRunnerState.cacheFolderFull}"
      ${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
      `,
      `/${CloudRunnerState.buildVolumeFolder}`,
      `/${CloudRunnerState.projectPathFull}`,
      environmentVariables,
      secrets,
    );
  }
}
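The repeated `${Input.cloudRunnerTests ? '' : '#'}` interpolation above toggles the diagnostic shell commands by turning them into comments when cloud runner tests are disabled; a standalone sketch of the pattern:

const verbose = false; // stand-in for Input.cloudRunnerTests
const line = `${verbose ? '' : '#'} tree -lh`;
// verbose === true  -> ' tree -lh'  (the command runs)
// verbose === false -> '# tree -lh' (the line becomes a shell comment and is skipped)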
59
src/model/cloud-runner/steps/setup-step.ts
Normal file
@@ -0,0 +1,59 @@
import path from 'path';
import { Input } from '../..';
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { StepInterface } from './step-interface';

export class SetupStep implements StepInterface {
  async run(cloudRunnerStepState: CloudRunnerStepState) {
    try {
      return await SetupStep.downloadRepository(
        cloudRunnerStepState.image,
        cloudRunnerStepState.environment,
        cloudRunnerStepState.secrets,
      );
    } catch (error) {
      throw error;
    }
  }

  private static async downloadRepository(
    image: string,
    environmentVariables: CloudRunnerEnvironmentVariable[],
    secrets: CloudRunnerSecret[],
  ) {
    try {
      CloudRunnerLogger.log(` `);
      CloudRunnerLogger.logLine('Starting step 1/2 (setup game files from repository)');
      const hooks = CloudRunnerBuildCommandProcessor.getHooks().filter((x) => x.step.includes(`setup`));
      return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
        CloudRunnerState.buildParams.buildGuid,
        image,
        `apk update -q
        apk add git-lfs jq tree zip unzip nodejs -q
        ${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
        export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
        mkdir -p ${CloudRunnerState.builderPathFull.replace(/\\/g, `/`)}
        git clone -q -b ${CloudRunnerState.branchName} ${
          CloudRunnerState.unityBuilderRepoUrl
        } "${CloudRunnerState.builderPathFull.replace(/\\/g, `/`)}"
        ${Input.cloudRunnerTests ? '' : '#'} tree ${CloudRunnerState.builderPathFull.replace(/\\/g, `/`)}
        chmod +x ${path.join(CloudRunnerState.builderPathFull, 'dist', `index.js`).replace(/\\/g, `/`)}
        node ${path.join(CloudRunnerState.builderPathFull, 'dist', `index.js`).replace(/\\/g, `/`)} -m remote-cli
        ${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
        `,
        `/${CloudRunnerState.buildVolumeFolder}`,
        `/${CloudRunnerState.buildVolumeFolder}/`,
        environmentVariables,
        secrets,
      );
    } catch (error) {
      CloudRunnerLogger.logLine(`Failed download repository step 1/2`);
      throw error;
    }
  }
}
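Both steps splice user hooks from CloudRunnerBuildCommandProcessor.getHooks() around the generated script. Judging only from the fields read here (step, hook, commands), a hook is roughly shaped like this (illustrative sketch, not the authoritative definition):

const exampleHook = {
  step: 'setup',                        // matched with x.step.includes(`setup`)
  hook: 'before',                       // 'before' and/or 'after' the step's own commands
  commands: 'echo "custom setup hook"', // injected into the task script
};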
8
src/model/cloud-runner/steps/step-interface.ts
Normal file
@@ -0,0 +1,8 @@
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';

export interface StepInterface {
  run(
    // eslint-disable-next-line no-unused-vars
    cloudRunnerStepState: CloudRunnerStepState,
  );
}
@@ -0,0 +1,68 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { BuildStep } from '../steps/build-step';
import { SetupStep } from '../steps/setup-step';
import { CustomWorkflow } from './custom-workflow';
import { WorkflowInterface } from './workflow-interface';
import * as core from '@actions/core';

export class BuildAutomationWorkflow implements WorkflowInterface {
  async run(cloudRunnerStepState: CloudRunnerStepState) {
    try {
      return await BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
    } catch (error) {
      throw error;
    }
  }

  private static async standardBuildAutomation(baseImage: any) {
    try {
      CloudRunnerLogger.log(`Cloud Runner is running standard build automation`);

      core.startGroup('pre build steps');
      let output = '';
      if (CloudRunnerState.buildParams.preBuildSteps !== '') {
        output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.preBuildSteps);
      }
      core.endGroup();
      CloudRunnerLogger.logWithTime('Configurable pre build step(s) time');

      core.startGroup('setup');
      output += await new SetupStep().run(
        new CloudRunnerStepState(
          'alpine/git',
          TaskParameterSerializer.readBuildEnvironmentVariables(),
          CloudRunnerState.defaultSecrets,
        ),
      );
      core.endGroup();
      CloudRunnerLogger.logWithTime('Download repository step time');

      core.startGroup('build');
      output += await new BuildStep().run(
        new CloudRunnerStepState(
          baseImage,
          TaskParameterSerializer.readBuildEnvironmentVariables(),
          CloudRunnerState.defaultSecrets,
        ),
      );
      core.endGroup();
      CloudRunnerLogger.logWithTime('Build time');

      core.startGroup('post build steps');
      if (CloudRunnerState.buildParams.postBuildSteps !== '') {
        output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.postBuildSteps);
      }
      core.endGroup();
      CloudRunnerLogger.logWithTime('Configurable post build step(s) time');

      CloudRunnerLogger.log(`Cloud Runner finished running standard build automation`);

      return output;
    } catch (error) {
      throw error;
    }
  }
}
46
src/model/cloud-runner/workflows/custom-workflow.ts
Normal file
@@ -0,0 +1,46 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import { CloudRunnerState } from '../state/cloud-runner-state';
import YAML from 'yaml';
import { Input } from '../..';
import { TaskParameterSerializer } from '../services/task-parameter-serializer';

export class CustomWorkflow {
  public static async runCustomJob(buildSteps) {
    try {
      CloudRunnerLogger.log(`Cloud Runner is running in custom job mode`);
      if (Input.cloudRunnerTests) {
        CloudRunnerLogger.log(`Parsing build steps: ${buildSteps}`);
      }
      try {
        buildSteps = YAML.parse(buildSteps);
        let output = '';
        for (const step of buildSteps) {
          const stepSecrets: CloudRunnerSecret[] = step.secrets.map((x) => {
            const secret: CloudRunnerSecret = {
              ParameterKey: x.name,
              EnvironmentVariable: Input.ToEnvVarFormat(x.name),
              ParameterValue: x.value,
            };
            return secret;
          });
          output += await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
            CloudRunnerState.buildParams.buildGuid,
            step['image'],
            step['commands'],
            `/${CloudRunnerState.buildVolumeFolder}`,
            `/${CloudRunnerState.buildVolumeFolder}/`,
            TaskParameterSerializer.readBuildEnvironmentVariables(),
            [...CloudRunnerState.defaultSecrets, ...stepSecrets],
          );
        }
        return output;
      } catch (error) {
        CloudRunnerLogger.log(`failed to parse a custom job "${buildSteps}"`);
        throw error;
      }
    } catch (error) {
      throw error;
    }
  }
}
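For reference, runCustomJob expects the customJob/preBuildSteps/postBuildSteps value to be a YAML list of steps, each providing the image, commands and secrets fields read above; a hypothetical example value:

const exampleCustomJob = `
- image: ubuntu
  commands: echo "hello from a custom step"
  secrets:
    - name: mySecret
      value: example
`;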
@@ -0,0 +1,42 @@
import { CloudRunnerState } from '../state/cloud-runner-state';
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
import { CustomWorkflow } from './custom-workflow';
import { WorkflowInterface } from './workflow-interface';
import { BuildAutomationWorkflow } from './build-automation-workflow';
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
import { SetupStep } from '../steps/setup-step';

export class WorkflowCompositionRoot implements WorkflowInterface {
  async run(cloudRunnerStepState: CloudRunnerStepState) {
    try {
      return await WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
    } catch (error) {
      throw error;
    }
  }

  private static async runJob(baseImage: any) {
    try {
      if (CloudRunnerState.buildParams.customJob === `setup`) {
        return await new SetupStep().run(
          new CloudRunnerStepState(
            baseImage,
            TaskParameterSerializer.readBuildEnvironmentVariables(),
            CloudRunnerState.defaultSecrets,
          ),
        );
      } else if (CloudRunnerState.buildParams.customJob !== '') {
        return await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.customJob);
      }
      return await new BuildAutomationWorkflow().run(
        new CloudRunnerStepState(
          baseImage,
          TaskParameterSerializer.readBuildEnvironmentVariables(),
          CloudRunnerState.defaultSecrets,
        ),
      );
    } catch (error) {
      throw error;
    }
  }
}
8
src/model/cloud-runner/workflows/workflow-interface.ts
Normal file
@@ -0,0 +1,8 @@
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';

export interface WorkflowInterface {
  run(
    // eslint-disable-next-line no-unused-vars
    cloudRunnerStepState: CloudRunnerStepState,
  );
}
@@ -1,5 +1,6 @@
import { exec } from '@actions/exec';
import ImageTag from './image-tag';
import ImageEnvironmentFactory from './image-environment-factory';

class Docker {
  static async build(buildParameters, silent = false) {
@@ -18,31 +19,7 @@ class Docker {
  }

  static async run(image, parameters, silent = false) {
    const {
      version,
      workspace,
      unitySerial,
      runnerTempPath,
      platform,
      projectPath,
      buildName,
      buildPath,
      buildFile,
      buildMethod,
      buildVersion,
      androidVersionCode,
      androidKeystoreName,
      androidKeystoreBase64,
      androidKeystorePass,
      androidKeyaliasName,
      androidKeyaliasPass,
      androidTargetSdkVersion,
      androidSdkManagerParameters,
      customParameters,
      sshAgent,
      gitPrivateToken,
      chownFilesTo,
    } = parameters;
    const { workspace, unitySerial, runnerTempPath, sshAgent } = parameters;

    const baseOsSpecificArguments = this.getBaseOsSpecificArguments(
      process.platform,
@@ -55,45 +32,7 @@ class Docker {
    const runCommand = `docker run \
        --workdir /github/workspace \
        --rm \
        --env UNITY_LICENSE \
        --env UNITY_LICENSE_FILE \
        --env UNITY_EMAIL \
        --env UNITY_PASSWORD \
        --env UNITY_VERSION="${version}" \
        --env USYM_UPLOAD_AUTH_TOKEN \
        --env PROJECT_PATH="${projectPath}" \
        --env BUILD_TARGET="${platform}" \
        --env BUILD_NAME="${buildName}" \
        --env BUILD_PATH="${buildPath}" \
        --env BUILD_FILE="${buildFile}" \
        --env BUILD_METHOD="${buildMethod}" \
        --env VERSION="${buildVersion}" \
        --env ANDROID_VERSION_CODE="${androidVersionCode}" \
        --env ANDROID_KEYSTORE_NAME="${androidKeystoreName}" \
        --env ANDROID_KEYSTORE_BASE64="${androidKeystoreBase64}" \
        --env ANDROID_KEYSTORE_PASS="${androidKeystorePass}" \
        --env ANDROID_KEYALIAS_NAME="${androidKeyaliasName}" \
        --env ANDROID_KEYALIAS_PASS="${androidKeyaliasPass}" \
        --env ANDROID_TARGET_SDK_VERSION="${androidTargetSdkVersion}" \
        --env ANDROID_SDK_MANAGER_PARAMETERS="${androidSdkManagerParameters}" \
        --env CUSTOM_PARAMETERS="${customParameters}" \
        --env CHOWN_FILES_TO="${chownFilesTo}" \
        --env GITHUB_REF \
        --env GITHUB_SHA \
        --env GITHUB_REPOSITORY \
        --env GITHUB_ACTOR \
        --env GITHUB_WORKFLOW \
        --env GITHUB_HEAD_REF \
        --env GITHUB_BASE_REF \
        --env GITHUB_EVENT_NAME \
        --env GITHUB_WORKSPACE=/github/workspace \
        --env GITHUB_ACTION \
        --env GITHUB_EVENT_PATH \
        --env RUNNER_OS \
        --env RUNNER_TOOL_CACHE \
        --env RUNNER_TEMP \
        --env RUNNER_WORKSPACE \
        --env GIT_PRIVATE_TOKEN="${gitPrivateToken}" \
        ${ImageEnvironmentFactory.getEnvVarString(parameters)} \
        ${baseOsSpecificArguments} \
        ${image}`;

70
src/model/image-environment-factory.ts
Normal file
@@ -0,0 +1,70 @@
import BuildParameters from './build-parameters';
import { ReadLicense } from './input-readers/test-license-reader';

class Parameter {
  public name;
  public value;
}

class ImageEnvironmentFactory {
  public static getEnvVarString(parameters) {
    const environmentVariables = ImageEnvironmentFactory.getEnvironmentVariables(parameters);
    let string = '';
    for (const p of environmentVariables) {
      if (p.value === '' || p.value === undefined) {
        continue;
      }
      if (p.value.toString().includes(`\n`)) {
        string += `--env ${p.name} `;
        continue;
      }
      string += `--env ${p.name}="${p.value}" `;
    }
    return string;
  }
  public static getEnvironmentVariables(parameters: BuildParameters) {
    const environmentVariables: Parameter[] = [
      { name: 'UNITY_LICENSE', value: process.env.UNITY_LICENSE || ReadLicense() },
      { name: 'UNITY_LICENSE_FILE', value: process.env.UNITY_LICENSE_FILE },
      { name: 'UNITY_EMAIL', value: process.env.UNITY_EMAIL },
      { name: 'UNITY_PASSWORD', value: process.env.UNITY_PASSWORD },
      { name: 'UNITY_SERIAL', value: parameters.unitySerial },
      { name: 'UNITY_VERSION', value: parameters.version },
      { name: 'USYM_UPLOAD_AUTH_TOKEN', value: process.env.USYM_UPLOAD_AUTH_TOKEN },
      { name: 'PROJECT_PATH', value: parameters.projectPath },
      { name: 'BUILD_TARGET', value: parameters.platform },
      { name: 'BUILD_NAME', value: parameters.buildName },
      { name: 'BUILD_PATH', value: parameters.buildPath },
      { name: 'BUILD_FILE', value: parameters.buildFile },
      { name: 'BUILD_METHOD', value: parameters.buildMethod },
      { name: 'VERSION', value: parameters.buildVersion },
      { name: 'ANDROID_VERSION_CODE', value: parameters.androidVersionCode },
      { name: 'ANDROID_KEYSTORE_NAME', value: parameters.androidKeystoreName },
      { name: 'ANDROID_KEYSTORE_BASE64', value: parameters.androidKeystoreBase64 },
      { name: 'ANDROID_KEYSTORE_PASS', value: parameters.androidKeystorePass },
      { name: 'ANDROID_KEYALIAS_NAME', value: parameters.androidKeyaliasName },
      { name: 'ANDROID_KEYALIAS_PASS', value: parameters.androidKeyaliasPass },
      { name: 'CUSTOM_PARAMETERS', value: parameters.customParameters },
      { name: 'CHOWN_FILES_TO', value: parameters.chownFilesTo },
      { name: 'GITHUB_REF', value: process.env.GITHUB_REF },
      { name: 'GITHUB_SHA', value: process.env.GITHUB_SHA },
      { name: 'GITHUB_REPOSITORY', value: process.env.GITHUB_REPOSITORY },
      { name: 'GITHUB_ACTOR', value: process.env.GITHUB_ACTOR },
      { name: 'GITHUB_WORKFLOW', value: process.env.GITHUB_WORKFLOW },
      { name: 'GITHUB_HEAD_REF', value: process.env.GITHUB_HEAD_REF },
      { name: 'GITHUB_BASE_REF', value: process.env.GITHUB_BASE_REF },
      { name: 'GITHUB_EVENT_NAME', value: process.env.GITHUB_EVENT_NAME },
      { name: 'GITHUB_WORKSPACE', value: '/github/workspace' },
      { name: 'GITHUB_ACTION', value: process.env.GITHUB_ACTION },
      { name: 'GITHUB_EVENT_PATH', value: process.env.GITHUB_EVENT_PATH },
      { name: 'RUNNER_OS', value: process.env.RUNNER_OS },
      { name: 'RUNNER_TOOL_CACHE', value: process.env.RUNNER_TOOL_CACHE },
      { name: 'RUNNER_TEMP', value: process.env.RUNNER_TEMP },
      { name: 'RUNNER_WORKSPACE', value: process.env.RUNNER_WORKSPACE },
    ];
    if (parameters.sshAgent) environmentVariables.push({ name: 'SSH_AUTH_SOCK', value: '/ssh-agent' });
    return environmentVariables;
  }
}

export default ImageEnvironmentFactory;
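getEnvVarString simply concatenates --env flags, skipping empty values and passing multi-line values (such as a UNITY_LICENSE file) by name only so Docker reads them from the host environment. A hypothetical fragment of its output:

// Hypothetical fragment of the generated string (actual content depends on the run):
// --env UNITY_VERSION="2019.4.29f1" --env BUILD_TARGET="StandaloneLinux64" --env UNITY_LICENSE ...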
@@ -9,8 +9,7 @@ import Platform from './platform';
import Project from './project';
import Unity from './unity';
import Versioning from './versioning';
import Kubernetes from './kubernetes';
import RemoteBuilder from './remote-builder/remote-builder';
import CloudRunner from './cloud-runner/cloud-runner';

export {
  Action,
@@ -24,6 +23,5 @@ export {
  Project,
  Unity,
  Versioning,
  Kubernetes,
  RemoteBuilder,
  CloudRunner as CloudRunner,
};

17
src/model/input-readers/action-yaml.ts
Normal file
@@ -0,0 +1,17 @@
import fs from 'fs';
import path from 'path';
import YAML from 'yaml';

export class ActionYamlReader {
  private actionYamlParsed: any;
  public constructor() {
    let filename = `action.yml`;
    if (!fs.existsSync(filename)) {
      filename = path.join(__dirname, `..`, filename);
    }
    this.actionYamlParsed = YAML.parse(fs.readFileSync(filename).toString());
  }
  public GetActionYamlValue(key: string) {
    return this.actionYamlParsed.inputs[key]?.description || 'No description found in action.yml';
  }
}
8
src/model/input-readers/git-repo.test.ts
Normal file
@@ -0,0 +1,8 @@
import { GitRepoReader } from './git-repo';

describe(`git repo tests`, () => {
  it(`Branch value parsed from CLI to not contain illegal characters`, async () => {
    expect(await GitRepoReader.GetBranch()).not.toContain(`\n`);
    expect(await GitRepoReader.GetBranch()).not.toContain(` `);
  });
});
20
src/model/input-readers/git-repo.ts
Normal file
@@ -0,0 +1,20 @@
import { assert } from 'console';
import System from '../system';
import fs from 'fs';
import { CloudRunnerSystem } from '../cli/remote-client/remote-client-services/cloud-runner-system';

export class GitRepoReader {
  static GetSha() {
    return '';
  }
  public static async GetRemote() {
    return (await CloudRunnerSystem.Run(`git remote -v`))
      .split(' ')[1]
      .split('https://github.com/')[1]
      .split('.git')[0];
  }
  public static async GetBranch() {
    assert(fs.existsSync(`.git`));
    return (await System.run(`git branch`, [], {}, false)).split('*')[1].split(`\n`)[0].replace(/ /g, ``);
  }
}
9
src/model/input-readers/github-cli.test.ts
Normal file
@@ -0,0 +1,9 @@
import { GithubCliReader } from './github-cli';
import * as core from '@actions/core';

describe(`github cli`, () => {
  it(`returns`, async () => {
    const token = await GithubCliReader.GetGitHubAuthToken();
    core.info(token);
  });
});
20
src/model/input-readers/github-cli.ts
Normal file
@@ -0,0 +1,20 @@
import { CloudRunnerSystem } from '../cli/remote-client/remote-client-services/cloud-runner-system';
import * as core from '@actions/core';

export class GithubCliReader {
  static async GetGitHubAuthToken() {
    try {
      const authStatus = await CloudRunnerSystem.Run(`gh auth status`, true);
      if (authStatus.includes('You are not logged') || authStatus === '') {
        return '';
      }
      return (await CloudRunnerSystem.Run(`gh auth status -t`))
        .split(`Token: `)[1]
        .replace(/ /g, '')
        .replace(/\n/g, '');
    } catch (error: any) {
      core.info(error || 'Failed to get github auth token from gh cli');
      return '';
    }
  }
}
8
src/model/input-readers/test-license-reader.ts
Normal file
@@ -0,0 +1,8 @@
import path from 'path';
import fs from 'fs';
import YAML from 'yaml';

export function ReadLicense() {
  const pipelineFile = path.join(__dirname, `.github`, `workflows`, `cloud-runner-k8s-pipeline.yml`);
  return fs.existsSync(pipelineFile) ? YAML.parse(fs.readFileSync(pipelineFile, 'utf8')).env.UNITY_LICENSE : '';
}
@@ -48,7 +48,7 @@ describe('Input', () => {

  describe('projectPath', () => {
    it('returns the default value', () => {
      expect(Input.projectPath).toStrictEqual('.');
      expect(Input.projectPath).toStrictEqual('test-project');
    });

    it('takes input from the users workflow', () => {

@@ -1,3 +1,7 @@
import fs from 'fs';
import path from 'path';
import { GitRepoReader } from './input-readers/git-repo';
import { GithubCliReader } from './input-readers/github-cli';
import Platform from './platform';

const core = require('@actions/core');
@@ -8,71 +12,134 @@ const core = require('@actions/core');
 * Note that input is always passed as a string, even booleans.
 */
class Input {
  public static cliOptions;
  public static githubInputEnabled: boolean = true;

  // also enables debug logging for cloud runner
  static get cloudRunnerTests(): boolean {
    return Input.getInput(`cloudRunnerTests`) || Input.getInput(`CloudRunnerTests`) || false;
  }
  private static getInput(query) {
    const coreInput = core.getInput(query);
    if (Input.githubInputEnabled && coreInput && coreInput !== '') {
      return coreInput;
    }

    return Input.cliOptions !== undefined && Input.cliOptions[query] !== undefined
      ? Input.cliOptions[query]
      : process.env[query] !== undefined
      ? process.env[query]
      : process.env[Input.ToEnvVarFormat(query)]
      ? process.env[Input.ToEnvVarFormat(query)]
      : '';
  }
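In other words, getInput now falls back through several sources; for a hypothetical lookup of 'cloudRunnerCluster' the resolution order is:

// 1. core.getInput('cloudRunnerCluster')      when githubInputEnabled and the action input is non-empty
// 2. Input.cliOptions['cloudRunnerCluster']   when supplied through the CLI
// 3. process.env['cloudRunnerCluster']
// 4. process.env['CLOUD_RUNNER_CLUSTER']      via Input.ToEnvVarFormat
// 5. ''                                       otherwise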
static get region(): string {
|
||||
return Input.getInput('region') || 'eu-west-2';
|
||||
}
|
||||
static async githubRepo() {
|
||||
return (
|
||||
Input.getInput('GITHUB_REPOSITORY') ||
|
||||
Input.getInput('GITHUB_REPO') ||
|
||||
(await GitRepoReader.GetRemote()) ||
|
||||
'game-ci/unity-builder'
|
||||
);
|
||||
}
|
||||
static async branch() {
|
||||
if (await GitRepoReader.GetBranch()) {
|
||||
return await GitRepoReader.GetBranch();
|
||||
} else if (Input.getInput(`GITHUB_REF`)) {
|
||||
return Input.getInput(`GITHUB_REF`).replace('refs/', '').replace(`head/`, '');
|
||||
} else if (Input.getInput('branch')) {
|
||||
return Input.getInput('branch');
|
||||
} else {
|
||||
return 'main';
|
||||
}
|
||||
}
|
||||
|
||||
static get gitSha() {
|
||||
if (Input.getInput(`GITHUB_SHA`)) {
|
||||
return Input.getInput(`GITHUB_SHA`);
|
||||
} else if (Input.getInput(`GitSHA`)) {
|
||||
return Input.getInput(`GitSHA`);
|
||||
} else if (GitRepoReader.GetSha()) {
|
||||
return GitRepoReader.GetSha();
|
||||
}
|
||||
}
|
||||
static get runNumber() {
|
||||
return Input.getInput('GITHUB_RUN_NUMBER') || '0';
|
||||
}
|
||||
|
||||
static get unityVersion() {
|
||||
return core.getInput('unityVersion') || 'auto';
|
||||
return Input.getInput('unityVersion') || 'auto';
|
||||
}
|
||||
|
||||
static get customImage() {
|
||||
return core.getInput('customImage');
|
||||
return Input.getInput('customImage');
|
||||
}
|
||||
|
||||
static get targetPlatform() {
|
||||
return core.getInput('targetPlatform') || Platform.default;
|
||||
return Input.getInput('targetPlatform') || Platform.default;
|
||||
}
|
||||
|
||||
static get projectPath() {
|
||||
const rawProjectPath = core.getInput('projectPath') || '.';
|
||||
const input = Input.getInput('projectPath');
|
||||
const rawProjectPath = input
|
||||
? input
|
||||
: fs.existsSync(path.join('test-project', 'ProjectSettings', 'ProjectVersion.txt')) &&
|
||||
!fs.existsSync(path.join('ProjectSettings', 'ProjectVersion.txt'))
|
||||
? 'test-project'
|
||||
: '.';
|
||||
return rawProjectPath.replace(/\/$/, '');
|
||||
}
|
||||
|
||||
static get buildName() {
|
||||
return core.getInput('buildName') || this.targetPlatform;
|
||||
return Input.getInput('buildName') || this.targetPlatform;
|
||||
}
|
||||
|
||||
static get buildsPath() {
|
||||
return core.getInput('buildsPath') || 'build';
|
||||
return Input.getInput('buildsPath') || 'build';
|
||||
}
|
||||
|
||||
static get buildMethod() {
|
||||
return core.getInput('buildMethod'); // processed in docker file
|
||||
return Input.getInput('buildMethod') || ''; // processed in docker file
|
||||
}
|
||||
|
||||
static get versioningStrategy() {
|
||||
return core.getInput('versioning') || 'Semantic';
|
||||
return Input.getInput('versioning') || 'Semantic';
|
||||
}
|
||||
|
||||
static get specifiedVersion() {
|
||||
return core.getInput('version') || '';
|
||||
return Input.getInput('version') || '';
|
||||
}
|
||||
|
||||
static get androidVersionCode() {
|
||||
return core.getInput('androidVersionCode');
|
||||
return Input.getInput('androidVersionCode');
|
||||
}
|
||||
|
||||
static get androidAppBundle() {
|
||||
const input = core.getInput('androidAppBundle') || false;
|
||||
const input = Input.getInput('androidAppBundle') || false;
|
||||
|
||||
return input === 'true';
|
||||
}
|
||||
|
||||
static get androidKeystoreName() {
|
||||
return core.getInput('androidKeystoreName') || '';
|
||||
return Input.getInput('androidKeystoreName') || '';
|
||||
}
|
||||
|
||||
static get androidKeystoreBase64() {
|
||||
return core.getInput('androidKeystoreBase64') || '';
|
||||
return Input.getInput('androidKeystoreBase64') || '';
|
||||
}
|
||||
|
||||
static get androidKeystorePass() {
|
||||
return core.getInput('androidKeystorePass') || '';
|
||||
return Input.getInput('androidKeystorePass') || '';
|
||||
}
|
||||
|
||||
static get androidKeyaliasName() {
|
||||
return core.getInput('androidKeyaliasName') || '';
|
||||
return Input.getInput('androidKeyaliasName') || '';
|
||||
}
|
||||
|
||||
static get androidKeyaliasPass() {
|
||||
return core.getInput('androidKeyaliasPass') || '';
|
||||
return Input.getInput('androidKeyaliasPass') || '';
|
||||
}
|
||||
|
||||
static get androidTargetSdkVersion() {
|
||||
@@ -80,57 +147,77 @@ class Input {
|
||||
}
|
||||
|
||||
static get allowDirtyBuild() {
|
||||
const input = core.getInput('allowDirtyBuild') || false;
|
||||
const input = Input.getInput('allowDirtyBuild') || false;
|
||||
|
||||
return input === 'true';
|
||||
}
|
||||
|
||||
static get customParameters() {
|
||||
return core.getInput('customParameters') || '';
|
||||
return Input.getInput('customParameters') || '';
|
||||
}
|
||||
|
||||
static get sshAgent() {
|
||||
return core.getInput('sshAgent') || '';
|
||||
return Input.getInput('sshAgent') || '';
|
||||
}
|
||||
|
||||
static get gitPrivateToken() {
|
||||
return core.getInput('gitPrivateToken') || '';
|
||||
static async githubToken() {
|
||||
return Input.getInput('githubToken') || (await GithubCliReader.GetGitHubAuthToken()) || '';
|
||||
}
|
||||
|
||||
static async gitPrivateToken() {
|
||||
return core.getInput('gitPrivateToken') || (await Input.githubToken());
|
||||
}
|
||||
|
||||
static get chownFilesTo() {
|
||||
return core.getInput('chownFilesTo') || '';
|
||||
return Input.getInput('chownFilesTo') || '';
|
||||
}
|
||||
|
||||
static get remoteBuildCluster() {
|
||||
return core.getInput('remoteBuildCluster') || '';
|
||||
static get postBuildSteps() {
|
||||
return Input.getInput('postBuildSteps') || '';
|
||||
}
|
||||
|
||||
static get awsStackName() {
|
||||
return core.getInput('awsStackName') || '';
|
||||
static get preBuildSteps() {
|
||||
return Input.getInput('preBuildSteps') || '';
|
||||
}
|
||||
|
||||
static get customJob() {
|
||||
return Input.getInput('customJob') || '';
|
||||
}
|
||||
|
||||
static get cloudRunnerCluster() {
|
||||
return Input.getInput('cloudRunnerCluster') || '';
|
||||
}
|
||||
|
||||
static get awsBaseStackName() {
|
||||
return Input.getInput('awsBaseStackName') || 'game-ci';
|
||||
}
|
||||
|
||||
static get kubeConfig() {
|
||||
return core.getInput('kubeConfig') || '';
|
||||
return Input.getInput('kubeConfig') || '';
|
||||
}
|
||||
|
||||
static get githubToken() {
|
||||
return core.getInput('githubToken') || '';
|
||||
static get cloudRunnerMemory() {
|
||||
return Input.getInput('cloudRunnerMemory') || '750M';
|
||||
}
|
||||
|
||||
static get remoteBuildMemory() {
|
||||
return core.getInput('remoteBuildMemory') || '800M';
|
||||
}
|
||||
|
||||
static get remoteBuildCpu() {
|
||||
return core.getInput('remoteBuildCpu') || '0.25';
|
||||
static get cloudRunnerCpu() {
|
||||
return Input.getInput('cloudRunnerCpu') || '1.0';
|
||||
}
|
||||
|
||||
static get kubeVolumeSize() {
|
||||
return core.getInput('kubeVolumeSize') || '5Gi';
|
||||
return Input.getInput('kubeVolumeSize') || '5Gi';
|
||||
}
|
||||
|
||||
static get kubeVolume() {
|
||||
return core.getInput('kubeVolume') || '';
|
||||
return Input.getInput('kubeVolume') || '';
|
||||
}
|
||||
|
||||
  public static ToEnvVarFormat(input: string) {
    return input
      .replace(/([A-Z])/g, ' $1')
      .trim()
      .toUpperCase()
      .replace(/ /g, '_');
  }
}
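ToEnvVarFormat converts camelCase input names to SCREAMING_SNAKE_CASE, which the environment-variable fallback in getInput relies on; for example:

// Input.ToEnvVarFormat('cloudRunnerTests') === 'CLOUD_RUNNER_TESTS'
// Input.ToEnvVarFormat('kubeVolumeSize')   === 'KUBE_VOLUME_SIZE'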
|
||||
|
||||
@@ -1,354 +0,0 @@
|
||||
// @ts-ignore
|
||||
import { Client, KubeConfig } from 'kubernetes-client';
|
||||
import Request from 'kubernetes-client/backends/request';
|
||||
|
||||
const core = require('@actions/core');
|
||||
const base64 = require('base-64');
|
||||
|
||||
const pollInterval = 10000;
|
||||
|
||||
class Kubernetes {
|
||||
private static kubeClient: any;
|
||||
private static buildId: string;
|
||||
private static buildParameters: any;
|
||||
private static baseImage: any;
|
||||
private static pvcName: string;
|
||||
private static secretName: string;
|
||||
private static jobName: string;
|
||||
private static namespace: string;
|
||||
|
||||
static async runBuildJob(buildParameters, baseImage) {
|
||||
const kubeconfig = new KubeConfig();
|
||||
kubeconfig.loadFromString(base64.decode(buildParameters.kubeConfig));
|
||||
const backend = new Request({ kubeconfig });
|
||||
const kubeClient = new Client(backend);
|
||||
await kubeClient.loadSpec();
|
||||
|
||||
const buildId = Kubernetes.uuidv4();
|
||||
const pvcName = `unity-builder-pvc-${buildId}`;
|
||||
const secretName = `build-credentials-${buildId}`;
|
||||
const jobName = `unity-builder-job-${buildId}`;
|
||||
const namespace = 'default';
|
||||
|
||||
this.kubeClient = kubeClient;
|
||||
this.buildId = buildId;
|
||||
this.buildParameters = buildParameters;
|
||||
this.baseImage = baseImage;
|
||||
this.pvcName = pvcName;
|
||||
this.secretName = secretName;
|
||||
this.jobName = jobName;
|
||||
this.namespace = namespace;
|
||||
|
||||
await Kubernetes.createSecret();
|
||||
await Kubernetes.createPersistentVolumeClaim();
|
||||
await Kubernetes.scheduleBuildJob();
|
||||
await Kubernetes.watchBuildJobUntilFinished();
|
||||
await Kubernetes.cleanup();
|
||||
|
||||
core.setOutput('volume', pvcName);
|
||||
}
|
||||
|
||||
static async createSecret() {
|
||||
const secretManifest = {
|
||||
apiVersion: 'v1',
|
||||
kind: 'Secret',
|
||||
metadata: {
|
||||
name: this.secretName,
|
||||
},
|
||||
type: 'Opaque',
|
||||
data: {
|
||||
GITHUB_TOKEN: base64.encode(this.buildParameters.githubToken),
|
||||
UNITY_LICENSE: base64.encode(process.env.UNITY_LICENSE),
|
||||
ANDROID_KEYSTORE_BASE64: base64.encode(this.buildParameters.androidKeystoreBase64),
|
||||
ANDROID_KEYSTORE_PASS: base64.encode(this.buildParameters.androidKeystorePass),
|
||||
ANDROID_KEYALIAS_PASS: base64.encode(this.buildParameters.androidKeyaliasPass),
|
||||
},
|
||||
};
|
||||
await this.kubeClient.api.v1.namespaces(this.namespace).secrets.post({ body: secretManifest });
|
||||
}
|
||||
|
||||
static async createPersistentVolumeClaim() {
|
||||
if (this.buildParameters.kubeVolume) {
|
||||
core.info(this.buildParameters.kubeVolume);
|
||||
this.pvcName = this.buildParameters.kubeVolume;
|
||||
return;
|
||||
}
|
||||
const pvcManifest = {
|
||||
apiVersion: 'v1',
|
||||
kind: 'PersistentVolumeClaim',
|
||||
metadata: {
|
||||
name: this.pvcName,
|
||||
},
|
||||
spec: {
|
||||
accessModes: ['ReadWriteOnce'],
|
||||
volumeMode: 'Filesystem',
|
||||
resources: {
|
||||
requests: {
|
||||
storage: this.buildParameters.kubeVolumeSize,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
await this.kubeClient.api.v1.namespaces(this.namespace).persistentvolumeclaims.post({ body: pvcManifest });
|
||||
core.info('Persistent Volume created, waiting for ready state...');
|
||||
await Kubernetes.watchPersistentVolumeClaimUntilReady();
|
||||
core.info('Persistent Volume ready for claims');
|
||||
}
|
||||
|
||||
static async watchPersistentVolumeClaimUntilReady() {
|
||||
await new Promise((resolve) => setTimeout(resolve, pollInterval));
|
||||
const queryResult = await this.kubeClient.api.v1
|
||||
.namespaces(this.namespace)
|
||||
.persistentvolumeclaims(this.pvcName)
|
||||
.get();
|
||||
if (queryResult.body.status.phase === 'Pending') {
|
||||
await Kubernetes.watchPersistentVolumeClaimUntilReady();
|
||||
}
|
||||
}
|
||||
|
||||
static async scheduleBuildJob() {
|
||||
core.info('Creating build job');
|
||||
const jobManifest = {
|
||||
apiVersion: 'batch/v1',
|
||||
kind: 'Job',
|
||||
metadata: {
|
||||
name: this.jobName,
|
||||
labels: {
|
||||
app: 'unity-builder',
|
||||
},
|
||||
},
|
||||
spec: {
|
||||
template: {
|
||||
backoffLimit: 1,
|
||||
spec: {
|
||||
volumes: [
|
||||
{
|
||||
name: 'data',
|
||||
persistentVolumeClaim: {
|
||||
claimName: this.pvcName,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'credentials',
|
||||
secret: {
|
||||
secretName: this.secretName,
|
||||
},
|
||||
},
|
||||
],
|
||||
initContainers: [
|
||||
{
|
||||
name: 'clone',
|
||||
image: 'alpine/git',
|
||||
command: [
|
||||
'/bin/sh',
|
||||
'-c',
|
||||
`apk update;
|
||||
apk add git-lfs;
|
||||
export GITHUB_TOKEN=$(cat /credentials/GITHUB_TOKEN);
|
||||
cd /data;
|
||||
git clone https://github.com/${process.env.GITHUB_REPOSITORY}.git repo;
|
||||
git clone https://github.com/webbertakken/unity-builder.git builder;
|
||||
cd repo;
|
||||
git checkout $GITHUB_SHA;
|
||||
ls`,
|
||||
],
|
||||
volumeMounts: [
|
||||
{
|
||||
name: 'data',
|
||||
mountPath: '/data',
|
||||
},
|
||||
{
|
||||
name: 'credentials',
|
||||
mountPath: '/credentials',
|
||||
readOnly: true,
|
||||
},
|
||||
],
|
||||
env: [
|
||||
{
|
||||
name: 'GITHUB_SHA',
|
||||
value: this.buildId,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
containers: [
|
||||
{
|
||||
name: 'main',
|
||||
image: `${this.baseImage.toString()}`,
|
||||
command: [
|
||||
'bin/bash',
|
||||
'-c',
|
||||
`for f in ./credentials/*; do export $(basename $f)="$(cat $f)"; done
|
||||
cp -r /data/builder/action/default-build-script /UnityBuilderAction
|
||||
cp -r /data/builder/action/entrypoint.sh /entrypoint.sh
|
||||
cp -r /data/builder/action/steps /steps
|
||||
chmod -R +x /entrypoint.sh;
|
||||
chmod -R +x /steps;
|
||||
/entrypoint.sh;
|
||||
`,
|
||||
],
|
||||
resources: {
|
||||
requests: {
|
||||
memory: this.buildParameters.kubeContainerMemory,
|
||||
cpu: this.buildParameters.kubeContainerCPU,
|
||||
},
|
||||
},
|
||||
env: [
|
||||
{
|
||||
name: 'GITHUB_WORKSPACE',
|
||||
value: '/data/repo',
|
||||
},
|
||||
{
|
||||
name: 'PROJECT_PATH',
|
||||
value: this.buildParameters.projectPath,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_PATH',
|
||||
value: this.buildParameters.buildPath,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_FILE',
|
||||
value: this.buildParameters.buildFile,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_NAME',
|
||||
value: this.buildParameters.buildName,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_METHOD',
|
||||
value: this.buildParameters.buildMethod,
|
||||
},
|
||||
{
|
||||
name: 'CUSTOM_PARAMETERS',
|
||||
value: this.buildParameters.customParameters,
|
||||
},
|
||||
{
|
||||
name: 'CHOWN_FILES_TO',
|
||||
value: this.buildParameters.chownFilesTo,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_TARGET',
|
||||
value: this.buildParameters.platform,
|
||||
},
|
||||
{
|
||||
name: 'ANDROID_VERSION_CODE',
|
||||
value: this.buildParameters.androidVersionCode.toString(),
|
||||
},
|
||||
{
|
||||
name: 'ANDROID_KEYSTORE_NAME',
|
||||
value: this.buildParameters.androidKeystoreName,
|
||||
},
|
||||
{
|
||||
name: 'ANDROID_KEYALIAS_NAME',
|
||||
value: this.buildParameters.androidKeyaliasName,
|
||||
},
|
||||
],
|
||||
volumeMounts: [
|
||||
{
|
||||
name: 'data',
|
||||
mountPath: '/data',
|
||||
},
|
||||
{
|
||||
name: 'credentials',
|
||||
mountPath: '/credentials',
|
||||
readOnly: true,
|
||||
},
|
||||
],
|
||||
lifeCycle: {
|
||||
preStop: {
|
||||
exec: {
|
||||
command: [
|
||||
'bin/bash',
|
||||
'-c',
|
||||
`cd /data/builder/action/steps;
|
||||
chmod +x /return_license.sh;
|
||||
/return_license.sh;`,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
restartPolicy: 'Never',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
await this.kubeClient.apis.batch.v1.namespaces(this.namespace).jobs.post({ body: jobManifest });
|
||||
core.info('Job created');
|
||||
}
|
||||
|
||||
static async watchBuildJobUntilFinished() {
|
||||
let podname;
|
||||
let ready = false;
|
||||
while (!ready) {
|
||||
await new Promise((resolve) => setTimeout(resolve, pollInterval));
|
||||
const pods = await this.kubeClient.api.v1.namespaces(this.namespace).pods.get();
|
||||
for (let index = 0; index < pods.body.items.length; index++) {
|
||||
const element = pods.body.items[index];
|
||||
if (element.metadata.labels['job-name'] === this.jobName && element.status.phase !== 'Pending') {
|
||||
core.info('Pod no longer pending');
|
||||
if (element.status.phase === 'Failure') {
|
||||
core.error('Kubernetes job failed');
|
||||
} else {
|
||||
ready = true;
|
||||
podname = element.metadata.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
core.info(`Watching build job ${podname}`);
|
||||
let logQueryTime;
|
||||
let complete = false;
|
||||
while (!complete) {
|
||||
await new Promise((resolve) => setTimeout(resolve, pollInterval));
|
||||
|
||||
const podStatus = await this.kubeClient.api.v1.namespaces(this.namespace).pod(podname).get();
|
||||
if (podStatus.body.status.phase !== 'Running') {
|
||||
complete = true;
|
||||
}
|
||||
|
||||
const logs = await this.kubeClient.api.v1
|
||||
.namespaces(this.namespace)
|
||||
.pod(podname)
|
||||
.log.get({
|
||||
qs: {
|
||||
sinceTime: logQueryTime,
|
||||
timestamps: true,
|
||||
},
|
||||
});
|
||||
if (logs.body !== undefined) {
|
||||
const arrayOfLines = logs.body.match(/[^\n\r]+/g).reverse();
|
||||
for (const element of arrayOfLines) {
|
||||
const [time, ...line] = element.split(' ');
|
||||
if (time !== logQueryTime) {
|
||||
core.info(line.join(' '));
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (podStatus.body.status.phase === 'Failed') {
|
||||
throw new Error('Kubernetes job failed');
|
||||
}
|
||||
|
||||
logQueryTime = arrayOfLines[0].split(' ')[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static async cleanup() {
|
||||
await this.kubeClient.apis.batch.v1.namespaces(this.namespace).jobs(this.jobName).delete();
|
||||
await this.kubeClient.api.v1.namespaces(this.namespace).secrets(this.secretName).delete();
|
||||
}
|
||||
|
||||
static uuidv4() {
|
||||
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {
|
||||
const r = Math.trunc(Math.random() * 16);
|
||||
const v = c === 'x' ? r : (r & 0x3) | 0x8;
|
||||
return v.toString(16);
|
||||
});
|
||||
}
|
||||
}
|
||||
export default Kubernetes;
|
||||
@@ -1,253 +0,0 @@
|
||||
import * as SDK from 'aws-sdk';
|
||||
import { customAlphabet } from 'nanoid';
|
||||
import RemoteBuilderSecret from './remote-builder-secret';
|
||||
import RemoteBuilderEnvironmentVariable from './remote-builder-environment-variable';
|
||||
import * as fs from 'fs';
|
||||
import * as core from '@actions/core';
|
||||
import RemoteBuilderTaskDef from './remote-builder-task-def';
|
||||
import RemoteBuilderConstants from './remote-builder-constants';
|
||||
import AWSBuildRunner from './aws-build-runner';
|
||||
|
||||
class AWSBuildEnvironment {
|
||||
static async runBuild(
|
||||
buildId: string,
|
||||
stackName: string,
|
||||
image: string,
|
||||
commands: string[],
|
||||
mountdir: string,
|
||||
workingdir: string,
|
||||
environment: RemoteBuilderEnvironmentVariable[],
|
||||
secrets: RemoteBuilderSecret[],
|
||||
) {
|
||||
const ECS = new SDK.ECS();
|
||||
const CF = new SDK.CloudFormation();
|
||||
const entrypoint = ['/bin/sh'];
|
||||
|
||||
const taskDef = await this.setupCloudFormations(
|
||||
CF,
|
||||
buildId,
|
||||
stackName,
|
||||
image,
|
||||
entrypoint,
|
||||
commands,
|
||||
mountdir,
|
||||
workingdir,
|
||||
secrets,
|
||||
);
|
||||
try {
|
||||
await AWSBuildRunner.runTask(taskDef, ECS, CF, environment, buildId);
|
||||
} finally {
|
||||
await this.cleanupResources(CF, taskDef);
|
||||
}
|
||||
}
|
||||
|
||||
// static async setupPlatformResources() {
|
||||
// throw new Error('Method not implemented.');
|
||||
// }
|
||||
|
||||
static getParameterTemplate(p1) {
|
||||
return `
|
||||
${p1}:
|
||||
Type: String
|
||||
Default: ''
|
||||
`;
|
||||
}
|
||||
|
||||
static getSecretTemplate(p1) {
|
||||
return `
|
||||
${p1}Secret:
|
||||
Type: AWS::SecretsManager::Secret
|
||||
Properties:
|
||||
Name: !Join [ "", [ '${p1}', !Ref BUILDID ] ]
|
||||
SecretString: !Ref ${p1}
|
||||
`;
|
||||
}
|
||||
|
||||
static getSecretDefinitionTemplate(p1, p2) {
|
||||
return `
|
||||
- Name: '${p1}'
|
||||
ValueFrom: !Ref ${p2}Secret
|
||||
`;
|
||||
}
|
||||
|
||||
static insertAtTemplate(template, insertionKey, insertion) {
|
||||
const index = template.search(insertionKey) + insertionKey.length + '\n'.length;
|
||||
template = [template.slice(0, index), insertion, template.slice(index)].join('');
|
||||
return template;
|
||||
}
|
||||
|
||||
static async setupCloudFormations(
|
||||
CF: SDK.CloudFormation,
|
||||
buildUid: string,
|
||||
stackName: string,
|
||||
image: string,
|
||||
entrypoint: string[],
|
||||
commands: string[],
|
||||
mountdir: string,
|
||||
workingdir: string,
|
||||
secrets: RemoteBuilderSecret[],
|
||||
): Promise<RemoteBuilderTaskDef> {
|
||||
const logid = customAlphabet(RemoteBuilderConstants.alphabet, 9)();
|
||||
commands[1] += `
|
||||
echo "${logid}"
|
||||
`;
|
||||
const taskDefStackName = `${stackName}-${buildUid}`;
|
||||
let taskDefCloudFormation = this.readTaskCloudFormationTemplate();
|
||||
const cleanupTaskDefStackName = `${taskDefStackName}-cleanup`;
|
||||
const cleanupCloudFormation = fs.readFileSync(`${__dirname}/cloud-formations/cloudformation-stack-ttl.yml`, 'utf8');
|
||||
|
||||
try {
|
||||
for (const secret of secrets) {
|
||||
taskDefCloudFormation = this.insertAtTemplate(
|
||||
taskDefCloudFormation,
|
||||
'p1 - input',
|
||||
this.getParameterTemplate(secret.ParameterKey.replace(/[^\dA-Za-z]/g, '')),
|
||||
);
|
||||
taskDefCloudFormation = this.insertAtTemplate(
|
||||
taskDefCloudFormation,
|
||||
'p2 - secret',
|
||||
this.getSecretTemplate(secret.ParameterKey.replace(/[^\dA-Za-z]/g, '')),
|
||||
);
|
||||
taskDefCloudFormation = this.insertAtTemplate(
|
||||
taskDefCloudFormation,
|
||||
'p3 - container def',
|
||||
this.getSecretDefinitionTemplate(secret.EnvironmentVariable, secret.ParameterKey.replace(/[^\dA-Za-z]/g, '')),
|
||||
);
|
||||
}
|
||||
const mappedSecrets = secrets.map((x) => {
|
||||
return { ParameterKey: x.ParameterKey.replace(/[^\dA-Za-z]/g, ''), ParameterValue: x.ParameterValue };
|
||||
});
|
||||
|
||||
await CF.createStack({
|
||||
StackName: taskDefStackName,
|
||||
TemplateBody: taskDefCloudFormation,
|
||||
Parameters: [
|
||||
{
|
||||
ParameterKey: 'ImageUrl',
|
||||
ParameterValue: image,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'ServiceName',
|
||||
ParameterValue: taskDefStackName,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'Command',
|
||||
ParameterValue: commands.join(','),
|
||||
},
|
||||
{
|
||||
ParameterKey: 'EntryPoint',
|
||||
ParameterValue: entrypoint.join(','),
|
||||
},
|
||||
{
|
||||
ParameterKey: 'WorkingDirectory',
|
||||
ParameterValue: workingdir,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'EFSMountDirectory',
|
||||
ParameterValue: mountdir,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'BUILDID',
|
||||
ParameterValue: buildUid,
|
||||
},
|
||||
...mappedSecrets,
|
||||
],
|
||||
}).promise();
|
||||
core.info('Creating worker cluster...');
|
||||
await CF.createStack({
|
||||
StackName: cleanupTaskDefStackName,
|
||||
TemplateBody: cleanupCloudFormation,
|
||||
Capabilities: ['CAPABILITY_IAM'],
|
||||
Parameters: [
|
||||
{
|
||||
ParameterKey: 'StackName',
|
||||
ParameterValue: taskDefStackName,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'DeleteStackName',
|
||||
ParameterValue: cleanupTaskDefStackName,
|
||||
},
|
||||
{
|
||||
ParameterKey: 'TTL',
|
||||
ParameterValue: '100',
|
||||
},
|
||||
{
|
||||
ParameterKey: 'BUILDID',
|
||||
ParameterValue: buildUid,
|
||||
},
|
||||
],
|
||||
}).promise();
|
||||
core.info('Creating cleanup cluster...');
|
||||
|
||||
await CF.waitFor('stackCreateComplete', { StackName: taskDefStackName }).promise();
|
||||
} catch (error) {
|
||||
await AWSBuildEnvironment.handleStackCreationFailure(error, CF, taskDefStackName, taskDefCloudFormation, secrets);
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
const taskDefResources = (
|
||||
await CF.describeStackResources({
|
||||
StackName: taskDefStackName,
|
||||
}).promise()
|
||||
).StackResources;
|
||||
|
||||
const baseResources = (await CF.describeStackResources({ StackName: stackName }).promise()).StackResources;
|
||||
|
||||
// In the future we should offer a parameter to choose whether you want the guaranteed shutdown.
|
||||
core.info('Worker cluster created successfully (skipping wait for cleanup cluster to be ready)');
|
||||
|
||||
return {
|
||||
taskDefStackName,
|
||||
taskDefCloudFormation,
|
||||
taskDefStackNameTTL: cleanupTaskDefStackName,
|
||||
ttlCloudFormation: cleanupCloudFormation,
|
||||
taskDefResources,
|
||||
baseResources,
|
||||
logid,
|
||||
};
|
||||
}
|
||||
|
||||
private static async handleStackCreationFailure(
|
||||
error: any,
|
||||
CF: SDK.CloudFormation,
|
||||
taskDefStackName: string,
|
||||
taskDefCloudFormation: string,
|
||||
secrets: RemoteBuilderSecret[],
|
||||
) {
|
||||
core.info(JSON.stringify(secrets, undefined, 4));
|
||||
core.info(taskDefCloudFormation);
|
||||
const events = (await CF.describeStackEvents({ StackName: taskDefStackName }).promise()).StackEvents;
|
||||
const resources = (await CF.describeStackResources({ StackName: taskDefStackName }).promise()).StackResources;
|
||||
core.info(JSON.stringify(events, undefined, 4));
|
||||
core.info(JSON.stringify(resources, undefined, 4));
|
||||
core.error(error);
|
||||
}
|
||||
|
||||
static readTaskCloudFormationTemplate(): string {
|
||||
return fs.readFileSync(`${__dirname}/cloud-formations/task-def-formation.yml`, 'utf8');
|
||||
}
|
||||
|
||||
static async cleanupResources(CF: SDK.CloudFormation, taskDef: RemoteBuilderTaskDef) {
|
||||
core.info('Cleanup starting');
|
||||
await CF.deleteStack({
|
||||
StackName: taskDef.taskDefStackName,
|
||||
}).promise();
|
||||
|
||||
await CF.deleteStack({
|
||||
StackName: taskDef.taskDefStackNameTTL,
|
||||
}).promise();
|
||||
|
||||
await CF.waitFor('stackDeleteComplete', {
|
||||
StackName: taskDef.taskDefStackName,
|
||||
}).promise();
|
||||
|
||||
// Currently too slow and causes too much waiting
|
||||
await CF.waitFor('stackDeleteComplete', {
|
||||
StackName: taskDef.taskDefStackNameTTL,
|
||||
}).promise();
|
||||
|
||||
core.info('Cleanup complete');
|
||||
}
|
||||
}
|
||||
export default AWSBuildEnvironment;
@@ -1,165 +0,0 @@
import * as AWS from 'aws-sdk';
import RemoteBuilderEnvironmentVariable from './remote-builder-environment-variable';
import * as core from '@actions/core';
import RemoteBuilderTaskDef from './remote-builder-task-def';
import * as zlib from 'zlib';

class AWSBuildRunner {
  static async runTask(
    taskDef: RemoteBuilderTaskDef,
    ECS: AWS.ECS,
    CF: AWS.CloudFormation,
    environment: RemoteBuilderEnvironmentVariable[],
    buildUid: string,
  ) {
    const cluster = taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ECSCluster')?.PhysicalResourceId || '';
    const taskDefinition =
      taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'TaskDefinition')?.PhysicalResourceId || '';
    const SubnetOne =
      taskDef.baseResources?.find((x) => x.LogicalResourceId === 'PublicSubnetOne')?.PhysicalResourceId || '';
    const SubnetTwo =
      taskDef.baseResources?.find((x) => x.LogicalResourceId === 'PublicSubnetTwo')?.PhysicalResourceId || '';
    const ContainerSecurityGroup =
      taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ContainerSecurityGroup')?.PhysicalResourceId || '';
    const streamName =
      taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'KinesisStream')?.PhysicalResourceId || '';

    const task = await ECS.runTask({
      cluster,
      taskDefinition,
      platformVersion: '1.4.0',
      overrides: {
        containerOverrides: [
          {
            name: taskDef.taskDefStackName,
            environment: [...environment, { name: 'BUILDID', value: buildUid }],
          },
        ],
      },
      launchType: 'FARGATE',
      networkConfiguration: {
        awsvpcConfiguration: {
          subnets: [SubnetOne, SubnetTwo],
          assignPublicIp: 'ENABLED',
          securityGroups: [ContainerSecurityGroup],
        },
      },
    }).promise();

    core.info('Task is starting on worker cluster');
    const taskArn = task.tasks?.[0].taskArn || '';

    try {
      await ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
    } catch (error) {
      await new Promise((resolve) => setTimeout(resolve, 3000));
      const describeTasks = await ECS.describeTasks({
        tasks: [taskArn],
        cluster,
      }).promise();
      core.info(`Task has ended ${describeTasks.tasks?.[0].containers?.[0].lastStatus}`);
      core.setFailed(error);
      core.error(error);
    }
    core.info(`Task is running on worker cluster`);
    await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
    await ECS.waitFor('tasksStopped', { cluster, tasks: [taskArn] }).promise();
    const exitCode = (
      await ECS.describeTasks({
        tasks: [taskArn],
        cluster,
      }).promise()
    ).tasks?.[0].containers?.[0].exitCode;
    if (exitCode !== 0) {
      core.error(`job failed with exit code ${exitCode}`);
      throw new Error(`job failed with exit code ${exitCode}`);
    } else {
      core.info(`Task has finished successfully`);
    }
  }

  static async streamLogsUntilTaskStops(
    ECS: AWS.ECS,
    CF: AWS.CloudFormation,
    taskDef: RemoteBuilderTaskDef,
    clusterName: string,
    taskArn: string,
    kinesisStreamName: string,
  ) {
    // watching logs
    const kinesis = new AWS.Kinesis();

    const getTaskData = async () => {
      const tasks = await ECS.describeTasks({
        cluster: clusterName,
        tasks: [taskArn],
      }).promise();
      return tasks.tasks?.[0];
    };

    const stream = await kinesis
      .describeStream({
        StreamName: kinesisStreamName,
      })
      .promise();

    let iterator =
      (
        await kinesis
          .getShardIterator({
            ShardIteratorType: 'TRIM_HORIZON',
            StreamName: stream.StreamDescription.StreamName,
            ShardId: stream.StreamDescription.Shards[0].ShardId,
          })
          .promise()
      ).ShardIterator || '';

    await CF.waitFor('stackCreateComplete', { StackName: taskDef.taskDefStackNameTTL }).promise();

    core.info(`Task status is ${(await getTaskData())?.lastStatus}`);

    const logBaseUrl = `https://${AWS.config.region}.console.aws.amazon.com/cloudwatch/home?region=${AWS.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
    core.info(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);

    let readingLogs = true;
    let timestamp: number = 0;
    while (readingLogs) {
      await new Promise((resolve) => setTimeout(resolve, 1500));
      const taskData = await getTaskData();
      if (taskData?.lastStatus !== 'RUNNING') {
        if (timestamp === 0) {
          core.info('Task stopped, streaming end of logs');
          timestamp = Date.now();
        }
        if (timestamp !== 0 && Date.now() - timestamp > 30000) {
          core.info('Task status has not been RUNNING for 30 seconds, last query for logs');
          readingLogs = false;
        }
      }
      const records = await kinesis
        .getRecords({
          ShardIterator: iterator,
        })
        .promise();
      iterator = records.NextShardIterator || '';
      if (records.Records.length > 0 && iterator) {
        for (let index = 0; index < records.Records.length; index++) {
          const json = JSON.parse(
            zlib.gunzipSync(Buffer.from(records.Records[index].Data as string, 'base64')).toString('utf8'),
          );
          if (json.messageType === 'DATA_MESSAGE') {
            for (let logEventsIndex = 0; logEventsIndex < json.logEvents.length; logEventsIndex++) {
              if (json.logEvents[logEventsIndex].message.includes(taskDef.logid)) {
                core.info('End of task logs');
                readingLogs = false;
              } else {
                core.info(json.logEvents[logEventsIndex].message);
              }
            }
          }
        }
      }
    }
  }
}
export default AWSBuildRunner;
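// Illustrative sketch, not part of this commit: `decodeLogRecords` is a hypothetical helper that
// isolates the record decoding used in `streamLogsUntilTaskStops` above. It assumes the Kinesis
// records come from a CloudWatch Logs subscription, i.e. gzipped, base64-encoded JSON containing
// `messageType` and `logEvents`.
import * as zlib from 'zlib';

function decodeLogRecords(records: { Data: string | Buffer }[]): string[] {
  const messages: string[] = [];
  for (const record of records) {
    // Each Data payload is gzipped JSON; Buffer.from copies raw buffers and decodes base64 strings.
    const json = JSON.parse(zlib.gunzipSync(Buffer.from(record.Data as any, 'base64')).toString('utf8'));
    if (json.messageType === 'DATA_MESSAGE') {
      for (const logEvent of json.logEvents) {
        messages.push(logEvent.message);
      }
    }
  }
  return messages;
}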
@@ -1,4 +0,0 @@
class RemoteBuilderConstants {
  static alphabet = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';
}
export default RemoteBuilderConstants;
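// Illustrative sketch, not part of this commit: the alphabet above is meant for nanoid's
// customAlphabet, which RemoteBuilder uses below to generate the short random suffix in build UIDs
// (the length of 4 mirrors that usage).
import { customAlphabet } from 'nanoid';
const shortId = customAlphabet(RemoteBuilderConstants.alphabet, 4);
// shortId() returns e.g. 'a9Qz'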
@@ -1,5 +0,0 @@
class RemoteBuilderEnvironmentVariable {
  public name!: string;
  public value!: string;
}
export default RemoteBuilderEnvironmentVariable;
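// Illustrative sketch, not part of this commit: instances are plain name/value pairs that get
// forwarded to the ECS container overrides, for example:
const exampleEnvironment: RemoteBuilderEnvironmentVariable[] = [
  { name: 'GITHUB_SHA', value: process.env.GITHUB_SHA || '' },
];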
@@ -1,419 +0,0 @@
import { customAlphabet } from 'nanoid';
import AWSBuildPlatform from './aws-build-platform';
import * as core from '@actions/core';
import RemoteBuilderConstants from './remote-builder-constants';
import { BuildParameters } from '..';
const repositoryDirectoryName = 'repo';
const efsDirectoryName = 'data';
const cacheDirectoryName = 'cache';

class RemoteBuilder {
  static SteamDeploy: boolean = false;
  static async build(buildParameters: BuildParameters, baseImage) {
    try {
      this.SteamDeploy = process.env.STEAM_DEPLOY !== undefined || false;
      const nanoid = customAlphabet(RemoteBuilderConstants.alphabet, 4);
      const buildUid = `${process.env.GITHUB_RUN_NUMBER}-${buildParameters.platform
        .replace('Standalone', '')
        .replace('standalone', '')}-${nanoid()}`;
      const defaultBranchName =
        process.env.GITHUB_REF?.split('/')
          .filter((x) => {
            x = x[0].toUpperCase() + x.slice(1);
            return x;
          })
          .join('') || '';
      const branchName =
        process.env.REMOTE_BUILDER_CACHE !== undefined ? process.env.REMOTE_BUILDER_CACHE : defaultBranchName;
      const token: string = buildParameters.githubToken;
      const defaultSecretsArray = [
        {
          ParameterKey: 'GithubToken',
          EnvironmentVariable: 'GITHUB_TOKEN',
          ParameterValue: token,
        },
      ];
      await RemoteBuilder.SetupStep(buildUid, buildParameters, branchName, defaultSecretsArray);
      await RemoteBuilder.BuildStep(buildUid, buildParameters, baseImage, defaultSecretsArray);
      await RemoteBuilder.CompressionStep(buildUid, buildParameters, branchName, defaultSecretsArray);
      await RemoteBuilder.UploadArtifacts(buildUid, buildParameters, branchName, defaultSecretsArray);
      if (this.SteamDeploy) {
        await RemoteBuilder.DeployToSteam(buildUid, buildParameters, defaultSecretsArray);
      }
    } catch (error) {
      core.setFailed(error);
      core.error(error);
    }
  }

  private static async SetupStep(
    buildUid: string,
    buildParameters: BuildParameters,
    branchName: string | undefined,
    defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {
    core.info('Starting step 1/4 (clone and restore cache)');
    await AWSBuildPlatform.runBuild(
      buildUid,
      buildParameters.awsStackName,
      'alpine/git',
      [
        '-c',
        `apk update;
          apk add unzip;
          apk add git-lfs;
          apk add jq;
          # Get source repo for project to be built and game-ci repo for utilities
          git clone https://${buildParameters.githubToken}@github.com/${process.env.GITHUB_REPOSITORY}.git ${buildUid}/${repositoryDirectoryName} -q
          git clone https://${buildParameters.githubToken}@github.com/game-ci/unity-builder.git ${buildUid}/builder -q
          git clone https://${buildParameters.githubToken}@github.com/game-ci/steam-deploy.git ${buildUid}/steam -q
          cd /${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/
          git checkout $GITHUB_SHA
          cd /${efsDirectoryName}/
          # Look for usable cache
          if [ ! -d ${cacheDirectoryName} ]; then
            mkdir ${cacheDirectoryName}
          fi
          cd ${cacheDirectoryName}
          if [ ! -d "${branchName}" ]; then
            mkdir "${branchName}"
          fi
          cd "${branchName}"
          echo ''
          echo "Cached Libraries for ${branchName} from previous builds:"
          ls
          echo ''
          ls "/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}"
          libDir="/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}/Library"
          if [ -d "$libDir" ]; then
            rm -r "$libDir"
            echo "Setup .gitignore to ignore Library folder and remove it from builds"
          fi
          echo 'Checking cache'
          # Restore cache
          latest=$(ls -t | head -1)
          if [ ! -z "$latest" ]; then
            echo "Library cache exists from build $latest from ${branchName}"
            echo 'Creating empty Library folder for cache'
            mkdir $libDir
            unzip -q $latest -d $libDir
            # purge cache
            ${process.env.PURGE_REMOTE_BUILDER_CACHE === undefined ? '#' : ''} rm -r $libDir
          else
            echo 'Cache does not exist'
          fi
          # Print out important directories
          echo ''
          echo 'Repo:'
          ls /${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/
          echo ''
          echo 'Project:'
          ls /${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}
          echo ''
          echo 'Library:'
          ls /${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}/Library/
          echo ''
        `,
      ],
      `/${efsDirectoryName}`,
      `/${efsDirectoryName}/`,
      [
        {
          name: 'GITHUB_SHA',
          value: process.env.GITHUB_SHA || '',
        },
      ],
      defaultSecretsArray,
    );
  }

  private static async BuildStep(
    buildUid: string,
    buildParameters: BuildParameters,
    baseImage: any,
    defaultSecretsArray: any[],
  ) {
    const buildSecrets = new Array();

    buildSecrets.push(...defaultSecretsArray);

    if (process.env.UNITY_LICENSE)
      buildSecrets.push({
        ParameterKey: 'UnityLicense',
        EnvironmentVariable: 'UNITY_LICENSE',
        ParameterValue: process.env.UNITY_LICENSE,
      });

    if (process.env.UNITY_EMAIL)
      buildSecrets.push({
        ParameterKey: 'UnityEmail',
        EnvironmentVariable: 'UNITY_EMAIL',
        ParameterValue: process.env.UNITY_EMAIL,
      });

    if (process.env.UNITY_PASSWORD)
      buildSecrets.push({
        ParameterKey: 'UnityPassword',
        EnvironmentVariable: 'UNITY_PASSWORD',
        ParameterValue: process.env.UNITY_PASSWORD,
      });

    if (process.env.UNITY_SERIAL)
      buildSecrets.push({
        ParameterKey: 'UnitySerial',
        EnvironmentVariable: 'UNITY_SERIAL',
        ParameterValue: process.env.UNITY_SERIAL,
      });

    if (buildParameters.androidKeystoreBase64)
      buildSecrets.push({
        ParameterKey: 'AndroidKeystoreBase64',
        EnvironmentVariable: 'ANDROID_KEYSTORE_BASE64',
        ParameterValue: buildParameters.androidKeystoreBase64,
      });

    if (buildParameters.androidKeystorePass)
      buildSecrets.push({
        ParameterKey: 'AndroidKeystorePass',
        EnvironmentVariable: 'ANDROID_KEYSTORE_PASS',
        ParameterValue: buildParameters.androidKeystorePass,
      });

    if (buildParameters.androidKeyaliasPass)
      buildSecrets.push({
        ParameterKey: 'AndroidKeyAliasPass',
        EnvironmentVariable: 'AWS_ACCESS_KEY_ALIAS_PASS',
        ParameterValue: buildParameters.androidKeyaliasPass,
      });
    core.info('Starting step 2/4 (build unity project)');
    await AWSBuildPlatform.runBuild(
      buildUid,
      buildParameters.awsStackName,
      baseImage.toString(),
      [
        '-c',
        `
          cp -r /${efsDirectoryName}/${buildUid}/builder/dist/default-build-script/ /UnityBuilderAction;
          cp -r /${efsDirectoryName}/${buildUid}/builder/dist/entrypoint.sh /entrypoint.sh;
          cp -r /${efsDirectoryName}/${buildUid}/builder/dist/steps/ /steps;
          chmod -R +x /entrypoint.sh;
          chmod -R +x /steps;
          /entrypoint.sh;
        `,
      ],
      `/${efsDirectoryName}`,
      `/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/`,
      [
        {
          name: 'ContainerMemory',
          value: buildParameters.remoteBuildMemory,
        },
        {
          name: 'ContainerCpu',
          value: buildParameters.remoteBuildCpu,
        },
        {
          name: 'GITHUB_WORKSPACE',
          value: `/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/`,
        },
        {
          name: 'PROJECT_PATH',
          value: buildParameters.projectPath,
        },
        {
          name: 'BUILD_PATH',
          value: buildParameters.buildPath,
        },
        {
          name: 'BUILD_FILE',
          value: buildParameters.buildFile,
        },
        {
          name: 'BUILD_NAME',
          value: buildParameters.buildName,
        },
        {
          name: 'BUILD_METHOD',
          value: buildParameters.buildMethod,
        },
        {
          name: 'CUSTOM_PARAMETERS',
          value: buildParameters.customParameters,
        },
        {
          name: 'BUILD_TARGET',
          value: buildParameters.platform,
        },
        {
          name: 'ANDROID_VERSION_CODE',
          value: buildParameters.androidVersionCode.toString(),
        },
        {
          name: 'ANDROID_KEYSTORE_NAME',
          value: buildParameters.androidKeystoreName,
        },
        {
          name: 'ANDROID_KEYALIAS_NAME',
          value: buildParameters.androidKeyaliasName,
        },
      ],
      buildSecrets,
    );
  }

  private static async CompressionStep(
    buildUid: string,
    buildParameters: BuildParameters,
    branchName: string | undefined,
    defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {
    core.info('Starting step 3/4 build compression');
    // Cleanup
    await AWSBuildPlatform.runBuild(
      buildUid,
      buildParameters.awsStackName,
      'alpine',
      [
        '-c',
        `
          apk update
          apk add zip
          cd Library
          zip -r lib-${buildUid}.zip .*
          mv lib-${buildUid}.zip /${efsDirectoryName}/${cacheDirectoryName}/${branchName}/lib-${buildUid}.zip
          cd ../../
          zip -r build-${buildUid}.zip ${buildParameters.buildPath}/*
          mv build-${buildUid}.zip /${efsDirectoryName}/${buildUid}/build-${buildUid}.zip
        `,
      ],
      `/${efsDirectoryName}`,
      `/${efsDirectoryName}/${buildUid}/${repositoryDirectoryName}/${buildParameters.projectPath}`,
      [
        {
          name: 'GITHUB_SHA',
          value: process.env.GITHUB_SHA || '',
        },
      ],
      defaultSecretsArray,
    );
    core.info('compression step complete');
  }

  private static async UploadArtifacts(
    buildUid: string,
    buildParameters: BuildParameters,
    branchName: string | undefined,
    defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {
    core.info('Starting step 4/4 upload build to s3');
    await AWSBuildPlatform.runBuild(
      buildUid,
      buildParameters.awsStackName,
      'amazon/aws-cli',
      [
        '-c',
        `
          aws s3 cp ${buildUid}/build-${buildUid}.zip s3://game-ci-storage/
          # no need to upload Library cache for now
          # aws s3 cp /${efsDirectoryName}/${cacheDirectoryName}/${branchName}/lib-${buildUid}.zip s3://game-ci-storage/
          ${this.SteamDeploy ? '#' : ''} rm -r ${buildUid}
        `,
      ],
      `/${efsDirectoryName}`,
      `/${efsDirectoryName}/`,
      [
        {
          name: 'GITHUB_SHA',
          value: process.env.GITHUB_SHA || '',
        },
        {
          name: 'AWS_DEFAULT_REGION',
          value: process.env.AWS_DEFAULT_REGION || '',
        },
      ],
      [
        {
          ParameterKey: 'AWSAccessKeyID',
          EnvironmentVariable: 'AWS_ACCESS_KEY_ID',
          ParameterValue: process.env.AWS_ACCESS_KEY_ID || '',
        },
        {
          ParameterKey: 'AWSSecretAccessKey',
          EnvironmentVariable: 'AWS_SECRET_ACCESS_KEY',
          ParameterValue: process.env.AWS_SECRET_ACCESS_KEY || '',
        },
        ...defaultSecretsArray,
      ],
    );
  }

  private static async DeployToSteam(
    buildUid: string,
    buildParameters: BuildParameters,
    defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {
    core.info('Starting steam deployment');
    await AWSBuildPlatform.runBuild(
      buildUid,
      buildParameters.awsStackName,
      'cm2network/steamcmd:root',
      [
        '-c',
        `
          ls
          ls /
          cp -r /${efsDirectoryName}/${buildUid}/steam/action/entrypoint.sh /entrypoint.sh;
          cp -r /${efsDirectoryName}/${buildUid}/steam/action/steps/ /steps;
          chmod -R +x /entrypoint.sh;
          chmod -R +x /steps;
          /entrypoint.sh;
          rm -r /${efsDirectoryName}/${buildUid}
        `,
      ],
      `/${efsDirectoryName}`,
      `/${efsDirectoryName}/${buildUid}/steam/action/`,
      [
        {
          name: 'GITHUB_SHA',
          value: process.env.GITHUB_SHA || '',
        },
      ],
      [
        {
          EnvironmentVariable: 'INPUT_APPID',
          ParameterKey: 'appId',
          ParameterValue: process.env.APP_ID || '',
        },
        {
          EnvironmentVariable: 'INPUT_BUILDDESCRIPTION',
          ParameterKey: 'buildDescription',
          ParameterValue: process.env.BUILD_DESCRIPTION || '',
        },
        {
          EnvironmentVariable: 'INPUT_ROOTPATH',
          ParameterKey: 'rootPath',
          ParameterValue: process.env.ROOT_PATH || '',
        },
        {
          EnvironmentVariable: 'INPUT_RELEASEBRANCH',
          ParameterKey: 'releaseBranch',
          ParameterValue: process.env.RELEASE_BRANCH || '',
        },
        {
          EnvironmentVariable: 'INPUT_LOCALCONTENTSERVER',
          ParameterKey: 'localContentServer',
          ParameterValue: process.env.LOCAL_CONTENT_SERVER || '',
        },
        {
          EnvironmentVariable: 'INPUT_PREVIEWENABLED',
          ParameterKey: 'previewEnabled',
          ParameterValue: process.env.PREVIEW_ENABLED || '',
        },
        ...defaultSecretsArray,
      ],
    );
  }
}
export default RemoteBuilder;
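// Illustrative sketch, not part of this commit: a hypothetical helper showing the
// conditional-secret pattern repeated in BuildStep above, where a secret is only forwarded when
// its source value is actually set.
function pushSecretIfSet(
  secrets: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ParameterKey: string,
  EnvironmentVariable: string,
  ParameterValue: string | undefined,
) {
  if (ParameterValue) {
    secrets.push({ ParameterKey, EnvironmentVariable, ParameterValue });
  }
}
// Example: pushSecretIfSet(buildSecrets, 'UnityLicense', 'UNITY_LICENSE', process.env.UNITY_LICENSE);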
@@ -2,7 +2,7 @@ import * as core from '@actions/core';
import { exec } from '@actions/exec';

class System {
  static async run(command, arguments_: any = [], options = {}) {
  static async run(command, arguments_: any = [], options = {}, shouldLog = true) {
    let result = '';
    let error = '';
    let debug = '';
@@ -20,15 +20,15 @@ class System {
    };

    const showOutput = () => {
      if (debug !== '') {
      if (debug !== '' && shouldLog) {
        core.debug(debug);
      }

      if (result !== '') {
      if (result !== '' && shouldLog) {
        core.info(result);
      }

      if (error !== '') {
      if (error !== '' && shouldLog) {
        core.warning(error);
      }
    };
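// Illustrative usage of the new shouldLog flag (hypothetical call, values are examples): passing
// false suppresses the debug/info/warning echo while the command output is still captured.
// const sha = await System.run('git', ['rev-parse', 'HEAD'], {}, false);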