Fixed gitignore

This commit is contained in:
Dunemask 2022-07-18 21:43:10 +00:00
parent 338000684b
commit 61072ee032
16 changed files with 721 additions and 1 deletions

61
lib/jobs/JobManager.js Normal file
View file

@ -0,0 +1,61 @@
import { v4 } from "uuid";
import applyJob from "./k8s/kubernetes.js";
import buildJob from "./job-builder.js";
// Cap of concurrently-active jobs per client (MAX_JOBS env var, default 3).
// Fix: always pass a radix to parseInt.
const maxJobs = process.env.MAX_JOBS ? parseInt(process.env.MAX_JOBS, 10) : 3;

/**
 * Tracks per-client job queues, their logs, and exit codes.
 * Jobs are created via newJob(), which also applies them to the cluster.
 */
class JobManager {
  constructor() {
    this.clientMaxJobs = maxJobs;
    this.clients = {};
  }

  /** Look up a job by client and job id; undefined if either is unknown. */
  getJob(clientId, jobId) {
    // Optional chaining: an unknown clientId previously threw a TypeError.
    return this.clients[clientId]?.jobs.find((j) => j.id === jobId);
  }

  /** Scan every client's queue for a job with the given id. */
  getJobById(jobId) {
    for (const client of Object.values(this.clients)) {
      const job = client.jobs.find((j) => j.id === jobId);
      if (job) return job;
    }
  }

  /** Append a line (or array of lines) to a job's log. */
  pushLog(jobId, log) {
    const job = this.getJobById(jobId);
    // Explicit error instead of a TypeError on `job.log` of undefined.
    if (!job) throw Error(`No job with id '${jobId}'!`);
    if (log instanceof Array) job.log.push(...log);
    else job.log.push(log);
  }

  /** Mark a job finished by recording its exit code. */
  closeJob(jobId, exitcode) {
    const job = this.getJobById(jobId);
    if (!job) throw Error(`No job with id '${jobId}'!`);
    job.exitcode = exitcode;
  }

  /**
   * Build, register, and apply a new job for a client.
   * Throws when the client already has clientMaxJobs unfinished jobs
   * (a job is "active" while its exitcode is still undefined).
   * Returns a shallow copy of the stored job.
   */
  newJob(jobRequest, id) {
    if (!jobRequest) throw Error("Request Must Be Object!");
    if (!this.clients[id]) this.clients[id] = { jobs: [] };
    const client = this.clients[id];
    const active = client.jobs.filter((j) => j.exitcode === undefined).length;
    if (active >= this.clientMaxJobs)
      throw Error("Client's Active Jobs Exceeded!");
    const job = buildJob(jobRequest, id);
    job.id = v4();
    job.log = [];
    client.jobs.push(job);
    applyJob(job);
    return { ...job };
  }

  /** Drop a job from a client's queue; no-op for unknown clients. */
  removeJob(clientId, id) {
    const client = this.clients[clientId];
    if (!client) return;
    client.jobs = client.jobs.filter((j) => j.id !== id);
  }
}
export default new JobManager();

View file

@ -0,0 +1,48 @@
import { URL } from "url";
import loadConfigFile from "rollup/loadConfigFile";
import path from "path";
import { rollup } from "rollup";
import caxa from "caxa";
import { verify, normalize } from "./executor-configurator.js";
// Load the project-local executor config relative to the process CWD
// (not this file), so each project supplies its own executor.config.js.
const { default: executorConfig } = await import(path.resolve("executor.config.js"));
// ES-module replacement for CommonJS __dirname.
const __dirname = new URL(".", import.meta.url).pathname;
// caxa ships a default export; unwrap it once here.
const { default: caxaPackage } = caxa;
// Sanity-check the executor config before bundling anything:
// normalize() lifts static values to thunks, verify() rejects non-functions.
const testConfig = () => {
  console.log("Testing config");
  const normalized = normalize(executorConfig([]));
  verify(normalized);
};
// Wrap the rolled-up bundle into a self-extracting executor binary via caxa.
async function packageBin() {
  console.log("Packaging bundle into binary");
  const options = {
    input: "dist/bundles/",
    output: "bin/executor",
    // {{caxa}} is expanded by caxa to the unpacked payload directory.
    command: [
      "{{caxa}}/node_modules/.bin/node",
      "{{caxa}}/qualiteer-executor.mjs",
    ],
    uncompressionMessage: "Unpacking, please wait...",
  };
  return caxaPackage(options);
}
// Bundle the executor entrypoint using the sibling rollup.config.js.
async function rollupBundle() {
  console.log("Rolling up executor into bundle");
  const configPath = path.resolve(__dirname, "rollup.config.js");
  const { options, warnings } = await loadConfigFile(configPath);
  console.log(`Rollup has ${warnings.count} warnings`);
  warnings.flush();
  // NOTE(review): rollup docs also call bundle.close() after writing —
  // consider adding once verified against the rollup version in use.
  for (const optionsObj of options) {
    const bundle = await rollup(optionsObj);
    await Promise.all(optionsObj.output.map(bundle.write));
  }
}
// Build pipeline: validate config, roll up the bundle, then package it.
testConfig();
await rollupBundle();
await packageBin();
console.log("Done");

View file

@ -0,0 +1,17 @@
// Wrap a static value in a zero-argument getter so it satisfies the
// functions-only config contract enforced by verify().
const funcify = (value) => {
  return () => value;
};
/**
 * Ensure every config option is callable.
 * @param {Object} config - normalized executor config
 * @throws {Error} when any value is not a function
 */
export function verify(config) {
  for (const key of Object.keys(config)) {
    if (typeof config[key] === "function") continue;
    throw Error("All config options must be functions!");
  }
}
/**
 * Return a copy of `conf` where every non-function value is lifted into a
 * zero-argument function returning that value. The input is not mutated;
 * existing function values are kept as-is.
 */
export function normalize(conf) {
  const config = { ...conf };
  for (const key of Object.keys(config)) {
    const value = config[key];
    if (typeof value === "function") continue;
    config[key] = () => value;
  }
  return config;
}

View file

@ -0,0 +1,13 @@
import path from "node:path";
import Executor from "../../sockets/clients/Executor.js";
import { normalize } from "./executor-configurator.js";
// Resolve the project's executor config relative to the process CWD.
const { default: executorConfig } = await import(
  path.resolve("executor.config.js")
);
// CLI args (minus node + script path) feed both the config and the client.
const argv = process.argv.slice(2);
const config = normalize(executorConfig(argv));
// Start Executor
const executor = new Executor(argv, config);
executor.runJob();

View file

@ -0,0 +1,11 @@
import { nodeResolve } from "@rollup/plugin-node-resolve";
import commonjs from "@rollup/plugin-commonjs";
import { terser } from "rollup-plugin-terser";
// Rollup config: bundle the executor entrypoint (with its dependencies
// resolved and minified) into a single ESM file consumed by caxa packaging.
export default {
  input: "lib/jobs/executor/executor-entrypoint.js",
  output: {
    file: "dist/bundles/qualiteer-executor.mjs",
  },
  plugins: [nodeResolve(), commonjs(), terser()],
};

59
lib/jobs/job-builder.js Normal file
View file

@ -0,0 +1,59 @@
// Base invocation shared by every test-suite job.
const baseCommand = "node";
const suiteEntry = "tests/assets/suite/runner.js";
// Static mapping of compound-test pipelines; `delay` appears to be the ms
// wait before the follow-up test fires — TODO confirm with the runner.
const pipelineMapping = [
  {
    id: 0,
    pipeline: [{ name: "primary" }, { name: "secondary", delay: 5000 }],
  },
];
// Validate the request and build the command line common to all job types.
const buildCommon = ({ testName, ...rest }) => {
  if (!testName) throw Error("'testName' must be provided!");
  // Shared flags: isRetry is always false on a first launch.
  const command = [baseCommand, suiteEntry, `test=${testName}`, "isRetry=false"];
  // Return a new request object; the original is untouched.
  return { ...rest, testName, command };
};
// Single-test jobs need nothing beyond the common command.
const buildSingle = (jobReq) => {
  return jobReq;
};
// TODO: marker/project builders are unimplemented — they return undefined,
// so jobBuilder currently yields no job for these request types.
const buildMarker = (jobReq) => {};
const buildProject = (jobReq) => {};
// Longest pipeline (in tests) containing `testName`, plus one.
const pipelineMaxLife = (testName) => {
  const pipelines = pipelineMapping
    .filter((m) => m.pipeline.find((t) => t.name === testName))
    .map((m) => m.pipeline);
  // Test appears in no pipeline: its lifetime is just itself.
  if (pipelines.length === 0) return 1;
  // BUG FIX: Math.max(array) returns NaN — the lengths must be spread.
  return Math.max(...pipelines.map((p) => p.length)) + 1;
};
// Append compound-pipeline flags to the job's command (mutated in place)
// so the runner can report back to the dashboard socket.
const buildCompound = (jobReq, socketId) => {
  const { command, pipelineTriggers } = jobReq;
  if (pipelineTriggers) command.push(`pipelineTriggers=${pipelineTriggers}`);
  command.push(`pipelineDashboardSocket=${socketId}`);
  return { ...jobReq, command };
};
// TODO: unimplemented — presumably meant to chain the next pipeline test.
function nextCompound(previousTest) {}
/**
 * Build a runnable job from a request: apply the common command, then
 * dispatch to the builder for the request's declared type.
 * @throws {Error} when the type is missing or unrecognized
 */
export default function jobBuilder(jobRequest, id) {
  const jobReq = buildCommon(jobRequest, id);
  const { type } = jobRequest;
  if (type === "single") return buildSingle(jobReq);
  if (type === "marker") return buildMarker(jobReq);
  if (type === "project") return buildProject(jobReq);
  if (type === "compound") return buildCompound(jobReq, id);
  throw Error("No Job Request Type Specified!");
}

9
lib/jobs/job-executor.js Normal file
View file

@ -0,0 +1,9 @@
import Executor from "../sockets/clients/Executor.js";
// CLI shape: <url> <jobId> <...command>
const [url, jobId, ...command] = process.argv.slice(2);
const job = { id: jobId, command };
// Hand the job off to the socket-based executor client.
const executor = new Executor(url, job, command);
executor.runJob();

View file

@ -0,0 +1,14 @@
import { INFO, ERR, OK, VERB } from "../../util/logging.js";
import cp from "node:child_process";
// Internal deploy mode: run the job's container command in a local child
// process instead of a Kubernetes pod. argv[2] is the serialized manifest.
const [jobStr] = process.argv.slice(2);
const job = JSON.parse(jobStr);
const { command } = job.spec.template.spec.containers[0];
INFO("EXEC", "Internal Executor Starting!");
cp.exec(command, (error, stdout, stderr) => {
  if (error) ERR("EXEC", error);
  //if(stdout) VERB("EXEC-STDOUT", stdout);
  //if(stderr) VERB("EXEC-STDERR", stderr);
  OK("EXEC", "Internal Executor Finished!");
  // Exit code mirrors the child's success/failure.
  process.exit(error ? 1 : 0);
});

33
lib/jobs/k8s/k8s-job.json Normal file
View file

@ -0,0 +1,33 @@
{
"apiVersion": "batch/v1",
"kind": "Job",
"metadata": {
"name": "qltr-job-test-suite-1"
},
"spec": {
"template": {
"spec": {
"containers": [
{
"resources": {
"requests": {
"memory": "64MI",
"cpu": "250m"
},
"limits": {
"memory": "128MI",
"cpu": "500m"
}
},
"name": "qltr-job-test-suite-1",
"image": "node",
"imagePullPolicy": "Always",
"command": ["node", "--version"]
}
],
"restartPolicy": "Never"
}
},
"backoffLimit": 4
}
}

View file

@ -0,0 +1,84 @@
import cp from "child_process";
import fs from "fs";
import path from "path";
// Deployment mode and executor-delivery settings, all from the environment.
const internalDeploy = process.env.INTERNAL_DEPLOY === "true";
const executorUrl = process.env.EXECUTOR_URL;
const executorScriptOnly = process.env.EXECUTOR_SCRIPT_ONLY === "true";
// Name of the executor the job container downloads (".js" in script mode).
const executorBin =
  process.env.EXECUTOR_BIN ?? `qltr-executor${executorScriptOnly ? ".js" : ""}`;
// URL handed to the executor as its first argument; default is a local path.
const qualiteerUrl =
  process.env.QUALITEER_URL ?? "file:///home/runner/Qualiteer/bin/executor";
const kubCmd = "kubectl apply -f";
const jobsDir = "jobs/";
// Job manifest template, loaded once at startup (path relative to CWD).
const defaults = JSON.parse(
  fs.readFileSync(path.resolve("./lib/jobs/k8s/k8s-job.json"))
);
// Build the shell command a job container runs: download the executor,
// then invoke it (as a script or binary) with the job's arguments.
const wrapCommand = (jobId, command) => {
  const bin = executorScriptOnly
    ? `node ${executorBin}`
    : `chmod +x ${executorBin} && ./${executorBin}`;
  // JSON.stringify quotes each argument so it survives the shell.
  const quoted = command.map((arg) => JSON.stringify(arg)).join(" ");
  return `curl -o qltr-executor ${executorUrl} && ${bin} ${qualiteerUrl} ${jobId} ${quoted}`;
};
// Write the manifest to <jobsDir>/<job name>.json (creating the directory
// on demand) and return the file's absolute path.
const createFile = (job) => {
  const { name } = job.metadata;
  const dirPath = path.resolve(jobsDir);
  if (!fs.existsSync(dirPath)) fs.mkdirSync(dirPath);
  const filePath = path.resolve(jobsDir, `${name}.json`);
  fs.writeFileSync(filePath, JSON.stringify(job));
  return filePath;
};
// Internal mode: forward the manifest to the bypass script in a child
// Node process rather than applying it with kubectl.
const applyFileInternally = (filePath) => {
  const manifest = fs.readFileSync(filePath, { encoding: "utf8" });
  cp.fork(path.resolve("./lib/jobs/k8s/k8s-bypass.js"), [manifest]);
};
// Apply the manifest with kubectl; resolves with stdout, rejects on error.
const applyFile = async (filePath) => {
  return new Promise((resolve, reject) => {
    cp.exec(`${kubCmd} ${filePath}`, (err, stdout, stderr) => {
      if (err) return reject(err);
      resolve(stdout);
    });
  });
};
// Remove the temporary manifest once it has been applied.
const deleteFile = (filePath) => {
  fs.unlinkSync(filePath);
};
// Build a Kubernetes Job manifest from a job request.
const jobBuilder = (jobRequest) => {
  const { resources, name, image, command, id: jobId } = jobRequest;
  // Safety Checks
  if (!jobId) throw Error("'jobId' required!");
  if (!name) throw Error("'name' required!");
  if (!command) throw Error("'command' required!");
  if (!image) throw Error("'image' required!");
  if (!Array.isArray(command)) throw Error("'command' must be an array!");
  // BUG FIX: a shallow { ...defaults } shared the nested metadata/spec
  // objects across every job, so each build mutated all previous jobs.
  const job = structuredClone(defaults);
  job.metadata.name = `qltr-${name}-${jobId}`;
  const container = job.spec.template.spec.containers[0];
  container.name = job.metadata.name;
  container.command = wrapCommand(jobId, command);
  // BUG FIX: JSON.stringify(image) embedded literal quotes in the image name.
  container.image = image;
  // BUG FIX: resource overrides belong on the container spec, not the Job
  // root (where Kubernetes ignores/rejects them). Merge over the defaults.
  container.resources = { ...container.resources, ...resources };
  return job;
};
/**
 * Build the manifest for a job request, write it to disk, apply it
 * (via kubectl or the internal bypass), then remove the temp file.
 */
export default async function createJob(jobRequest) {
  const job = jobBuilder(jobRequest);
  const filePath = createFile(job);
  const apply = internalDeploy ? applyFileInternally : applyFile;
  await apply(filePath);
  deleteFile(filePath);
}