Removed libold

parent a90c28dd76
commit 08539db60b

51 changed files with 0 additions and 1900 deletions
@@ -1,38 +0,0 @@
import { URL } from "node:url";
import path from "node:path";
import caxa from "caxa";
import { rollup } from "rollup";
import loadConfigFile from "rollup/loadConfigFile";
import { executorLibraryDir, binName, scriptName } from "./executor-config.js";
// Fix import
const { default: caxaPackage } = caxa;
// Rollup Config
const rollupConfigPath = path.resolve(executorLibraryDir, "rollup.config.js");

// Build functions
async function packageBin() {
  console.log("Packaging bundle into binary");
  return caxaPackage({
    input: "dist/bundles/",
    output: `bin/${binName}`,
    command: ["{{caxa}}/node_modules/.bin/node", `{{caxa}}/${scriptName}`],
    uncompressionMessage: "Unpacking, please wait...",
  });
}

async function rollupBundle() {
  console.log("Rolling up executor into bundle");
  const { options, warnings } = await loadConfigFile(rollupConfigPath);
  if (warnings.count !== 0)
    console.log(`Rollup has ${warnings.count} warnings`);
  warnings.flush();

  for (const optionsObj of options) {
    const bundle = await rollup(optionsObj);
    await Promise.all(optionsObj.output.map(bundle.write));
  }
}

await rollupBundle();
await packageBin();
console.log("Done");
@@ -1,5 +0,0 @@
export const executorLibraryDir = new URL(".", import.meta.url).pathname;
export const binName = "qltr-executor";
export const configName = "executor.config.mjs";
export const scriptName = "qualiteer-executor.mjs";
export const entrypointName = "executor-entrypoint.js";
@@ -1,17 +0,0 @@
const funcify = (v) => () => v;

export function verify(config) {
  for (var k in config) {
    if (typeof config[k] !== "function")
      throw Error("All config options must be functions!");
  }
}

export function normalize(conf) {
  const config = { ...conf };
  for (var k in config) {
    if (typeof config[k] === "function") continue;
    config[k] = funcify(config[k]);
  }
  return config;
}
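The configurator's contract is that every option ends up as a function of the job payload: verify enforces it, normalize wraps plain values to satisfy it. A minimal sketch of the intended round trip (the option names are illustrative, not part of the original API):

const config = normalize({
  url: "http://qualiteer.example.com", // plain value, wrapped as () => value
  jobId: (payload) => payload.jobId,   // already a function, kept as-is
});
verify(config);                 // passes: both options are now functions
config.url();                   // "http://qualiteer.example.com"
config.jobId({ jobId: "abc" }); // "abc"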
@@ -1,14 +0,0 @@
import path from "node:path";
import Executor from "../sockets/clients/Executor.js";
import { normalize } from "./executor-configurator.js";
import { configName as executorConfigName } from "./executor-config.js";
const executorConfigPath = path.resolve(executorConfigName);
const { default: executorConfig } = await import(executorConfigPath);

// Load config and args
const args = process.argv.slice(2);
const payload = JSON.parse(Buffer.from(args[0], "base64").toString("utf8"));
const config = normalize(executorConfig(payload));
// Start Executor
const exec = new Executor(config, payload);
exec.runJob();
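The entrypoint expects its single CLI argument to be base64-encoded JSON, the same encoding commandBuilder in k8s-common.js (further down) produces. A sketch of the round trip, with an illustrative payload:

const payload = { jobId: "qltr-123", url: "http://qualiteer.example.com", jobRequest: {} };
const arg = Buffer.from(JSON.stringify(payload), "utf8").toString("base64"); // producer side
const decoded = JSON.parse(Buffer.from(arg, "base64").toString("utf8"));     // entrypoint side
// decoded is deep-equal to payload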
@@ -1,17 +0,0 @@
import path from "node:path";
import { nodeResolve } from "@rollup/plugin-node-resolve";
import commonjs from "@rollup/plugin-commonjs";
import { terser } from "rollup-plugin-terser";
import {
  executorLibraryDir,
  entrypointName,
  scriptName,
} from "./executor-config.js";

export default {
  input: path.resolve(executorLibraryDir, entrypointName),
  output: {
    file: `dist/bundles/${scriptName}`,
  },
  plugins: [nodeResolve(), commonjs(), terser()],
};
@@ -1,76 +0,0 @@
import io from "socket.io-client";
import cp from "child_process";

import modes from "../modes.js";
import events from "../events.js";

export { default as events } from "../events.js";
export { default as modes } from "../modes.js";

// Data Stream Types
const ERR = "e";
const OUT = "o";

export default class Executor {
  constructor(config, payload) {
    this.url = config.url(payload) ?? process.env.QUALITEER_EXECUTOR_URL;
    this.jobId = config.jobId(payload) ?? process.env.QUALITEER_JOB_ID;
    this.command = config.command(payload) ?? process.env.QUALITEER_COMMAND;
    this.mode = modes.EXEC;

    // Internal Buffer
    this.buf = {};
    this.buf[ERR] = "";
    this.buf[OUT] = "";

    // Methods
    this.spawn = this.spawn.bind(this);
    this.report = this.report.bind(this);
    this.onProcClose = this.onProcClose.bind(this);
    this.onClose = this.onClose.bind(this);
  }

  spawn() {
    const cmdArgs = this.command;
    const cmd = cmdArgs.shift();
    this.proc = cp.spawn(cmd, cmdArgs);

    // Set Encoding
    this.proc.stdout.setEncoding("utf8");
    this.proc.stderr.setEncoding("utf8");

    // Process Events
    this.proc.stdout.on("data", (d) => this.report(d.toString(), OUT));
    this.proc.stderr.on("data", (d) => this.report(d.toString(), ERR));
    this.proc.on("close", this.onProcClose);
  }

  runJob() {
    this.socket = io(this.url, {
      query: { mode: this.mode, jobId: this.jobId },
    });
    this.socket.on("connect", this.spawn);
    this.socket.on("disconnect", this.onClose);
  }

  onClose() {
    console.log("Server disconnected, terminating process.");
    if (this.proc) this.proc.kill("SIGKILL");
  }

  onProcClose(code) {
    this.socket.emit(events.JOB_CLS, code, () => this.socket.disconnect());
    console.log(`Process finished with code ${code}`);
  }

  report(d, dType) {
    this.buf[dType] += d;
    if (!this.buf[dType].includes("\n")) return;
    if (this.buf[dType].endsWith("\n"))
      this.buf[dType] = this.buf[dType].slice(0, -1);
    this.socket.emit(events.JOB_REP, this.buf[dType]);
    if (dType === ERR) console.error(`err: ${this.buf[dType]}`);
    else console.log(`out: ${this.buf[dType]}`);
    this.buf[dType] = "";
  }
}
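Combined with the configurator above, wiring up an executor client would have looked roughly like this (config values are illustrative):

const config = normalize({
  url: "http://qualiteer.example.com",
  jobId: (payload) => payload.jobId,
  command: ["node", "suite.js"],
});
const exec = new Executor(config, { jobId: "qltr-123" });
exec.runJob(); // connect, spawn the command, stream line-buffered stdout/stderr as JOB_REP events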
@@ -1,105 +0,0 @@
import { io } from "socket.io-client";
import modes from "../modes.js";
import events from "../events.js";

export { default as events } from "../events.js";
export { default as modes } from "../modes.js";

export default class Initiator {
  constructor(url, options = {}) {
    this.url = url;
    this.mode = modes.INIT;
    this.onLog = options.onLog ?? ((d) => console.log(`job: ${d}`));
    this.onClose = options.onClose ?? (() => {});
    this.onCreate = options.onCreate ?? ((id) => console.log(`job id: ${id}`));
    this.onPipelineClose =
      options.onPipelineClose ??
      (() => {
        console.log("job pipeline closed");
      });
    this.sk = null;
  }

  async newJob(jobRequest, onLog, onClose, onCreate) {
    onLog = onLog ?? this.onLog.bind(this);
    onClose = onClose ?? this.onClose.bind(this);
    onCreate = onCreate ?? this.onCreate.bind(this);
    const sk = io(this.url, {
      query: { mode: this.mode, job: JSON.stringify(jobRequest) },
    });
    sk.on(events.JOB_LOG, onLog);
    sk.on(events.JOB_CLS, function onJobClose(c) {
      sk.disconnect();
      onClose(c);
    });
    this.sk = sk;
    return new Promise((res) =>
      sk.on(events.JOB_CRT, function onJobCreate(id) {
        onCreate(id);
        res({ ...jobRequest, id });
      })
    );
  }

  async newPipelineJob(
    jobRequest,
    onLog,
    onClose,
    onCreate,
    onPipelineTrigger,
    onPipelineClose
  ) {
    onLog = onLog ?? this.onLog.bind(this);
    onClose = onClose ?? this.onClose.bind(this);
    onCreate = onCreate ?? this.onCreate.bind(this);
    onPipelineTrigger =
      onPipelineTrigger ??
      ((pipeline) => {
        console.log("job trg:", pipeline);
        const { triggers } = pipeline;
        if (!Object.keys(triggers).length) onPipelineClose();
        // For each trigger
        for (var testName in triggers) {
          const delay = triggers[testName].__testDelay ?? 0;
          delete triggers[testName].__testDelay;
          const jobReq = {
            ...jobRequest,
            pipeline: {
              ...pipeline,
              triggers: triggers[testName],
              __test: testName,
            },
          };
          setTimeout(
            () =>
              this.newPipelineJob(
                jobReq,
                onLog,
                onClose,
                onCreate,
                onPipelineTrigger,
                onPipelineClose
              ),
            delay
          );
        }
      });
    onPipelineClose = onPipelineClose ?? this.onPipelineClose.bind(this);
    const sk = io(this.url, {
      query: { mode: this.mode, job: JSON.stringify(jobRequest) },
    });
    sk.on(events.JOB_LOG, onLog);
    sk.on(events.JOB_CLS, function onJobClose(c) {
      sk.disconnect();
      onClose(c);
    });
    sk.on(events.PPL_TRG, onPipelineTrigger);
    this.sk = sk;
    return new Promise((res) =>
      sk.on(events.JOB_CRT, function onJobCreate(id) {
        onCreate(id);
        res({ ...jobRequest, id });
      })
    );
  }
}
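A minimal usage sketch for the Initiator client (URL and job shape are illustrative):

const initiator = new Initiator("http://qualiteer.example.com");
const job = await initiator.newJob(
  { testNames: ["single"] },           // job request, forwarded in the handshake query
  (line) => console.log(line),         // onLog: JOB_LOG stream
  (code) => console.log("exit:", code) // onClose: exit code from JOB_CLS
);
console.log(job.id); // id assigned by the server via JOB_CRT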
@@ -1,26 +0,0 @@
import io from "socket.io-client";
import modes from "../modes.js";
import events from "../events.js";

export { default as events } from "../events.js";
export { default as modes } from "../modes.js";

export default class Viewer {
  constructor(url, options = {}) {
    this.url = url;
    this.mode = modes.VIEW;
    this.onLog = options.onLog ?? console.log;
    this.onClose = options.onClose ?? (() => {});
  }

  viewJob(jobId, onLog, onClose) {
    onLog = onLog ?? this.onLog.bind(this);
    onClose = onClose ?? this.onClose.bind(this);
    const sk = io(this.url, {
      query: { mode: this.mode, jobId },
    });
    sk.on(events.JOB_LOG, onLog);
    sk.on(events.JOB_CLS, onClose);
    return sk;
  }
}
@@ -1,5 +0,0 @@
export { default as Initiator } from "./Initiator.js";

export { default as Viewer } from "./Viewer.js";

export { default as Executor } from "./Executor.js";
@@ -1,3 +0,0 @@
export { default as Initiator } from "./Initiator.js";

export { default as Viewer } from "./Viewer.js";
@@ -1,15 +0,0 @@
const JOB_REP = "jr"; // Job Report Event
const JOB_LOG = "jl"; // Job Log Event
const JOB_CLS = "jc"; // Job Close Event
const JOB_CRT = "jcr"; // Job Create Event
const PPL_TRG = "plr"; // Pipeline Trigger Event
const ERR = "e"; // Socket Error

export default {
  JOB_REP,
  JOB_LOG,
  JOB_CLS,
  JOB_CRT,
  PPL_TRG,
  ERR,
};
@@ -1,8 +0,0 @@
const INIT = "i"; // Initiator Socket
const EXEC = "e"; // Execution Socket
const VIEW = "v"; // View Socket
export default {
  INIT,
  EXEC,
  VIEW,
};
@@ -1 +0,0 @@
export { default } from "./server/core/Qualiteer.js";
@@ -1,71 +0,0 @@
import { v4 } from "uuid";
import { getTest } from "../database/queries/catalog.js";
import applyJobInternally from "../k8s/k8s-internal.js";
import applyJob from "../k8s/k8s.js";

const maxJobs = process.env.MAX_JOBS ? parseInt(process.env.MAX_JOBS) : 3;
const internalDeploy = process.env.INTERNAL_DEPLOY === "true";
const launchJob = internalDeploy ? applyJobInternally : applyJob;

async function getTests(job) {
  if (job.pipeline) return [await getTest(job.pipeline.__test)];
  if (!job.testNames) return [];
  const tests = await Promise.all(job.testNames.map((name) => getTest(name)));
  return tests;
}

class JobManager {
  constructor() {
    this.clientMaxJobs = maxJobs;
    this.clients = {};
  }

  getJob(clientId, jobId) {
    return this.clients[clientId].jobs.find((j) => j.id === jobId);
  }

  getJobById(jobId) {
    for (var client of Object.values(this.clients)) {
      const job = client.jobs.find((j) => j.id === jobId);
      if (!job) continue;
      return job;
    }
  }

  pushLog(jobId, log) {
    const job = this.getJobById(jobId);
    if (!job) return;

    if (log instanceof Array) job.log.push(...log);
    else job.log.push(log);
  }

  closeJob(jobId, exitcode) {
    const job = this.getJobById(jobId);
    if (!job) return;
    job.exitcode = exitcode;
  }

  async newJob(jobRequest, id) {
    if (!jobRequest) throw Error("Request Must Be Object!");
    if (!this.clients[id]) this.clients[id] = { jobs: [] };
    const job = { ...jobRequest };
    job.image = "registry.dunemask.net/garden/dev/reed:latest";
    job.id = v4();
    job.log = [];
    this.clients[id].jobs.push(job);
    job.dashboardSocketId = id;
    job.tests = await getTests(job);
    for (var t of job.tests) if (!t) throw Error("1 or more tests not found!");
    launchJob(job);
    return { ...job };
  }

  removeJob(clientId, id) {
    this.clients[clientId].jobs = this.clients[clientId].jobs.filter(
      (j) => j.id !== id
    );
  }
}

export default new JobManager();
@@ -1,56 +0,0 @@
// Imports
import fig from "figlet";
import http from "http";
import express from "express";
import { INFO, OK, logInfo } from "../util/logging.js";

// Import Core Modules
import buildRoutes from "../routes/router.js";
import pg from "../database/postgres.js";
import injectSockets from "./socket-server.js";
import JobManager from "./JobManager.js";
import buildRabbiteer from "../rabbit/rabbit-workers.js";

// Constants
const title = "QLTR";
const rabbiteerEnabled = process.env.QUALITEER_RABBITEER_ENABLED !== "false";
const port = process.env.QUALITEER_DEV_PORT ?? 52000;

// Class
export default class Qualiteer {
  constructor(options = {}) {
    for (var k in options) this[k] = options[k];
    this.jobs = JobManager;
    this.port = options.port ?? port;
  }

  async _preinitialize() {
    logInfo(fig.textSync(title, "Cyberlarge"));
    INFO("INIT", "Initializing...");
    this.app = express();
    this.pg = pg;
    this.server = http.createServer(this.app);
    this.sockets = injectSockets(this.server, this.jobs);
    this.routes = buildRoutes(this.pg, this.sockets);
    this.rabbiteer = buildRabbiteer(this.pg, this.sockets);
    this.app.use(this.routes);
  }

  async _connect() {
    await this.pg.connect();
    if (!rabbiteerEnabled) return;
    await this.rabbiteer.connect();
  }

  start() {
    const qt = this;
    return new Promise(async function init(res) {
      await qt._preinitialize();
      await qt._connect();
      qt.server.listen(qt.port, function onStart() {
        OK("SERVER", `Running on ${qt.port}`);
        res();
      });
    });
  }
}
@@ -1,34 +0,0 @@
import evt from "../../common/sockets/events.js";

export const initiator = async (socket, jobs) => {
  const jobStr = socket.handshake.query.job;
  const jobReq = JSON.parse(jobStr);
  console.log(jobReq);
  if (!jobReq || !(jobReq instanceof Object))
    throw Error("No 'job' was included in the handshake query");

  const job = await jobs.newJob(jobReq, socket.id);
  socket.join(job.id);
  socket.emit(evt.JOB_CRT, job.id);
};

export const executor = (io, socket, jobs) => {
  const jobId = socket.handshake.query.jobId;
  if (!jobId) throw Error("No 'jobId' was included in the handshake query");

  socket.join(jobId);
  socket.on(evt.JOB_REP, function onReport(log) {
    jobs.pushLog(jobId, log);
    io.to(jobId).emit(evt.JOB_LOG, log);
  });
  socket.on(evt.JOB_CLS, function onClose(code) {
    jobs.closeJob(jobId, code);
    io.to(jobId).emit(evt.JOB_CLS, code);
  });
};

export const viewer = (socket) => {
  const jobId = socket.handshake.query.jobId;
  if (!jobId) throw Error("No 'jobId' was included in the handshake query");
  socket.join(jobId);
};
@@ -1,13 +0,0 @@
import cron from "cron";
const { CronJob } = cron;

// Remove Expired Silenced Tests
const expiredSilenced = () => {
  console.log("Would Update Silenced Tests");
};

const silencedCron = new CronJob("* * * * * *", expiredSilenced);

export default async function startCrons() {
  silencedCron.start();
}
@@ -1,54 +0,0 @@
import { Server as Skio } from "socket.io";
import evt from "../../common/sockets/events.js";
import modes from "../../common/sockets/modes.js";

import { initiator, executor, viewer } from "./client-listeners.js";

const socketDrop = (io, room, id) => {
  const { rooms } = io.of("/").adapter;
  const clients = rooms.get(room);
  if (clients.size > 1 || clients.size === 0) return;
  const socketId = Array.from(clients)[0];
  const s = io.sockets.sockets.get(socketId);
  s.disconnect();
};

const socketConnect = async (io, socket, jobs) => {
  const { mode } = socket.handshake.query;
  try {
    switch (mode) {
      case modes.INIT:
        await initiator(socket, jobs);
        break;
      case modes.EXEC:
        executor(io, socket, jobs);
        break;
      case modes.VIEW:
        viewer(socket);
        break;
      default:
        socket.send(evt.ERR, "Invalid Mode!");
        socket.disconnect();
        break;
    }
  } catch (err) {
    console.log(err);
    socket.send(evt.ERR, err);
    socket.disconnect();
  }
};

const socketAuth = (socket, next) => {
  const { token } = socket.handshake.auth;
  // next(new Error("Bad Token"));
  next();
};

const applySockets = (server, jobs, options) => {
  const io = new Skio(server);
  io.on("connection", (socket) => socketConnect(io, socket, jobs));
  io.of("/").adapter.on("leave-room", (room, id) => socketDrop(io, room, id));
  return io;
};

export default applySockets;
@@ -1,23 +0,0 @@
const seconds = 1000;
const minutes = 60 * seconds;
const hours = 60 * minutes;
export const DELAYS = {
  "1sec": 1 * seconds,
  "5sec": 5 * seconds,
  "10sec": 10 * seconds,
  "30sec": 30 * seconds,
  "1min": 1 * minutes,
  "5min": 5 * minutes,
  "10min": 10 * minutes,
  "15min": 15 * minutes,
  "30min": 30 * minutes,
  "1hour": 1 * hours,
  "2hour": 2 * hours,
  "3hour": 3 * hours,
  "4hour": 4 * hours,
};

export default function getDelay(delayStr) {
  if (DELAYS[delayStr]) return DELAYS[delayStr];
  return 0;
}
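getDelay resolves the delay_* tag suffixes used by the catalog into milliseconds, with unknown strings falling back to 0:

getDelay("5min");   // 300000
getDelay("1hour");  // 3600000
getDelay("weekly"); // 0 (not a key of DELAYS)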
@@ -1,23 +0,0 @@
CREATE SEQUENCE catalog_id_seq;
CREATE TABLE catalog (
  id bigint NOT NULL DEFAULT nextval('catalog_id_seq') PRIMARY KEY,
  name varchar(255) DEFAULT NULL,
  class varchar(255) DEFAULT NULL,
  image varchar(255) DEFAULT NULL,
  "path" varchar(255) DEFAULT NULL,
  description varchar(1023) DEFAULT NULL,
  type varchar(31) DEFAULT NULL,
  created TIMESTAMP NOT NULL DEFAULT now(),
  mr varchar(255) DEFAULT NULL,
  tags varchar(255)[] DEFAULT NULL,
  crons varchar(127)[] DEFAULT NULL,
  env varchar(31)[] DEFAULT NULL,
  regions varchar(15)[] DEFAULT NULL,
  triggers varchar(255)[] DEFAULT NULL,
  pipeline BOOLEAN DEFAULT FALSE,
  coverage varchar(255)[] DEFAULT NULL,
  projects varchar(255)[] DEFAULT NULL,
  delay varchar(31) DEFAULT NULL,
  CONSTRAINT unique_name UNIQUE(name)
);
ALTER SEQUENCE catalog_id_seq OWNED BY catalog.id;
@@ -1,15 +0,0 @@
CREATE SEQUENCE results_id_seq;
CREATE TABLE results (
  id bigint NOT NULL DEFAULT nextval('results_id_seq') PRIMARY KEY,
  name varchar(255) DEFAULT NULL,
  class varchar(255) DEFAULT NULL,
  "method" varchar(255) DEFAULT NULL,
  env varchar(31) DEFAULT NULL,
  "timestamp" TIMESTAMP NOT NULL DEFAULT now(),
  triage BOOLEAN DEFAULT FALSE,
  failed BOOLEAN DEFAULT FALSE,
  message varchar(2047) DEFAULT NULL,
  screenshot varchar(255) DEFAULT NULL,
  console varchar(255) DEFAULT NULL
);
ALTER SEQUENCE results_id_seq OWNED BY results.id;
@@ -1,9 +0,0 @@
CREATE SEQUENCE alerting_id_seq;
CREATE TABLE alerting (
  id bigint NOT NULL DEFAULT nextval('alerting_id_seq') PRIMARY KEY,
  name varchar(255) DEFAULT NULL,
  class varchar(255) DEFAULT NULL,
  "method" varchar(255) DEFAULT NULL,
  expires TIMESTAMP NOT NULL DEFAULT now()
);
ALTER SEQUENCE alerting_id_seq OWNED BY alerting.id;
@@ -1,11 +0,0 @@
export const silencedMock = () => {
  return [
    {
      name: `failing`,
      class: `failing.js`,
      method: "FAKEMETHOD",
      id: 0,
      silencedUntil: new Date().toJSON(),
    },
  ];
};
@@ -1,153 +0,0 @@
export const testsMock = () => {
  return [
    {
      id: 0,
      name: "single",
      class: "single.js",
      image: "node:latest",
      isPipeline: false,
      type: "api",
      description: "This is a single test",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_core", "skip_alt"],
      path: "tests/assets/suite/single.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 1,
      name: "failing",
      class: "failing.js",
      image: "node:latest",
      isPipeline: false,
      type: "ui",
      description: "This is a failing test",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_core"],
      path: "tests/assets/suite/failing.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 2,
      name: "primary",
      class: "primary.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a primary test",
      tags: [
        "cron_1hour",
        "reg_us",
        "proj_core",
        "skip_alt",
        "pipeline_secondary1",
      ],
      path: "tests/assets/suite/primary.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 3,
      name: "secondary1",
      class: "secondary1.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a secondary test",
      tags: [
        "cron_1hour",
        "reg_us",
        "proj_core",
        "compound_tertiary1",
        "compound_tertiary2",
      ],
      path: "tests/assets/suite/secondary1.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 4,
      name: "secondary2",
      class: "secondary2.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a secondary2 test",
      tags: ["cron_1hour", "reg_us", "proj_core", "compound_tertiary3"],
      path: "tests/assets/suite/secondary2.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 5,
      name: "tertiary1",
      class: "tertiary1.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a third test",
      tags: ["cron_1hour", "reg_us", "proj_core"],
      path: "tests/assets/suite/tertiary1.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 6,
      name: "tertiary2",
      class: "tertiary2.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a third2 test",
      tags: ["cron_1hour", "reg_us", "proj_core"],
      path: "tests/assets/suite/tertiary2.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 7,
      name: "tertiary3",
      class: "tertiary3.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a third3 test",
      tags: ["cron_1hour", "reg_us", "proj_core"],
      path: "tests/assets/suite/tertiary3.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 8,
      name: "single-alt",
      class: "single-alt.js",
      image: "node:latest",
      isPipeline: false,
      type: "ui",
      description: "This is an alternative test",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_alt"],
      path: "tests/assets/suite/single-alt.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
  ];
};

export const mappingsMock = () => {
  return [
    [
      { name: "primary", delay: 0 },
      { name: "secondary1", delay: 1000 },
      { name: "tertiary1", delay: 0 },
    ],
    [
      { name: "primary", delay: 0 },
      { name: "secondary1", delay: 1000 },
      { name: "tertiary2", delay: 8000 },
    ],
    [
      { name: "primary", delay: 0 },
      { name: "secondary2", delay: 0 },
      { name: "tertiary3", delay: 3000 },
    ],
  ];
};
@@ -1,30 +0,0 @@
export const failingMock = () => {
  return [
    {
      name: "failing",
      class: "failing.js",
      timestamp: new Date().toJSON(),
      method: "FAKEMETHOD",
      cron: "1hour",
      type: "api",
      dailyFails: 12,
      screenshot: "https://picsum.photos/1920/1080",
      recentResults: [1, 0, 0, 1, 0],
      isPipeline: false,
      failedMessage: `Some Test FailureMessage`,
    },
    {
      name: "secondary1",
      class: "secondary1.js",
      timestamp: new Date().toJSON(),
      method: "FAKEMETHOD",
      cron: "1hour",
      type: "api",
      dailyFails: 1,
      screenshot: "https://picsum.photos/1920/1080",
      recentResults: [1, 0, 0, 1, 0],
      isPipeline: true,
      failedMessage: `Some Test FailureMessage from Secondary1`,
    },
  ];
};
@@ -1,121 +0,0 @@
const buildPostgresEntry = (entry) => {
  const pgEntry = { ...entry };
  Object.keys(pgEntry).forEach((col) => {
    if (pgEntry[col] === undefined) delete pgEntry[col];
  });
  return pgEntry;
};

export const buildPostgresValue = (jsVar) => {
  if (jsVar === null) return "null";
  if (typeof jsVar === "string") return buildPostgresString(jsVar);
  if (Array.isArray(jsVar) && jsVar.length === 0) return "null";
  if (Array.isArray(jsVar) && isTypeArray(jsVar, "string"))
    return buildPostgresStringArray(jsVar);
  return jsVar;
};

const buildPostgresStringArray = (jsonArray) => {
  if (jsonArray.length === 0) return null;
  var pgArray = [...jsonArray];
  var arrayString = "ARRAY [";
  pgArray.forEach((e, i) => (pgArray[i] = `'${e}'`));
  arrayString += pgArray.join(",");
  arrayString += "]";
  return arrayString;
};

const isTypeArray = (jsonArray, type) =>
  jsonArray.every((e) => typeof e === type);

const buildPostgresString = (jsonString) =>
  (jsonString && `'${jsonString.replaceAll("'", "''")}'`) || null;

export const insertQuery = (table, jsEntry) => {
  if (typeof jsEntry !== "object") throw Error("PG Inserts must be objects!");
  const entry = buildPostgresEntry(jsEntry);
  const cols = Object.keys(entry);
  cols.forEach((col, i) => {
    entry[col] = buildPostgresValue(entry[col]);
    cols[i] = `"${col}"`;
  });
  var query = `INSERT INTO ${table}(${cols.join(",")})\n`;
  query += `VALUES(${Object.values(entry).join(",")})`;
  return query;
};

export const deleteQuery = (table, jsEntry) => {
  if (typeof jsEntry !== "object")
    throw Error("PG Delete conditionals must be an object!");
  const entry = buildPostgresEntry(jsEntry);
  const cols = Object.keys(entry);
  const conditionals = [];
  for (var col of cols) {
    entry[col] = buildPostgresValue(entry[col]);
    if (entry[col] === "null") conditionals.push(`x.${col} IS NULL`);
    else conditionals.push(`x.${col}=${entry[col]}`);
  }
  return `DELETE FROM ${table} x WHERE ${conditionals.join(" AND ")}`;
};
export const onConflictUpdate = (conflicts, updates) => {
  if (!Array.isArray(conflicts)) throw Error("PG Conflicts must be an array!");
  if (typeof updates !== "object") throw Error("PG Updates must be objects!");
  const entry = buildPostgresEntry(updates);
  var query = `ON CONFLICT (${conflicts.join(",")}) DO UPDATE SET\n`;
  const cols = Object.keys(entry);
  for (var col of cols) {
    entry[col] = buildPostgresValue(entry[col]);
  }
  query += cols.map((c) => `${c}=${entry[c]}`).join(",");
  return query;
};
export const clearTableQuery = (table) => {
  return `TRUNCATE ${table}`;
};

export const selectWhereQuery = (table, jsEntry, joinWith) => {
  if (typeof jsEntry !== "object") throw Error("PG Where must be an object!");
  const where = buildPostgresEntry(jsEntry);
  const cols = Object.keys(where);
  var query = `SELECT * FROM ${table} AS x WHERE\n`;
  for (var col of cols) {
    where[col] = buildPostgresValue(where[col]);
  }
  return (query += cols.map((c) => `x.${c}=${where[c]}`).join(joinWith));
};
export const updateWhereQuery = (table, updates, wheres, joinWith) => {
  if (typeof updates !== "object") throw Error("PG Updates must be an object!");
  if (typeof wheres !== "object") throw Error("PG Wheres must be an object!");
  const update = buildPostgresEntry(updates);
  const where = buildPostgresEntry(wheres);
  const updateCols = Object.keys(update);
  const whereCols = Object.keys(where);
  var query = `UPDATE ${table}\n`;
  var updateQuery = updateCols
    .map((c) => `${c} = ${buildPostgresValue(update[c])}`)
    .join(",");
  var whereQuery = whereCols
    .map((c) => `${c} = ${buildPostgresValue(where[c])}`)
    .join(joinWith);
  return (query += `SET ${updateQuery} WHERE ${whereQuery}`);
};
export const updateWhereAnyQuery = (table, updates, wheres) =>
  updateWhereQuery(table, updates, wheres, " OR ");
export const updateWhereAllQuery = (table, updates, wheres) =>
  updateWhereQuery(table, updates, wheres, " AND ");
export const selectWhereAnyQuery = (table, where) =>
  selectWhereQuery(table, where, " OR ");
export const selectWhereAllQuery = (table, where) =>
  selectWhereQuery(table, where, " AND ");

export default {
  selectWhereAnyQuery,
  selectWhereAllQuery,
  updateWhereAnyQuery,
  updateWhereAllQuery,
  insertQuery,
  deleteQuery,
  buildPostgresValue,
  onConflictUpdate,
  clearTableQuery,
};
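These helpers build SQL strings by direct interpolation (with quote escaping) rather than parameterized queries. A quick sketch of the strings they produce, for illustrative inputs:

insertQuery("results", { name: "failing", failed: true });
// INSERT INTO results("name","failed")
// VALUES('failing',true)

selectWhereAllQuery("catalog", { name: "single", type: "api" });
// SELECT * FROM catalog AS x WHERE
// x.name='single' AND x.type='api'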
@@ -1,62 +0,0 @@
// Imports
import path from "node:path";
import { URL } from "node:url";
import { migrate } from "postgres-migrations";
import createPgp from "pg-promise";
import moment from "moment";
import { INFO, WARN, OK, VERB } from "../util/logging.js";
// Environment Variables
const {
  QUALITEER_POSTGRES_DATABASE: database,
  QUALITEER_POSTGRES_ENABLED: pgEnabled,
  QUALITEER_POSTGRES_HOST: host,
  QUALITEER_POSTGRES_PASSWORD: password,
  QUALITEER_POSTGRES_PORT: port,
  QUALITEER_POSTGRES_USER: user,
} = process.env;

// Postgres-promise Configuration
// Ensure dates get saved as UTC date strings
// This prevents the parser from doing strange datetime operations
const pgp = createPgp();
pgp.pg.types.setTypeParser(1114, (str) => moment.utc(str).format());

// Database Config
const dbConfig = {
  database: database ?? "qualiteer",
  user: user ?? "postgres",
  password: password ?? "postgres",
  host: host ?? "localhost",
  port: port ?? 5432,
  ensureDatabaseExists: true,
};

const databaseDir = new URL(".", import.meta.url).pathname;
const migrationsDir = path.resolve(databaseDir, "migrations/");

const queryMock = (str) => INFO("POSTGRES MOCK", str);

const connect = (pg) => async () => {
  if (pgEnabled === "false") {
    WARN("POSTGRES", "Postgres Disabled!");
    return { query: queryMock };
  }
  VERB("POSTGRES", "Migrating...");
  await migrate(dbConfig, migrationsDir);
  // Override fake methods
  const pgInstance = pgp(dbConfig);
  for (var k in pgInstance) pg[k] = pgInstance[k];
  VERB("POSTGRES", "Migrated Successfully");
  await pg.connect();
  VERB("POSTGRES", "Postgres connected Successfully!");

  OK("POSTGRES", `Connected to database ${dbConfig.database}!`);
};

const buildPostgres = () => {
  var pg = { query: queryMock };
  pg.connect = connect(pg);
  return pg;
};

export default buildPostgres();
@@ -1,56 +0,0 @@
import pg from "../postgres.js";
import { silencedMock } from "../mocks/alerting-mock.js";
import moment from "moment";
// Imports
import {
  insertQuery,
  selectWhereAnyQuery,
  updateWhereAnyQuery,
  deleteQuery,
} from "../pg-query.js";
// Constants
const table = "alerting";
const PG_DISABLED = process.env.POSTGRES_DISABLED;

export const upsertAlertSilence = async (silence) => {
  const {
    id,
    name,
    class: className,
    method,
    expires: duration,
    keepExpires,
  } = silence;
  const { h, m } = duration;
  const expires = moment().add(h, "hours").add(m, "minutes").utc().format();
  const entry = {
    name,
    class: className,
    method,
    expires: keepExpires ? undefined : expires,
  };
  const asUpdate = {};
  for (var k of Object.keys(entry))
    asUpdate[k] = entry[k] === "*" ? null : entry[k];
  var query = id
    ? updateWhereAnyQuery(table, asUpdate, { id })
    : insertQuery(table, entry);
  return pg.query(query);
};

export const deleteAlertSilence = async (silence) => {
  const { id } = silence;
  const query = deleteQuery(table, { id });
  return pg.query(query);
};

// Queries
export const getSilencedTests = async () => {
  if (PG_DISABLED) return silencedMock();
  const query = `SELECT * from ${table}`;
  const silenced = await pg.query(query);
  silenced.forEach((t, i) => {
    for (var k of Object.keys(t)) silenced[i][k] = t[k] === null ? "*" : t[k];
  });
  return silenced;
};
@@ -1,121 +0,0 @@
import pg from "../postgres.js";
// Imports
import {
  insertQuery,
  selectWhereAnyQuery,
  onConflictUpdate,
} from "../pg-query.js";
import { WARN } from "../../util/logging.js";

import getFilteredTags from "../tags.js";
import getDelay from "../delays.js";
// Constants
const table = "catalog";
const PG_DISABLED = process.env.POSTGRES_DISABLED;
import { testsMock, mappingsMock } from "../mocks/catalog-mock.js";
// Queries

export const removeDroppedTests = async (testNames) => {
  // BUG: After dropping a test, the id jumps ridiculously high
  const pgNames = testNames.map((tn) => `'${tn}'`).join(",");
  const query = `DELETE FROM catalog as x where x.name not in (${pgNames});`;
  return pg.query(query);
};

export const getTest = async (name) => {
  const query = selectWhereAnyQuery(table, { name });
  const results = await pg.query(query);
  if (results.length > 1)
    WARN("CATALOG", `More than 1 test found for '${name}'`);
  return results[0];
};

export const getTests = async () => {
  if (PG_DISABLED) return testsMock();
  const query = `SELECT * from ${table}`;
  return pg.query(query);
};

export const getPipelineMappings = async () => {
  if (PG_DISABLED) return mappingsMock();
  const query = `SELECT * from ${table} WHERE pipeline`;
  const tests = await pg.query(query);
  const mappings = [];
  var newTrigger;
  for (var test of tests) {
    if (test.triggers) continue;
    const { name, delay: delayStr } = test;
    var triggerStack = [{ name, delay: getDelay(delayStr), delayStr }];
    newTrigger = { name, delayStr };
    while (
      (newTrigger = tests.find(
        (te) => te.triggers && te.triggers.includes(newTrigger.name)
      )) !== null
    ) {
      if (!newTrigger) break;
      triggerStack.push({
        name: newTrigger.name,
        delay: getDelay(newTrigger.delay),
        delayStr: newTrigger.delay,
      });
    }
    mappings.push(triggerStack.reverse());
  }
  return mappings;
};

export const getProjects = async () => {
  if (PG_DISABLED) {
    const tests = testsMock();
  }
};

export const truncateTests = async () => {
  if (PG_DISABLED) return console.log(`Would truncate table ${table}`);
  const query = `TRUNCATE ${table} RESTART IDENTITY CASCADE;`;
  return await pg.query(query);
};

export const upsertTest = async (test) => {
  if (PG_DISABLED) return console.log("Would insert test", test);
  const {
    name,
    class: className,
    image,
    path,
    description,
    type,
    created,
    mergeRequest,
    tags,
  } = test;

  const filteredTags = getFilteredTags(tags);

  const env =
    filteredTags.ignore && filteredTags.env
      ? filteredTags.env.filter((e) => !filteredTags.ignore.includes(e))
      : filteredTags.env;
  const catalogEntry = {
    name,
    class: className,
    image,
    path,
    description: description ? description : null,
    type,
    created,
    mr: mergeRequest,
    tags,
    crons: filteredTags.crons,
    env,
    regions: filteredTags.regions,
    triggers: filteredTags.triggers,
    pipeline: filteredTags.pipeline ? true : false,
    coverage: filteredTags.coverage,
    projects: filteredTags.projects,
    delay: filteredTags.delay ? filteredTags.delay[0] : null,
  };
  const query =
    insertQuery(table, catalogEntry) + onConflictUpdate(["name"], catalogEntry);
  return await pg.query(query);
};
@@ -1,93 +0,0 @@
import pg from "../postgres.js";
import { failingMock } from "../mocks/results-mock.js";
// Imports
import {
  insertQuery,
  selectWhereAnyQuery,
  selectWhereAllQuery,
  updateWhereAnyQuery,
} from "../pg-query.js";
// Constants
const table = "results";
const recentResultsMax = 5;
const PG_DISABLED = process.env.POSTGRES_DISABLED;

// Queries
export const insertTestResult = (testResult) => {
  const {
    name,
    class: className,
    method,
    env,
    timestamp,
    triage,
    failed,
    message,
    screenshot,
    console: cs,
  } = testResult;

  var query = insertQuery(table, {
    name,
    class: className,
    method,
    env,
    timestamp,
    triage,
    failed,
    message,
    screenshot,
    console: cs,
  });

  query += "\n RETURNING *";
  return pg.query(query);
};

export const getCurrentlyFailing = async () => {
  if (PG_DISABLED) return failingMock();
  /* This can probably be changed into a super query, but perhaps faster/smaller */
  const recent = `SELECT * FROM ${table} WHERE (timestamp BETWEEN NOW() - INTERVAL '24 HOURS' AND NOW()) AND NOT(failed AND triage)`;
  const slimCatalog = `SELECT name, crons, class, type, pipeline, env AS enabled_env FROM catalog`;
  const failing = `SELECT * FROM recent INNER JOIN slim_catalog USING(name) WHERE timestamp = (SELECT MAX(timestamp) FROM recent r2 WHERE recent.name = r2.name) AND failed`;
  const applicableFailing = `SELECT name, count(*) as fails FROM recent WHERE recent.name IN (SELECT name FROM failing) GROUP BY name`;
  /*const runHistory = `SELECT name, timestamp, failed FROM (SELECT *, ROW_NUMBER() OVER(PARTITION BY name ORDER BY timestamp) as n
  FROM ${table} WHERE name IN (SELECT name FROM failing)) as ord WHERE n <= ${recentResultsMax} ORDER BY name DESC`;*/
  const runHistory = `SELECT name, timestamp, failed FROM results WHERE NOT triage AND name IN (SELECT name FROM failing) ORDER BY timestamp DESC LIMIT ${recentResultsMax}`;
  // const recentQuery = pg.query(recent);
  const failingQuery = pg.query(
    `WITH recent as (${recent}), slim_catalog as (${slimCatalog}) ${failing}`
  );
  const applicableQuery = pg.query(
    `WITH recent as (${recent}), slim_catalog as (${slimCatalog}), failing as (${failing}) ${applicableFailing}`
  );
  const historyQuery = pg.query(
    `WITH recent as (${recent}), slim_catalog as (${slimCatalog}), failing as (${failing}) ${runHistory}`
  );

  const [currentlyFailing, applicableFails, failHistory] = await Promise.all([
    failingQuery,
    applicableQuery,
    historyQuery,
  ]);
  for (var i = 0; i < currentlyFailing.length; i++) {
    currentlyFailing[i].dailyFails = parseInt(
      applicableFails.find((af) => af.name === currentlyFailing[i].name).fails
    );
    currentlyFailing[i].recentResults = [];
    currentlyFailing[i].enabledEnv = currentlyFailing[i].enabled_env;
    currentlyFailing[i].isPipeline = currentlyFailing[i].pipeline;
    delete currentlyFailing[i].enabled_env;
    delete currentlyFailing[i].pipeline;
    for (var fh of failHistory) {
      if (fh.name !== currentlyFailing[i].name) continue;
      currentlyFailing[i].recentResults.push(fh);
    }
  }
  return currentlyFailing;
};

export const ignoreResult = async ({ id }) => {
  const query = updateWhereAnyQuery(table, { failed: false }, { id });
  return pg.query(query);
};
@@ -1,11 +0,0 @@
export const table = "alerting";
export const seed = () => {
  return [
    {
      name: `failing`,
      class: `failing.js`,
      method: "FAKEMETHOD",
      expires: new Date().toJSON(),
    },
  ];
};
@@ -1,126 +0,0 @@
export const table = "catalog";
export const seed = () => {
  return [
    {
      name: "single",
      class: "single.js",
      image: "node:latest",
      path: "tests/assets/suite/single.js",
      description: "This is a single test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_core", "ignore_alt"],
    },
    {
      name: "failing",
      class: "failing.js",
      image: "node:latest",
      path: "tests/assets/suite/failing.js",
      description: "This is a failing test",
      type: "ui",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_core"],
    },
    {
      name: "primary",
      class: "primary.js",
      image: "node:latest",
      path: "tests/assets/suite/primary.js",
      description: "This is a primary test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: [
        "pipeline",
        "cron_1hour",
        "reg_us",
        "proj_core",
        "ignore_alt",
        "triggers_secondary1",
        "triggers_secondary2",
      ],
    },
    {
      name: "secondary1",
      class: "secondary1.js",
      image: "node:latest",
      path: "tests/assets/suite/secondary1.js",
      description: "This is a secondary test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: [
        "pipeline",
        "cron_1hour",
        "reg_us",
        "proj_core",
        "triggers_tertiary1",
        "triggers_tertiary2",
        "delay_1sec",
      ],
    },
    {
      name: "secondary2",
      class: "secondary2.js",
      image: "node:latest",
      path: "tests/assets/suite/secondary2.js",
      description: "This is a secondary2 test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: [
        "pipeline",
        "cron_1hour",
        "reg_us",
        "proj_core",
        "triggers_tertiary3",
      ],
    },
    {
      name: "tertiary1",
      class: "tertiary1.js",
      image: "node:latest",
      path: "tests/assets/suite/tertiary1.js",
      description: "This is a third test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["pipeline", "cron_1hour", "reg_us", "proj_core"],
    },
    {
      name: "tertiary2",
      class: "tertiary2.js",
      image: "node:latest",
      path: "tests/assets/suite/tertiary2.js",
      description: "This is a third2 test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["pipeline", "cron_1hour", "reg_us", "proj_core", "delay_10sec"],
    },
    {
      name: "tertiary3",
      class: "tertiary3.js",
      image: "node:latest",
      path: "tests/assets/suite/tertiary3.js",
      description: "This is a third3 test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["pipeline", "cron_1hour", "reg_us", "proj_core", "delay_5sec"],
    },
    {
      name: "single-alt",
      class: "single-alt.js",
      image: "node:latest",
      path: "tests/assets/suite/single-alt.js",
      description: "This is an alternative test",
      type: "ui",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_alt"],
    },
  ];
};
@@ -1,29 +0,0 @@
export const table = "results";
export const seed = () => {
  return [
    {
      name: "failing",
      class: "failing.js",
      method: "FAKEMETHOD",
      env: "prod",
      timestamp: new Date().toJSON(),
      triage: false,
      failed: true,
      message: "Some Test FailureMessage",
      screenshot: "https://picsum.photos/1920/1080",
      console: "https://example.com",
    },
    {
      name: "secondary1",
      class: "secondary1.js",
      method: "FAKEMETHOD",
      env: "prod",
      timestamp: new Date().toJSON(),
      triage: false,
      failed: true,
      message: "Some Test FailureMessage from Secondary1",
      screenshot: "https://picsum.photos/1920/1080",
      console: "https://example.com",
    },
  ];
};
@@ -1,26 +0,0 @@
import { WARN } from "../util/logging.js";
export const TAGS = {
  IGNORE: { name: "ignore", tag: "ignore_", value: (t) => t },
  CRON: { name: "crons", tag: "cron_", value: (t) => t },
  ENV: { name: "env", tag: "env_", value: (t) => t },
  REGIONS: { name: "regions", tag: "reg_", value: (t) => t },
  PIPELINE: { name: "pipeline", tag: "is_pipeline", value: (t) => t },
  COVERAGE: { name: "coverage", tag: "coverage_", value: (t) => t },
  PROJECT: { name: "projects", tag: "proj_", value: (t) => t },
  DELAY: { name: "delay", tag: "delay_", value: (t) => t },
  TRIGGERS: { name: "triggers", tag: "triggers_", value: (t) => t },
};

export default function getFilteredTags(tags) {
  const filtered = {};
  for (var t of tags) {
    const tag = Object.values(TAGS).find((ta) => t.startsWith(ta.tag));
    if (!tag) {
      WARN("CATALOG", `Tag '${t}' did not have a valid prefix!`);
      continue;
    }
    if (!filtered[tag.name]) filtered[tag.name] = [];
    filtered[tag.name].push(tag.value(t.replace(tag.tag, "")));
  }
  return filtered;
}
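getFilteredTags buckets a flat tag list by prefix, stripping the prefix from each value. For example:

getFilteredTags(["cron_1hour", "reg_us", "proj_core", "triggers_secondary1", "delay_1sec"]);
// { crons: ["1hour"], regions: ["us"], projects: ["core"],
//   triggers: ["secondary1"], delay: ["1sec"] }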
@ -1,58 +0,0 @@
import fs from "node:fs";
import { URL } from "node:url";
import path from "node:path";

const {
  QUALITEER_EXECUTOR_URL,
  QUALITEER_EXECUTOR_USE_SCRIPT,
  QUALITEER_EXECUTOR_BIN,
  QUALITEER_EXECUTOR_BIN_URL,
} = process.env;

const executorUrl = QUALITEER_EXECUTOR_URL;
const executorAsScript = QUALITEER_EXECUTOR_USE_SCRIPT === "true";
const executorBin = QUALITEER_EXECUTOR_BIN ?? "qltr-executor";
const executorBinFetchUrl = QUALITEER_EXECUTOR_BIN_URL;

const jobsDir = "jobs/";
const jobsPath = path.resolve(jobsDir);
const k8sFolder = new URL(".", import.meta.url).pathname;
const defaultsFilePath = path.resolve(k8sFolder, "k8s-job.json");
const defaults = JSON.parse(fs.readFileSync(defaultsFilePath));

function commandBuilder(jobId, jobRequest) {
  const executorPayload = JSON.stringify({
    jobId,
    jobRequest,
    url: executorUrl,
  });
  const payload = Buffer.from(executorPayload, "utf8").toString("base64");
  return [`./${executorBin}`, payload];
}

export function jobBuilder(jobRequest) {
  const { resources, name, image, id: jobId } = jobRequest;
  // Safety checks
  if (!jobId) throw Error("'jobId' required!");
  if (!image) throw Error("'image' required!");

  // Apply configuration. Deep-copy (Node 17+) so repeated builds don't
  // mutate the shared defaults object; a shallow { ...defaults } spread
  // still aliased every nested object.
  const job = structuredClone(defaults);
  job.metadata.name = `qltr-${jobId}`;
  const container = job.spec.template.spec.containers[0];
  container.name = job.metadata.name;
  container.command = commandBuilder(jobId, jobRequest);
  // Was JSON.stringify(image), which wrapped the image name in literal quotes.
  container.image = image;
  // Apply resources
  job.resources = { ...job.resources, ...resources };
  return job;
}

export const createFile = (job) => {
  const { name } = job.metadata;
  if (!fs.existsSync(jobsPath)) fs.mkdirSync(jobsPath);
  const filePath = path.resolve(jobsDir, `${name}.json`);
  fs.writeFileSync(filePath, JSON.stringify(job));
  return filePath;
};

export const deleteFile = (filePath) => fs.unlinkSync(filePath);
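A sketch of the round trip through the helpers above; the request values are invented, and QUALITEER_EXECUTOR_URL is assumed to be set:

// Build a Job manifest, persist it under jobs/, then decode the base64
// payload the executor binary receives as its argument.
const job = jobBuilder({ id: "suite-1", image: "node:latest", resources: {} });
const filePath = createFile(job); // jobs/qltr-suite-1.json
const [, payload] = job.spec.template.spec.containers[0].command;
console.log(JSON.parse(Buffer.from(payload, "base64").toString("utf8")));
// => { jobId: "suite-1", jobRequest: { ... }, url: <QUALITEER_EXECUTOR_URL> }
deleteFile(filePath);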
@ -1,14 +0,0 @@
import { INFO, ERR, OK, VERB } from "../../util/logging.js";
import cp from "node:child_process";

const jobStr = process.argv.slice(2)[0];
const job = JSON.parse(jobStr);
const { command } = job.spec.template.spec.containers[0];
INFO("EXEC", "Internal Executor Starting!");
// `command` is an array (binary plus args); cp.exec expects a shell string,
// so join it — passing the array directly would comma-join it instead.
cp.exec(command.join(" "), (error, stdout, stderr) => {
  if (error) ERR("EXEC", error);
  //if (stdout) VERB("EXEC-STDOUT", stdout);
  //if (stderr) VERB("EXEC-STDERR", stderr);
  OK("EXEC", "Internal Executor Finished!");
  process.exit(error ? 1 : 0);
});
@ -1,20 +0,0 @@
import cp from "node:child_process";
import fs from "node:fs";
import path from "node:path";
import { jobBuilder, createFile, deleteFile } from "./k8s-common.js";

// Constants
const internalEngine = path.resolve("./lib/jobs/k8s/k8s-internal-engine.js");

// Functions
const applyFileInternally = (filePath) => {
  const job = fs.readFileSync(filePath, { encoding: "utf8" });
  cp.fork(internalEngine, [job]);
};

export default async function createJobInternally(jobRequest) {
  const job = jobBuilder(jobRequest);
  const filePath = createFile(job);
  applyFileInternally(filePath);
  deleteFile(filePath);
}
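Usage is a single call; only id and image are required by jobBuilder, and the values here are invented:

// Forks the executor as a local child process — no Kubernetes API access
// needed, which makes this path convenient for development.
await createJobInternally({ id: "dev-42", image: "node:latest" });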
@ -1,34 +0,0 @@
{
  "apiVersion": "batch/v1",
  "kind": "Job",
  "metadata": {
    "name": "qltr-job-test-suite-1"
  },
  "spec": {
    "ttlSecondsAfterFinished": 2,
    "template": {
      "spec": {
        "containers": [
          {
            "name": "qltr-job-test-suite-1",
            "image": "node:latest",
            "imagePullPolicy": "Always",
            "command": ["node", "--version"],
            "envFrom": [
              {
                "configMapRef": {
                  "name": "qualiteer-job-environment"
                }
              }
            ]
          }
        ],
        "imagePullSecrets": [
          { "name": "usw-registry-secret", "namespace": "default" }
        ],
        "restartPolicy": "Never"
      }
    },
    "backoffLimit": 4
  }
}
@ -1,19 +0,0 @@
import k8s from "@kubernetes/client-node";
import { INFO, ERR } from "../util/logging.js";
import { jobBuilder, createFile, deleteFile } from "./k8s-common.js";

export default async function createJob(jobRequest) {
  //console.log(await jobRequest.tests);
  const job = jobBuilder(jobRequest);
  job.spec.template.spec.containers[0].image =
    "registry.dunemask.net/garden/dev/reed:latest";
  const kc = new k8s.KubeConfig();
  kc.loadFromCluster();
  const batchV1Api = kc.makeApiClient(k8s.BatchV1Api);
  const batchV1beta1Api = kc.makeApiClient(k8s.BatchV1beta1Api);
  const jobName = job.metadata.name;
  batchV1Api
    .createNamespacedJob("dunestorm-dunemask", job)
    .then((res) => INFO("K8S", `Job ${jobName} created!`))
    .catch((err) => ERR("K8S", err));
}
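Usage mirrors the internal variant (values invented). Note two caveats visible in the code: loadFromCluster() only works inside a pod with a service account, and the hardcoded reed image silently overrides whatever the request asked for — presumably a development override:

// Creates the Job in the dunestorm-dunemask namespace; failures only
// surface through the ERR log because the promise is never awaited.
await createJob({ id: "suite-1", image: "node:latest" });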
@ -1,22 +0,0 @@
import Rabbiteer from "rabbiteer";
import buildWorkers from "./workers/index.js";

// Pull Environment Variables
const {
  QUALITEER_RABBIT_HOST: host,
  QUALITEER_RABBIT_USER: user,
  QUALITEER_RABBIT_PASS: pass,
} = process.env;

// Rabbit Config
const rabbitConfig = {
  protocol: "amqp:",
  // The fallback belongs inside the template: `${host}` ?? "localhost"
  // could never fall back, since a template literal is never nullish.
  host: `amqp://${host ?? "localhost"}`,
  user: user ?? "guest",
  pass: pass ?? "guest",
};

const buildRabbiteer = (pg, skio) =>
  new Rabbiteer(null, buildWorkers(skio), { autoRabbit: rabbitConfig });

export default buildRabbiteer;
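A start-up sketch. Rabbiteer's own API is not shown anywhere in this repo, so the connect() call below is strictly a placeholder — only buildRabbiteer itself is defined in this codebase:

const rabbiteer = buildRabbiteer(pg, skio);
// Placeholder: substitute whatever start/connect method Rabbiteer exposes.
await rabbiteer.connect();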
@ -1,19 +0,0 @@
// Imports
import { Worker } from "rabbiteer";
// Class
export default class KubeJobsWorker extends Worker {
  constructor() {
    super("KubeJobs");
  }

  async configure(ch) {
    await ch.assertExchange("KubeJobsExchange", "direct");
    await ch.assertQueue(this.queue, this.queueOptions);
    await ch.bindQueue(this.queue, "KubeJobsExchange", "KubeJobs");
    await ch.consume(this.queue, (msg) => this.consume(msg, () => ch.ack(msg)));
  }

  onMessage(string) {
    console.log(`Died: ${string}`);
  }
}
@ -1,45 +0,0 @@
// Imports
import { Worker } from "rabbiteer";
import { VERB } from "../../util/logging.js";
import { insertTestResult } from "../../database/queries/results.js";
import evt from "../../../common/sockets/events.js";
// Class
export default class TestResultsWorker extends Worker {
  constructor(skio) {
    super("TestResults");
    this.skio = skio;
  }

  /* Example Test Result
  {
    testName: "SomeTest",
    testClass: "SomeClass",
    testMethod: "SomeMethod",
    testType: "API/UI",
    testTimestamp: 123893024,
    origin: "TestSuite",
    failed: true,
    failedMessage: "Some Failure",
    screenshotUrl: "https://screenshot",
    expectedScreenshotUrl: "https://expected",
    consoleLogUrl: "https://consolelog"
  }
  */
  async onMessage(testResult) {
    const { pipeline } = testResult;
    await this.handleReporting(testResult);
    // Alter to start next test
    // TODO: the delay should be autopopulated, either by the suite or filled in by the server
    if (pipeline) return this.pipelineTrigger(pipeline);
  }

  pipelineTrigger(pipeline) {
    const { dashboardSocketId: dsi } = pipeline;
    this.skio.to(dsi).emit(evt.PPL_TRG, pipeline);
  }

  handleReporting(result) {
    VERB("TestResults", result.name);
    insertTestResult(result);
  }
}
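Because onMessage takes the already-parsed payload, the worker can be exercised without a broker — this is exactly what the dev route further down does. Field values here are invented:

const worker = new TestResultsWorker(skio); // skio: the shared Socket.IO server
await worker.onMessage({
  name: "failing", // handleReporting VERB-logs result.name
  failed: true,
  pipeline: null, // no pipeline → no PPL_TRG socket emit
});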
@ -1,4 +0,0 @@
import TestResultsWorker from "./TestResultsWorker.js";

const buildWorkers = (skio) => [new TestResultsWorker(skio)];
export default buildWorkers;
@ -1,41 +0,0 @@
import { Router, json as jsonMiddleware } from "express";
import {
  getSilencedTests,
  upsertAlertSilence,
  deleteAlertSilence,
} from "../database/queries/alerting.js";
const router = Router();

// Apply Middlewares
router.use(jsonMiddleware());

// Get Routes
router.get("/silenced", (req, res) => {
  getSilencedTests().then((t) => res.send(t));
});

// Post Routes
router.post("/silence", (req, res) => {
  const { name, class: className, method, expires, keepExpires } = req.body;
  if (!name || !className || !method)
    return res
      .status(400)
      .send("'name', 'class', and 'method' are all required fields!");
  if (expires === null)
    return deleteAlertSilence(req.body)
      .then(() => res.sendStatus(200))
      .catch((e) => res.status(500).send(e));
  // Guard with ?? {} — destructuring an undefined expires would throw.
  const { h, m } = keepExpires ? {} : (expires ?? {});
  if (!keepExpires && (h == null || m == null))
    return res.status(400).send("Both 'h' and 'm' are required fields!");
  if (!keepExpires && (h < 0 || m < 0))
    return res
      .status(400)
      .send("'h' and 'm' must be greater than or equal to 0!");
  // TODO: set the maxes via config instead of hardcoding them
  if (!keepExpires && (h > 72 || m > 59))
    // The missing return here previously let the upsert run after a 400.
    return res.status(400).send("'h' and 'm' must not exceed the set maxes!");
  upsertAlertSilence(req.body).then(() => res.sendStatus(200));
});

export default router;
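A client-side sketch of the silence contract enforced above; the test names are borrowed from the seed data earlier, and the endpoint mount comes from the route index below:

// Silence a test for 1h30m; send expires: null instead to delete the silence.
await fetch("/api/alerting/silence", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    name: "failing",
    class: "failing.js",
    method: "FAKEMETHOD",
    expires: { h: 1, m: 30 },
  }),
});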
@ -1,44 +0,0 @@
import { Router, json as jsonMiddleware } from "express";
import {
  getTests,
  getPipelineMappings,
  upsertTest,
  truncateTests,
  removeDroppedTests,
} from "../database/queries/catalog.js";
const router = Router();

const maxSize = 1024 * 1024 * 100; // 100MB

// Apply Middlewares
router.use(jsonMiddleware({ limit: maxSize }));

// Get Routes
router.get("/tests", (req, res) => {
  getTests().then((t) => res.json(t));
});

router.get("/pipeline-mappings", (req, res) => {
  getPipelineMappings().then((m) => res.json(m));
});

// Post Routes
router.post("/update", (req, res) => {
  if (!req.body) return res.status(400).send("Body required!");
  if (!Array.isArray(req.body))
    return res.status(400).send("Body must be an array!");
  const wrongImage = req.body.find(({ image }) => image !== req.body[0].image);
  if (wrongImage)
    return res.status(400).send("All tests in an update must share the same image!");
  const testNames = req.body.map(({ name }) => name);

  // Upsert new tests
  const upserts = Promise.all(
    req.body.map((catalogItem) => upsertTest(catalogItem))
  );
  const dropRm = upserts.then(() => removeDroppedTests(testNames));

  dropRm.then(() => res.sendStatus(200)).catch((e) => res.status(500).send(e));
});

export default router;
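A sketch of a catalog publish. The route only validates the array shape and the shared-image rule, so the class/method fields below are assumptions borrowed from the seed data:

// Every item must share one image; tests absent from the array are dropped.
await fetch("/api/catalog/update", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify([
    { name: "failing", class: "failing.js", method: "FAKEMETHOD", image: "node:latest" },
    { name: "secondary1", class: "secondary1.js", method: "FAKEMETHOD", image: "node:latest" },
  ]),
});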
@ -1,13 +0,0 @@
import { Router, json as jsonMiddleware } from "express";
import TestResultsWorker from "../rabbit/workers/TestResultsWorker.js";

export default function buildDevRoute(pg, skio) {
  const router = Router();
  router.use(jsonMiddleware());
  router.post("/rabbit/TestResults", (req, res) => {
    const { testResult } = req.body;
    new TestResultsWorker(skio).onMessage(testResult);
    res.sendStatus(200);
  });
  return router;
}
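With USE_DEV_ROUTER=true (see the route index below), a result can be injected over HTTP instead of RabbitMQ:

// Drives TestResultsWorker.onMessage directly; dev environments only.
await fetch("/api/dev/rabbit/TestResults", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ testResult: { name: "failing", failed: true } }),
});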
libold/server/routes/react-route.js (vendored)
@ -1,8 +0,0 @@
import express, { Router } from "express";
import path from "path";
const router = Router();
router.use("/", express.static(path.resolve("./build")));
router.get("/*", (req, res) =>
  res.sendFile(path.resolve("./build/index.html"))
);
export default router;
@ -1,27 +0,0 @@
import { Router, json as jsonMiddleware } from "express";
import {
  getCurrentlyFailing,
  ignoreResult,
} from "../database/queries/results.js";
const router = Router();

// Apply Middlewares
router.use(jsonMiddleware());

// Get Routes
router.get("/failing", (req, res) => {
  getCurrentlyFailing().then((f) => res.json(f));
});

// Post Routes
router.post("/history", (req, res) => {
  res.send([]);
});

router.post("/ignore", (req, res) => {
  if (!req.body || !req.body.id)
    return res.status(400).send("'id' is required!");
  ignoreResult(req.body).then(() => res.sendStatus(200));
});

export default router;
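A read-then-ignore sketch; it assumes failing rows carry an id field, which the ignore route requires but never documents:

const failing = await fetch("/api/results/failing").then((r) => r.json());
await fetch("/api/results/ignore", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ id: failing[0]?.id }),
});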
@ -1,30 +0,0 @@
// Imports
import express from "express";

// Routes
import vitals from "../routes/vitals-route.js";
import results from "../routes/results-route.js";
import alerting from "../routes/alerting-route.js";
import react from "../routes/react-route.js";
import catalog from "../routes/catalog-route.js";

import buildDevRoute from "../routes/dev-route.js";

export default function buildRoutes(pg, skio) {
  const router = express.Router();
  // Special Routes
  router.use(vitals);
  router.all("/", (req, res) => res.redirect("/qualiteer"));
  if (process.env.USE_DEV_ROUTER === "true")
    router.use("/api/dev", buildDevRoute(pg, skio));

  // Middlewares

  // Routes
  router.use("/qualiteer", react); // Static Build Route
  router.use("/api/results", results);
  router.use("/api/alerting", alerting);
  router.use("/api/catalog", catalog);

  return router;
}
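Mounting is plain Express; the import path and the pg/skio stand-ins below are placeholders for whatever the boot sequence actually constructs:

import express from "express";
import buildRoutes from "./lib/routes/index.js"; // hypothetical path

const pg = null; // placeholder: database handle created at boot
const skio = null; // placeholder: Socket.IO server created at boot
const app = express();
app.use(buildRoutes(pg, skio));
app.listen(process.env.PORT ?? 3000);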
@ -1,7 +0,0 @@
import { Router } from "express";
const router = Router();

// Get Routes
router.get("/healthz", (req, res) => res.sendStatus(200));

export default router;
@ -1,28 +0,0 @@
// Imports
import { Chalk } from "chalk";
const { redBright, greenBright, yellowBright, cyanBright, magentaBright } =
  new Chalk({ level: 2 });

// Logging
const logColor = (color, header, ...args) =>
  console.log(color(header), ...args);

export const logError = (...args) => logColor(redBright, ...args);
export const logConfirm = (...args) => logColor(greenBright, ...args);
export const logWarn = (...args) => logColor(yellowBright, ...args);
export const logInfo = (...args) => logColor(cyanBright, ...args);
export const logVerbose = (...args) => logColor(magentaBright, ...args);

export const ERR = (header, ...args) => logError(`[${header}]`, ...args);
export const OK = (header, ...args) => logConfirm(`[${header}]`, ...args);
export const WARN = (header, ...args) => logWarn(`[${header}]`, ...args);
export const INFO = (header, ...args) => logInfo(`[${header}]`, ...args);
export const VERB = (header, ...args) => logVerbose(`[${header}]`, ...args);
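Every helper prints a colored [HEADER] followed by the remaining arguments, for example:

INFO("SERVER", "Listening on port", 3000); // cyan [SERVER] Listening on port 3000
WARN("CATALOG", "Tag 'bogus' did not have a valid prefix!"); // yellow [CATALOG]
ERR("K8S", new Error("Job creation failed")); // red [K8S]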