Link K8S deps properly

Elijah Dunemask 2022-10-08 17:47:46 +00:00
parent 0ac77cdb15
commit f0260fc819
64 changed files with 4282 additions and 3069 deletions

@@ -6,13 +6,14 @@ import { INFO, OK, logInfo } from "../util/logging.js";
// Import Core Modules
import buildRoutes from "../routes/router.js";
import buildPostgres from "../database/postgres.js";
import pg from "../database/postgres.js";
import injectSockets from "../sockets/socket-server.js";
import JobManager from "../jobs/JobManager.js";
import buildRabbiteer from "../rabbit/rabbit-workers.js";
// Constants
const title = "QLTR";
const rabbiteerEnabled = process.env.QUALITEER_RABBITEER_ENABLED !== "false";
const port = process.env.QUALITEER_DEV_PORT ?? 52000;
// Class
@@ -27,7 +28,7 @@ export default class Qualiteer {
logInfo(fig.textSync(title, "Cyberlarge"));
INFO("INIT", "Initializing...");
this.app = express();
this.pg = buildPostgres();
this.pg = pg;
this.server = http.createServer(this.app);
this.sockets = injectSockets(this.server, this.jobs);
this.routes = buildRoutes(this.pg, this.sockets);
@@ -37,7 +38,8 @@ export default class Qualiteer {
async _connect() {
await this.pg.connect();
// await this.rabbiteer.connect();
if (!rabbiteerEnabled) return;
await this.rabbiteer.connect();
}
start() {

lib/database/delays.js Normal file

@@ -0,0 +1,23 @@
const seconds = 1000;
const minutes = 60 * seconds;
const hours = 60 * minutes;
export const DELAYS = {
"1sec": 1 * seconds,
"5sec": 5 * seconds,
"10sec": 10 * seconds,
"30sec": 30 * seconds,
"1min": 1 * minutes,
"5min": 5 * minutes,
"10min": 10 * minutes,
"15min": 15 * minutes,
"30min": 30 * minutes,
"1hour": 1 * hours,
"2hour": 2 * hours,
"3hour": 3 * hours,
"4hour": 4 * hours,
};
export default function getDelay(delayStr) {
if (DELAYS[delayStr]) return DELAYS[delayStr];
return 0;
}
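
For reference, a minimal sketch of how this lookup behaves (import path per the new file; the sample keys are illustrative):

import getDelay, { DELAYS } from "./lib/database/delays.js";

getDelay("5min"); // 300000 (5 * 60 * 1000)
getDelay("1hour"); // 3600000
getDelay("2day"); // 0 -- unknown keys fall back to no delay
console.log(Object.keys(DELAYS).length); // 13 named delays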

@@ -0,0 +1,23 @@
CREATE SEQUENCE catalog_id_seq;
CREATE TABLE catalog (
id bigint NOT NULL DEFAULT nextval('catalog_id_seq') PRIMARY KEY,
name varchar(255) DEFAULT NULL,
class varchar(255) DEFAULT NULL,
image varchar(255) DEFAULT NULL,
"path" varchar(255) DEFAULT NULL,
description varchar(1023) DEFAULT NULL,
type varchar(31) DEFAULT NULL,
created TIMESTAMP NOT NULL DEFAULT now(),
mr varchar(255) DEFAULT NULL,
tags varchar(255)[] DEFAULT NULL,
crons varchar(127) DEFAULT NULL,
env varchar(31)[] DEFAULT NULL,
regions varchar(15)[] DEFAULT NULL,
triggers varchar(255)[] DEFAULT NULL,
pipeline BOOLEAN DEFAULT FALSE,
coverage varchar(255)[] DEFAULT NULL,
projects varchar(255)[] DEFAULT NULL,
delay varchar(31) DEFAULT NULL,
CONSTRAINT unique_name UNIQUE(name)
);
ALTER SEQUENCE catalog_id_seq OWNED BY catalog.id;
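
A sketch of a row insert against this schema, using the shared pg wrapper from this commit (values borrowed from the seed data below; purely illustrative):

import pg from "./lib/database/postgres.js";

await pg.connect();
await pg.query(
`INSERT INTO catalog (name, class, image, "path", type, tags, pipeline)
VALUES ('single', 'single.js', 'node:latest', 'tests/assets/suite/single.js',
'api', ARRAY['cron_1hour', 'reg_us'], FALSE)`
);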

@@ -1,14 +0,0 @@
CREATE SEQUENCE test_results_id_seq;
CREATE TABLE test_results (
id bigint NOT NULL DEFAULT nextval('test_results_id_seq') PRIMARY KEY,
name varchar(255) DEFAULT NULL,
"method" varchar(255) DEFAULT NULL,
env varchar(31) DEFAULT NULL,
"timestamp" TIMESTAMP NOT NULL DEFAULT now(),
retry BOOLEAN DEFAULT FALSE,
failed BOOLEAN DEFAULT FALSE,
failed_message varchar(2047) DEFAULT NULL,
screenshot varchar(255) DEFAULT NULL,
weblog varchar(255) DEFAULT NULL
);
ALTER SEQUENCE test_results_id_seq OWNED BY test_results.id;

@@ -1,18 +0,0 @@
CREATE SEQUENCE test_catalog_id_seq;
CREATE TABLE test_catalog (
id bigint NOT NULL DEFAULT nextval('test_catalog_id_seq') PRIMARY KEY,
name varchar(255) DEFAULT NULL,
class varchar(255) DEFAULT NULL,
compound BOOLEAN DEFAULT FALSE,
type varchar(31) DEFAULT NULL,
markers varchar(255)[] DEFAULT NULL,
ignored BOOLEAN DEFAULT FALSE,
comment varchar(1023) DEFAULT NULL,
coverage varchar(255)[] DEFAULT NULL,
env varchar(31)[] DEFAULT NULL,
"path" varchar(255) DEFAULT NULL,
regions varchar(15)[] DEFAULT NULL,
origin varchar(255) DEFAULT NULL,
cron varchar(127) DEFAULT NULL
);
ALTER SEQUENCE test_catalog_id_seq OWNED BY test_catalog.id;

@@ -0,0 +1,15 @@
CREATE SEQUENCE results_id_seq;
CREATE TABLE results (
id bigint NOT NULL DEFAULT nextval('results_id_seq') PRIMARY KEY,
name varchar(255) DEFAULT NULL,
class varchar(255) DEFAULT NULL,
"method" varchar(255) DEFAULT NULL,
env varchar(31) DEFAULT NULL,
"timestamp" TIMESTAMP NOT NULL DEFAULT now(),
triage BOOLEAN DEFAULT FALSE,
failed BOOLEAN DEFAULT FALSE,
message varchar(2047) DEFAULT NULL,
screenshot varchar(255) DEFAULT NULL,
console varchar(255) DEFAULT NULL
);
ALTER SEQUENCE results_id_seq OWNED BY results.id;

@@ -0,0 +1,9 @@
CREATE SEQUENCE alerting_id_seq;
CREATE TABLE alerting (
id bigint NOT NULL DEFAULT nextval('alerting_id_seq') PRIMARY KEY,
name varchar(255) DEFAULT NULL,
class varchar(255) DEFAULT NULL,
"method" varchar(255) DEFAULT NULL,
expires TIMESTAMP NOT NULL DEFAULT now()
);
ALTER SEQUENCE alerting_id_seq OWNED BY alerting.id;

@@ -52,7 +52,8 @@ export const deleteQuery = (table, jsEntry) => {
const conditionals = [];
for (var col of cols) {
entry[col] = buildPostgresValue(entry[col]);
conditionals.push(`x.${col}=${entry[col]}`);
if (entry[col] === "null") conditionals.push(`x.${col} IS NULL`);
else conditionals.push(`x.${col}=${entry[col]}`);
}
return `DELETE FROM ${table} x WHERE ${conditionals.join(" AND ")}`;
};
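
The new branch matters because in SQL, x.col = NULL evaluates to NULL rather than true, so an equality condition never matches rows whose column is NULL; only IS NULL does. Assuming buildPostgresValue renders a JS null as the string "null" (which the comparison above implies) and leaves numbers bare, the generated statements look like:

deleteQuery("alerting", { id: 4 });
// DELETE FROM alerting x WHERE x.id=4
deleteQuery("alerting", { method: null });
// DELETE FROM alerting x WHERE x.method IS NULL (previously x.method=null, which matched nothing)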

@@ -2,15 +2,15 @@
import { migrate } from "postgres-migrations";
import createPgp from "pg-promise";
import moment from "moment";
import { INFO, WARN } from "../util/logging.js";
import { INFO, WARN, OK, VERB } from "../util/logging.js";
// Environment Variables
const {
POSTGRES_DATABASE: database,
POSTGRES_DISABLED: pgDisabled,
POSTGRES_HOST: host,
POSTGRES_PASSWORD: password,
POSTGRES_PORT: port,
POSTGRES_USER: user,
QUALITEER_POSTGRES_DATABASE: database,
QUALITEER_POSTGRES_ENABLED: pgEnabled,
QUALITEER_POSTGRES_HOST: host,
QUALITEER_POSTGRES_PASSWORD: password,
QUALITEER_POSTGRES_PORT: port,
QUALITEER_POSTGRES_USER: user,
} = process.env;
// Postgres-promise Configuration
@@ -34,20 +34,26 @@ const migrationsDir = "lib/database/migrations";
const queryMock = (str) => INFO("POSTGRES MOCK", str);
const connect = (pg) => async () => {
if (pgDisabled) {
if (pgEnabled === "false") {
WARN("POSTGRES", "Postgres Disabled!");
return { query: queryMock };
}
VERB("POSTGRES", "Migrating...");
await migrate(dbConfig, migrationsDir);
// Override the global variable DB
pg = pgp(dbConfig);
// Override fake methods
const pgInstance = pgp(dbConfig);
for (var k in pgInstance) pg[k] = pgInstance[k];
VERB("POSTGRES", "Migrated Successfully");
await pg.connect();
OK("POSTGRES", `Connected to database ${database}!`);
VERB("POSTGRES", "Postgres connected Successfully!");
OK("POSTGRES", `Connected to database ${dbConfig.database}!`);
};
const buildPostgres = () => {
var pg = { query: queryMock, connect: connect(pg) };
var pg = { query: queryMock };
pg.connect = connect(pg);
return pg;
};
export default buildPostgres;
export default buildPostgres();
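
The module now exports a shared instance instead of a factory, and connect() mutates that instance in place (copying the pg-promise client's members onto it), so references captured before the connection stay valid. Intended consumption, matching the import changes above:

import pg from "./lib/database/postgres.js";

await pg.connect(); // runs migrations, then swaps the mock query for a live client
const rows = await pg.query("SELECT 1 AS ok");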

@@ -1,18 +1,56 @@
import pg from "../postgres.js";
import { silencedMock } from "../mocks/alerting-mock.js";
import moment from "moment";
// Imports
import {
insertQuery,
selectWhereAnyQuery,
updateWhereAnyQuery,
deleteQuery,
} from "../pg-query.js";
// Constants
const table = "silenced_tests";
const table = "alerting";
const PG_DISABLED = process.env.POSTGRES_DISABLED;
export const upsertAlertSilence = async (silence) => {
const {
id,
name,
class: className,
method,
expires: duration,
keepExpires,
} = silence;
const { h, m } = duration;
const expires = moment().add(h, "hours").add(m, "minutes").utc().format();
const entry = {
name,
class: className,
method,
expires: keepExpires ? undefined : expires,
};
const asUpdate = {};
for (var k of Object.keys(entry))
asUpdate[k] = entry[k] === "*" ? null : entry[k];
var query = id
? updateWhereAnyQuery(table, asUpdate, { id })
: insertQuery(table, entry);
return pg.query(query);
};
export const deleteAlertSilence = async (silence) => {
const { id } = silence;
const query = deleteQuery(table, { id });
return pg.query(query);
};
// Queries
export const getSilencedTests = async () => {
if (PG_DISABLED) return silencedMock();
const query = `SELECT * from ${table}`;
return pg.query(query);
const silenced = await pg.query(query);
silenced.forEach((t, i) => {
for (var k of Object.keys(t)) silenced[i][k] = t[k] === null ? "*" : t[k];
});
return silenced;
};
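
A sketch of a call that silences a test for 2h30m (field names per the destructuring above; without an id this takes the insert branch, and moment converts the duration to an absolute UTC expiry):

await upsertAlertSilence({
name: "failing",
class: "failing.js",
method: "FAKEMETHOD",
expires: { h: 2, m: 30 },
keepExpires: false,
});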

@@ -3,10 +3,13 @@ import pg from "../postgres.js";
import {
insertQuery,
selectWhereAnyQuery,
updateWhereAnyQuery,
onConflictUpdate,
} from "../pg-query.js";
import getFilteredTags from "../tags.js";
import getDelay from "../delays.js";
// Constants
const table = "tests";
const table = "catalog";
const PG_DISABLED = process.env.POSTGRES_DISABLED;
import { testsMock, mappingsMock } from "../mocks/catalog-mock.js";
// Queries
@@ -19,8 +22,30 @@ export const getTests = async () => {
export const getPipelineMappings = async () => {
if (PG_DISABLED) return mappingsMock();
const query = `SELECT * from ${table}`;
return pg.query(query);
const query = `SELECT * from ${table} WHERE pipeline`;
const tests = await pg.query(query);
const mappings = [];
var newTrigger;
for (var test of tests) {
if (test.triggers) continue;
const { name, delay: delayStr } = test;
var triggerStack = [{ name, delay: getDelay(delayStr), delayStr }];
newTrigger = { name, delayStr };
// find() returns undefined (not null) when no test triggers the current one
while (
(newTrigger = tests.find(
(te) => te.triggers && te.triggers.includes(newTrigger.name)
)) !== undefined
) {
triggerStack.push({
name: newTrigger.name,
delay: getDelay(newTrigger.delay),
delayStr: newTrigger.delay,
});
}
mappings.push(triggerStack.reverse());
}
return mappings;
};
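
The walk starts at each pipeline test that triggers nothing (a chain leaf), repeatedly finds the test that triggers it, and reverses the stack so the root runs first. Given the seed catalog added later in this commit, the mapping for the tertiary1 leaf would come back roughly as:

[
{ name: "primary", delay: 0, delayStr: null },
{ name: "secondary1", delay: 1000, delayStr: "1sec" },
{ name: "tertiary1", delay: 0, delayStr: null },
]
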
export const getProjects = async () => {
@@ -28,3 +53,47 @@ export const getProjects = async () => {
const tests = testsMock();
}
};
export const upsertTest = async (test) => {
if (PG_DISABLED) return console.log("Would insert test", test);
const {
name,
class: className,
image,
path,
description,
type,
created,
mergeRequest,
tags,
} = test;
const filteredTags = getFilteredTags(tags);
const env =
filteredTags.ignore && filteredTags.env
? filteredTags.env.filter((e) => !filteredTags.ignore.includes(e))
: filteredTags.env;
const catalogEntry = {
name,
class: className,
image,
path,
description: description ? description : null,
type,
created,
mr: mergeRequest,
tags,
crons: filteredTags.crons,
env,
regions: filteredTags.regions,
triggers: filteredTags.triggers,
pipeline: filteredTags.pipeline ? true : false,
coverage: filteredTags.coverage,
projects: filteredTags.projects,
delay: filteredTags.delay ? filteredTags.delay[0] : null,
};
const query =
insertQuery(table, catalogEntry) + onConflictUpdate(["name"], catalogEntry);
return await pg.query(query);
};
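
The upsert keys on the unique_name constraint from the catalog migration, so re-posting an existing test updates it in place. Assuming onConflictUpdate emits the standard Postgres clause, the generated SQL is shaped like:

// INSERT INTO catalog (name, class, image, ...) VALUES ('single', 'single.js', 'node:latest', ...)
// ON CONFLICT (name) DO UPDATE SET class = EXCLUDED.class, image = EXCLUDED.image, ...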

@@ -8,41 +8,36 @@ import {
updateWhereAnyQuery,
} from "../pg-query.js";
// Constants
const table = "test_results";
const table = "results";
const recentResultsMax = 5;
const PG_DISABLED = process.env.POSTGRES_DISABLED;
// Queries
export const insertTestResult = (testResult) => {
const {
test_name,
test_class,
test_method,
test_path,
test_type,
test_timestamp,
test_retry,
origin,
name,
class: className,
method,
env,
timestamp,
triage,
failed,
failed_message,
screenshot_url,
expected_screenshot_url,
weblog_url,
message,
screenshot,
console: cs,
} = testResult;
var query = insertQuery(table, {
test_name,
test_class,
test_method,
test_path,
test_type,
test_timestamp,
test_retry,
origin,
name,
class: className,
method,
env,
timestamp,
triage,
failed,
failed_message,
screenshot_url,
expected_screenshot_url,
weblog_url,
message,
screenshot,
console: cs,
});
query += "\n RETURNING *";
@@ -51,19 +46,48 @@ export const insertTestResult = (testResult) => {
export const getCurrentlyFailing = async () => {
if (PG_DISABLED) return failingMock();
/**/
const query = `WITH recent as (SELECT * FROM test_results WHERE (timestamp BETWEEN NOW() - INTERVAL '24 HOURS' AND NOW()) AND NOT(failed AND retry)) SELECT * FROM recent WHERE timestamp = (SELECT MAX(timestamp) FROM recent r2 WHERE recent.name = r2.name) AND failed;
`;
return pg.query(query);
/* This can probably be changed into a super query, but perhaps faster/smaller */
const recent = `SELECT * FROM ${table} WHERE (timestamp BETWEEN NOW() - INTERVAL '24 HOURS' AND NOW()) AND NOT(failed AND triage)`;
const slimCatalog = `SELECT name, crons, class, type, pipeline, env AS enabled_env FROM catalog`;
const failing = `SELECT * FROM recent INNER JOIN slim_catalog USING(name) WHERE timestamp = (SELECT MAX(timestamp) FROM recent r2 WHERE recent.name = r2.name) AND failed`;
const applicableFailing = `SELECT name, count(*) as fails FROM recent WHERE recent.name IN (SELECT name FROM failing) GROUP BY name`;
/*const runHistory = `SELECT name, timestamp, failed FROM (SELECT *, ROW_NUMBER() OVER(PARTITION BY name ORDER BY timestamp) as n
FROM ${table} WHERE name IN (SELECT name FROM failing)) as ord WHERE n <= ${recentResultsMax} ORDER BY name DESC`;*/
const runHistory = `SELECT name, timestamp, failed FROM results WHERE NOT triage AND name IN (SELECT name FROM failing) ORDER BY timestamp DESC LIMIT ${recentResultsMax}`;
// const recentQuery = pg.query(recent);
const failingQuery = pg.query(
`WITH recent as (${recent}), slim_catalog as (${slimCatalog}) ${failing}`
);
const applicableQuery = pg.query(
`WITH recent as (${recent}), slim_catalog as (${slimCatalog}), failing as (${failing}) ${applicableFailing}`
);
const historyQuery = pg.query(
`WITH recent as (${recent}), slim_catalog as (${slimCatalog}), failing as (${failing}) ${runHistory}`
);
/*SELECT * FROM test_results WHERE "timestamp" BETWEEN NOW() - INTERVAL '24 HOURS' AND NOW(); <-- Last 24 hours all runs*/
/* SELECT * FROM test_results tr1 WHERE timestamp BETWEEN NOW() - INTERVAL '24 HOURS' AND NOW() AND timestamp = (SELECT MAX(timestamp) FROM test_results tr2 WHERE tr1.name = tr2.name); <-- Last 24 hours only most recent
*/
const [currentlyFailing, applicableFails, failHistory] = await Promise.all([
failingQuery,
applicableQuery,
historyQuery,
]);
for (var i = 0; i < currentlyFailing.length; i++) {
currentlyFailing[i].dailyFails = parseInt(
applicableFails.find((af) => af.name === currentlyFailing[i].name).fails
);
currentlyFailing[i].recentResults = [];
currentlyFailing[i].enabledEnv = currentlyFailing[i].enabled_env;
currentlyFailing[i].isPipeline = currentlyFailing[i].pipeline;
delete currentlyFailing[i].enabled_env;
delete currentlyFailing[i].pipeline;
for (var fh of failHistory) {
if (fh.name !== currentlyFailing[i].name) continue;
currentlyFailing[i].recentResults.push(fh);
}
}
return currentlyFailing;
};
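
All three statements share the recent and slim_catalog CTEs and run concurrently through Promise.all; the history query, for example, expands to roughly:

// WITH recent as (SELECT * FROM results WHERE (timestamp BETWEEN NOW() - INTERVAL '24 HOURS' AND NOW()) AND NOT(failed AND triage)),
// slim_catalog as (SELECT name, crons, class, type, pipeline, env AS enabled_env FROM catalog),
// failing as (SELECT * FROM recent INNER JOIN slim_catalog USING(name) WHERE ... AND failed)
// SELECT name, timestamp, failed FROM results WHERE NOT triage AND name IN (SELECT name FROM failing)
// ORDER BY timestamp DESC LIMIT 5
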
export const getCurrentlyFailingFull = (env) => {
const query = `WITH recent AS (SELECT * FROM test_results WHERE (timestamp BETWEEN NOW() - INTERVAL '24 HOURS' AND NOW()) AND NOT(failed AND retry) AND env='prod') SELECT * FROM recent INNER JOIN test_catalog USING(name) WHERE timestamp = (SELECT MAX(timestamp) FROM recent r2 WHERE recent.name = r2.name) AND failed;
;`;
export const ignoreResult = async ({ id }) => {
const query = updateWhereAnyQuery(table, { failed: false }, { id });
return pg.query(query);
};

lib/database/seed.js Normal file

@@ -0,0 +1,30 @@
import pg from "./postgres.js";
import { upsertTest } from "./queries/catalog.js";
import { insertTestResult } from "./queries/results.js";
import { upsertAlertSilence } from "./queries/alerting.js";
import {
seed as catalogSeed,
table as catalogTable,
} from "./seeds/catalog-seed.js";
import {
seed as resultsSeed,
table as resultsTable,
} from "./seeds/results-seed.js";
import {
seed as alertingSeed,
table as alertingTable,
} from "./seeds/alerting-seed.js";
const database = process.env.POSTGRES_DATABASE ?? "qualiteer";
await pg.connect();
const resetAndSeed = async (table, getSeeds, seed) => {
await pg.query(`TRUNCATE ${table} RESTART IDENTITY CASCADE;`);
for (var s of getSeeds()) await seed(s);
};
await resetAndSeed(catalogTable, catalogSeed, upsertTest);
await resetAndSeed(resultsTable, resultsSeed, insertTestResult);
await resetAndSeed(alertingTable, alertingSeed, upsertAlertSilence);
process.exit();
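
The script truncates each table (restarting identities and cascading), replays the seeds through the same helpers the API uses, and exits. A likely invocation, assuming it is run directly from the repository root:

node lib/database/seed.js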

@@ -0,0 +1,11 @@
export const table = "alerting";
export const seed = () => {
return [
{
name: `failing`,
class: `failing.js`,
method: "FAKEMETHOD",
expires: new Date().toJSON(),
},
];
};

@@ -0,0 +1,126 @@
export const table = "catalog";
export const seed = () => {
return [
{
name: "single",
class: "single.js",
image: "node:latest",
path: "tests/assets/suite/single.js",
description: "This is a single test",
type: "api",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: ["cron_1hour", "reg_us", "env_ci", "proj_core", "ignore_alt"],
},
{
name: "failing",
class: "failing.js",
image: "node:latest",
path: "tests/assets/suite/failing.js",
description: "This is a failing test",
type: "ui",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: ["cron_1hour", "reg_us", "env_ci", "proj_core"],
},
{
name: "primary",
class: "primary.js",
image: "node:latest",
path: "tests/assets/suite/primary.js",
description: "This is a primary test",
type: "api",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: [
"pipeline",
"cron_1hour",
"reg_us",
"proj_core",
"ignore_alt",
"triggers_secondary1",
"triggers_secondary2",
],
},
{
name: "secondary1",
class: "secondary1.js",
image: "node:latest",
path: "tests/assets/suite/secondary1.js",
description: "This is a secondary test",
type: "api",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: [
"pipeline",
"cron_1hour",
"reg_us",
"proj_core",
"triggers_tertiary1",
"triggers_tertiary2",
"delay_1sec",
],
},
{
name: "secondary2",
class: "secondary2.js",
image: "node:latest",
path: "tests/assets/suite/secondary2.js",
description: "This is a secondary2 test",
type: "api",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: [
"pipeline",
"cron_1hour",
"reg_us",
"proj_core",
"triggers_tertiary3",
],
},
{
name: "tertiary1",
class: "tertiary1.js",
image: "node:latest",
path: "tests/assets/suite/tertiary1.js",
description: "This is a third test",
type: "api",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: ["pipeline", "cron_1hour", "reg_us", "proj_core"],
},
{
name: "tertiary2",
class: "tertiary2.js",
image: "node:latest",
path: "tests/assets/suite/tertiary2.js",
description: "This is a third2 test",
type: "api",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: ["pipeline", "cron_1hour", "reg_us", "proj_core", "delay_10sec"],
},
{
name: "tertiary3",
class: "tertiary3.js",
image: "node:latest",
path: "tests/assets/suite/tertiary3.js",
description: "This is a third3 test",
type: "api",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: ["pipeline", "cron_1hour", "reg_us", "proj_core", "delay_5sec"],
},
{
name: "single-alt",
class: "single-alt.js",
image: "node:latest",
path: "tests/assets/suite/single-alt.js",
description: "This is an alternative test",
type: "ui",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: ["cron_1hour", "reg_us", "env_ci", "proj_alt"],
},
];
};

@@ -0,0 +1,29 @@
export const table = "results";
export const seed = () => {
return [
{
name: "failing",
class: "failing.js",
method: "FAKEMETHOD",
env: "prod",
timestamp: new Date().toJSON(),
triage: false,
failed: true,
message: "Some Test FailureMessage",
screenshot: "https://picsum.photos/1920/1080",
console: "https://example.com",
},
{
name: "secondary1",
class: "secondary1.js",
method: "FAKEMETHOD",
env: "prod",
timestamp: new Date().toJSON(),
triage: false,
failed: true,
message: "Some Test FailureMessage from Secondary1",
screenshot: "https://picsum.photos/1920/1080",
console: "https://example.com",
},
];
};

lib/database/tags.js Normal file

@@ -0,0 +1,26 @@
import { WARN } from "../util/logging.js";
export const TAGS = {
IGNORE: { name: "ignore", tag: "ignore_", value: (t) => t },
CRON: { name: "crons", tag: "cron_", value: (t) => t },
ENV: { name: "env", tag: "env_", value: (t) => t },
REGIONS: { name: "regions", tag: "reg_", value: (t) => t },
PIPELINE: { name: "pipeline", tag: "pipeline", value: (t) => t },
COVERAGE: { name: "coverage", tag: "coverage_", value: (t) => t },
PROJECT: { name: "projects", tag: "proj_", value: (t) => t },
DELAY: { name: "delay", tag: "delay_", value: (t) => t },
TRIGGERS: { name: "triggers", tag: "triggers_", value: (t) => t },
};
export default function getFilteredTags(tags) {
const filtered = {};
for (var t of tags) {
const tag = Object.values(TAGS).find((ta) => t.startsWith(ta.tag));
if (!tag) {
WARN("CATALOG", `Tag '${t}' did not have a valid prefix!`);
continue;
}
if (!filtered[tag.name]) filtered[tag.name] = [];
filtered[tag.name].push(tag.value(t.replace(tag.tag, "")));
}
return filtered;
}
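
A sketch of the filter applied to one of the seeded tag lists (each prefix is stripped and grouped under the tag's name; unrecognized prefixes are warned about and dropped):

getFilteredTags(["pipeline", "cron_1hour", "reg_us", "proj_core", "delay_1sec", "bogus_x"]);
// => { pipeline: [""], crons: ["1hour"], regions: ["us"], projects: ["core"], delay: ["1sec"] }
// logs: Tag 'bogus_x' did not have a valid prefix!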

@@ -27,6 +27,8 @@ class JobManager {
pushLog(jobId, log) {
const job = this.getJobById(jobId);
if (!job) return;
if (log instanceof Array) job.log.push(...log);
else job.log.push(log);
}
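
The instanceof branch lets callers push either a single line or a batch (a sketch, with manager being a JobManager instance and jobId belonging to a live job):

manager.pushLog(jobId, "suite started"); // single entry
manager.pushLog(jobId, ["step 1 ok", "step 2 ok"]); // spread into job.log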

@@ -1,11 +1,5 @@
const baseCommand = "node";
const suiteEntry = "tests/assets/suite/runner.js";
const pipelineMapping = [
{
id: 0,
pipeline: [{ name: "primary" }, { name: "secondary", delay: 5000 }],
},
];
const buildCommon = (jobRequest) => {
const { isTriage, ignore, region, testNames } = jobRequest;
@@ -25,7 +19,6 @@ const buildManual = (jobReq) => {
throw Error("Currently only 1 test can be selected!");
command.push(`test=${testNames[0]}`);
return { ...jobReq, command };
};
@@ -51,7 +44,6 @@ const buildPipeline = (jobReq, socketId) => {
};
export default function jobBuilder(jobRequest, id) {
console.log(jobRequest);
const jobReq = buildCommon(jobRequest, id);
const { pipeline, testNames, tags } = jobReq;
if (pipeline) return buildPipeline(jobReq, id);

@@ -5,22 +5,13 @@
"name": "qltr-job-test-suite-1"
},
"spec": {
"ttlSecondsAfterFinished": 2,
"template": {
"spec": {
"containers": [
{
"resources": {
"requests": {
"memory": "64MI",
"cpu": "250m"
},
"limits": {
"memory": "128MI",
"cpu": "500m"
}
},
"name": "qltr-job-test-suite-1",
"image": "node",
"image": "node:latest",
"imagePullPolicy": "Always",
"command": ["node", "--version"]
}

@@ -1,16 +1,24 @@
import cp from "node:child_process";
import k8s from "@kubernetes/client-node";
import { INFO, ERR } from "../../util/logging.js";
import { jobBuilder, createFile, deleteFile } from "./k8s-common.js";
const applyFile = async (filePath) => {
const command = `kubectl apply -f ${filePath}`;
return new Promise((res, rej) =>
cp.exec(command, (err, stdout, stderr) => (err && rej(err)) || res(stdout))
);
};
export default async function createJob(jobRequest) {
const job = jobBuilder(jobRequest);
const filePath = createFile(job);
job.spec.template.spec.containers[0].image = "node:latest";
job.spec.template.spec.containers[0].command = ["node", "--version"];
// job.spec.template.spec.containers[0].image = "reed";
// job.spec.template.spec.containers[0].command = "python3 -m pytest -v --tb=no -p no:warnings".split(" ");
const kc = new k8s.KubeConfig();
kc.loadFromCluster();
const batchV1Api = kc.makeApiClient(k8s.BatchV1Api);
const batchV1beta1Api = kc.makeApiClient(k8s.BatchV1beta1Api);
const jobName = job.metadata.name;
batchV1Api
.createNamespacedJob("dunestorm-dunemask", job)
.then((res) => INFO("K8S", `Job ${jobName} created!`))
.catch((err) => ERR("K8S", err));
/*const filePath = createFile(job);
applyFile(filePath);
deleteFile(filePath);
deleteFile(filePath);*/
}
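
This is where the commit title lands: the Job is now submitted through @kubernetes/client-node (loadFromCluster reads the pod's service-account credentials) instead of shelling out to kubectl apply, which survives only as the commented fallback. A sketch of the call (jobRequest fields per the job builder; the namespace is hard-coded above):

createJob({ testNames: ["single"], isTriage: false });
// logs "K8S Job qltr-job-... created!" on success; failures are caught and logged via ERR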

@@ -2,16 +2,21 @@ import Rabbiteer from "rabbiteer";
import buildWorkers from "./workers/index.js";
// Pull Environment Variables
const { RABBIT_HOST: host, RABBIT_USER: user, RABBIT_PASS: pass } = process.env;
const {
QUALITEER_RABBIT_HOST: host,
QUALITEER_RABBIT_USER: user,
QUALITEER_RABBIT_PASS: pass,
} = process.env;
// Rabbit Config
const rabbitConfig = {
host: host ?? "localhost",
user: user ?? "rabbit",
pass: pass ?? "rabbit",
protocol: "amqp:",
host: `amqp://${host ?? "localhost"}`, // a template literal is never nullish, so the fallback must wrap host itself
user: user ?? "guest",
pass: pass ?? "guest",
};
const buildRabbiteer = (skio) =>
const buildRabbiteer = (pg, skio) =>
new Rabbiteer(null, buildWorkers(skio), { autoRabbit: rabbitConfig });
export default buildRabbiteer;

@@ -1,5 +1,7 @@
// Imports
import { Worker } from "rabbiteer";
import { VERB } from "../../util/logging.js";
import { insertTestResult } from "../../database/queries/results.js";
import evt from "../../sockets/events.js";
// Class
export default class TestResultsWorker extends Worker {
@@ -23,9 +25,9 @@
consoleLogUrl: "https://consolelog"
}
*/
onMessage(testResult) {
async onMessage(testResult) {
const { pipeline } = testResult;
await this.handleReporting(testResult);
// Alter to start next test
// TODO the delay should be autopopulated either by the suite, or filled in by the server
if (pipeline) return this.pipelineTrigger(pipeline);
@@ -35,4 +37,9 @@
const { dashboardSocketId: dsi } = pipeline;
this.skio.to(dsi).emit(evt.PPL_TRG, pipeline);
}
handleReporting(result) {
VERB("TestResults", result.name);
insertTestResult(result);
}
}

@@ -1,5 +1,9 @@
import { Router, json as jsonMiddleware } from "express";
import { getSilencedTests } from "../database/queries/alerting.js";
import {
getSilencedTests,
upsertAlertSilence,
deleteAlertSilence,
} from "../database/queries/alerting.js";
const router = Router();
// Apply Middlewares
@@ -12,7 +16,26 @@ router.get("/silenced", (req, res) => {
// Post Routes
router.post("/silence", (req, res) => {
res.sendStatus(200);
const { name, class: className, method, expires, keepExpires } = req.body;
if (!name || !className || !method)
return res
.status(400)
.send("'name', 'class', and 'method' are all required Fields!");
if (expires === null)
return deleteAlertSilence(req.body)
.then(() => res.sendStatus(200))
.catch((e) => res.status(500).send(e));
const { h, m } = keepExpires ? {} : expires;
if (!keepExpires && (h == null || m == null))
return res.status(400).send("Both 'h' and 'm' are required fields!");
if (!keepExpires && (h < 0 || m < 0))
return res
.status(400)
.send("'h' and 'm' must be greater than or equal to 0!");
// TODO set max times as well
if (!keepExpires && (h > 72 || m > 59))
return res
.status(400)
.send("'h' and 'm' must not exceed the set maxes!");
upsertAlertSilence(req.body).then(() => res.sendStatus(200));
});
export default router;
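
The implied contract, as a sketch (the mount point is assumed; body shapes follow the validation above):

// create or refresh a silence (expires is a duration)
await fetch("/api/alerting/silence", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ name: "failing", class: "failing.js", method: "FAKEMETHOD", expires: { h: 1, m: 0 } }),
});
// sending expires: null deletes the silence instead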

@@ -1,5 +1,9 @@
import { Router, json as jsonMiddleware } from "express";
import { getTests, getPipelineMappings } from "../database/queries/catalog.js";
import {
getTests,
getPipelineMappings,
upsertTest,
} from "../database/queries/catalog.js";
const router = Router();
const maxSize = 1024 * 1024 * 100; // 100MB
@@ -18,8 +22,10 @@ router.get("/pipeline-mappings", (req, res) => {
// Post Routes
router.post("/update", (req, res) => {
// Update All Tests
res.sendStatus(200);
if (!req.body) return res.status(400).send("Body required!");
if (!Array.isArray(req.body))
return res.status(400).send("Body must be an array!");
const upserts = Promise.all(req.body.map((catalogItem) => upsertTest(catalogItem)));
upserts.then(() => res.sendStatus(200)).catch((e) => res.status(500).send(e));
});
export default router;
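
The update route now expects a JSON array of catalog items and upserts them in parallel; a sketch of a valid payload (field names mirror upsertTest's destructuring):

[
{
name: "single",
class: "single.js",
image: "node:latest",
path: "tests/assets/suite/single.js",
type: "api",
created: new Date().toJSON(),
mergeRequest: "https://example.com",
tags: ["cron_1hour", "reg_us", "env_ci", "proj_core"],
},
]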

@@ -1,5 +1,8 @@
import { Router, json as jsonMiddleware } from "express";
import { getCurrentlyFailing } from "../database/queries/results.js";
import {
getCurrentlyFailing,
ignoreResult,
} from "../database/queries/results.js";
const router = Router();
// Apply Middlewares
@@ -15,4 +18,10 @@ router.post("/history", (req, res) => {
res.send([]);
});
router.post("/ignore", (req, res) => {
if (!req.body || !req.body.id)
return res.status(400).send("'id' is required!");
ignoreResult(req.body).then(() => res.sendStatus(200));
});
export default router;

@@ -2,6 +2,7 @@
import express from "express";
// Routes
import vitals from "../routes/vitals-route.js";
import results from "../routes/results-route.js";
import alerting from "../routes/alerting-route.js";
import react from "../routes/react-route.js";
@@ -12,6 +13,7 @@ import buildDevRoute from "../routes/dev-route.js";
export default function buildRoutes(pg, skio) {
const router = express.Router();
// Special Routes
router.use(vitals);
router.all("/", (req, res) => res.redirect("/qualiteer"));
if (process.env.USE_DEV_ROUTER === "true")
router.use("/api/dev", buildDevRoute(pg, skio));

@@ -0,0 +1,7 @@
import { Router } from "express";
const router = Router();
// Get Routes
router.get("/healthz", (req, res) => res.sendStatus(200));
export default router;

@@ -13,7 +13,7 @@ const OUT = "o";
export default class Executor {
constructor(config, payload) {
this.url = config.url(payload) ?? process.env.QUALITEER_URL;
this.url = config.url(payload) ?? process.env.QUALITEER_EXECUTOR_URL;
this.jobId = config.jobId(payload) ?? process.env.QUALITEER_JOB_ID;
this.command = config.command(payload) ?? process.env.QUALITEER_COMMAND;
this.mode = modes.EXEC;