Revamp Job flow

Elijah Dunemask 2022-10-15 11:47:47 +00:00
parent 945afdfbbe
commit 4a0a4b29a5
86 changed files with 592 additions and 608 deletions

View file

@ -10,9 +10,8 @@ COPY dist dist
COPY src src
COPY lib lib
COPY index.html .
COPY executor.config.js .
COPY vite.config.js .
RUN npm run build:all
RUN npm run build:react
# Copy bin over
COPY bin bin
CMD ["npm","start"]

View file

@ -28,7 +28,7 @@
## v0.0.5
- [ ] Docker config
- [x] Docker config
- [ ] Gitlab Integration
- [ ] Garden config

Binary file not shown.

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,38 @@
import { URL } from "node:url";
import path from "node:path";
import caxa from "caxa";
import { rollup } from "rollup";
import loadConfigFile from "rollup/loadConfigFile";
import { executorLibraryDir, binName, scriptName } from "./executor-config.js";
// Fix import
const { default: caxaPackage } = caxa;
// Rollup Config
const rollupConfigPath = path.resolve(executorLibraryDir, "rollup.config.js");
// Build functions
async function packageBin() {
console.log("Packaging bundle into binary");
return caxaPackage({
input: "dist/bundles/",
output: `bin/${binName}`,
command: ["{{caxa}}/node_modules/.bin/node", `{{caxa}}/${scriptName}`],
uncompressionMessage: "Unpacking, please wait...",
});
}
async function rollupBundle() {
console.log("Rolling up executor into bundle");
const { options, warnings } = await loadConfigFile(rollupConfigPath);
if (warnings.count !== 0)
console.log(`Rollup has ${warnings.count} warnings`);
warnings.flush();
for (const optionsObj of options) {
const bundle = await rollup(optionsObj);
await Promise.all(optionsObj.output.map(bundle.write));
}
}
await rollupBundle();
await packageBin();
console.log("Done");

View file

@ -0,0 +1,5 @@
export const executorLibraryDir = new URL(".", import.meta.url).pathname;
export const binName = "qltr-executor";
export const configName = "executor.config.mjs";
export const scriptName = "qualiteer-executor.mjs";
export const entrypointName = "executor-entrypoint.js";

View file

@ -1,9 +1,9 @@
import path from "node:path";
import Executor from "../../sockets/clients/Executor.js";
import Executor from "../sockets/clients/Executor.js";
import { normalize } from "./executor-configurator.js";
const { default: executorConfig } = await import(
path.resolve("executor.config.js")
);
import { configName as executorConfigName } from "./executor-config.js";
const executorConfigPath = path.resolve(executorConfigName);
const { default: executorConfig } = await import(executorConfigPath);
// Load config and args
const args = process.argv.slice(2);

View file

@ -1,11 +1,17 @@
import path from "node:path";
import { nodeResolve } from "@rollup/plugin-node-resolve";
import commonjs from "@rollup/plugin-commonjs";
import { terser } from "rollup-plugin-terser";
import {
executorLibraryDir,
entrypointName,
scriptName,
} from "./executor-config.js";
export default {
input: "lib/jobs/executor/executor-entrypoint.js",
input: path.resolve(executorLibraryDir, entrypointName),
output: {
file: "dist/bundles/qualiteer-executor.mjs",
file: `dist/bundles/${scriptName}`,
},
plugins: [nodeResolve(), commonjs(), terser()],
};

View file

@ -1,56 +0,0 @@
<<<<<<< HEAD
CREATE SEQUENCE test_results_id_seq;
CREATE TABLE test_results (
id bigint NOT NULL DEFAULT nextval('test_results_id_seq') PRIMARY KEY,
test_name varchar(255) DEFAULT NULL,
test_class varchar(255) DEFAULT NULL,
test_method varchar(255) DEFAULT NULL,
test_path varchar(255) DEFAULT NULL,
test_type varchar(32) DEFAULT NULL,
test_timestamp timestamptz NOT NULL DEFAULT now(),
test_retry BOOLEAN DEFAULT FALSE,
origin varchar(255) DEFAULT NULL,
failed BOOLEAN DEFAULT FALSE,
failed_message varchar(2047) DEFAULT NULL,
screenshot_url varchar(255) DEFAULT NULL,
weblog_url varchar(255) DEFAULT NULL
);
ALTER SEQUENCE test_results_id_seq OWNED BY test_results.id;
=======
>>>>>>> b023d8910c89d80573499890a958c0df649849e1
# Tables
PG Database Tables Mapped Out
<<<<<<< HEAD
## `test_results`
| id | test_name | test_class | test_method | test_path | test_type | test_timestamp | test_retry | origin | failed | failed_message | screenshot_url | weblog_url |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| int | string | string | string | string | string | timestamp | boolean | string | boolean | string | string | string |
| 1 | My Test | My Test Class | My Failing Test Method | My Test Class Path | API | Date.now() | false | Test Suite A | true | Some Failure Message | screenshotUrl | weblogUrl |
=======
Table `test_results`
| id | test_name | test_class | test_method | test_path | test_type | test_timestamp | test_retry | origin | failed | failed_message | screenshot_url | weblog_url |
|-----|-----------|------------|-------------|---------------------|---------------|----------------|------------|--------------|---------|------------------------|---------------------|--------------------|
| int | string | string | string | string | string | timestamp | boolean | string | boolean | string | string | string |
| 1 | My Test | My Class | My Method | /path/to/test_class | API/UI/MOBILE | Date.now() | false | Test Suite A | true | I am a test that fails | https://example.com | http://example.com |
>>>>>>> b023d8910c89d80573499890a958c0df649849e1
- id Automatically Generated
- test_name\* Name of test
- test_class\* Name of class
- test_method Name of failed method if failed else null
- test_path Path to test class
- test_type API/UI/Mobile
- test_timestamp UTC Timestamp
- test_retry Whether failed tests should be retried
- origin Test suite the test belongs to
- failed Indicates if the test failed or not
- failed_message Failure Message of test or null
- screenshot_url Screenshot of failure
- weblog_url Log from the web console

View file

@ -1,30 +0,0 @@
import pg from "./postgres.js";
import { upsertTest } from "./queries/catalog.js";
import { insertTestResult } from "./queries/results.js";
import { upsertAlertSilence } from "./queries/alerting.js";
import {
seed as catalogSeed,
table as catalogTable,
} from "./seeds/catalog-seed.js";
import {
seed as resultsSeed,
table as resultsTable,
} from "./seeds/results-seed.js";
import {
seed as alertingSeed,
table as alertingTable,
} from "./seeds/alerting-seed.js";
const database = process.env.POSTGRES_DATABASE ?? "qualiteer";
await pg.connect();
const resetAndSeed = async (table, getSeeds, seed) => {
await pg.query(`TRUNCATE ${table} RESTART IDENTITY CASCADE;`);
for (var s of getSeeds()) await seed(s);
};
await resetAndSeed(catalogTable, catalogSeed, upsertTest);
await resetAndSeed(resultsTable, resultsSeed, insertTestResult);
await resetAndSeed(alertingTable, alertingSeed, upsertAlertSilence);
process.exit();

View file

@ -1 +1 @@
export { default } from "./core/Qualiteer.js";
export { default } from "./server/core/Qualiteer.js";

View file

@ -1,50 +0,0 @@
import { URL } from "url";
import loadConfigFile from "rollup/loadConfigFile";
import path from "path";
import { rollup } from "rollup";
import caxa from "caxa";
import { verify, normalize } from "./executor-configurator.js";
const { default: executorConfig } = await import(
path.resolve("executor.config.js")
);
const __dirname = new URL(".", import.meta.url).pathname;
const { default: caxaPackage } = caxa;
function testConfig() {
console.log("Testing config");
verify(normalize(executorConfig([])));
}
async function packageBin() {
console.log("Packaging bundle into binary");
return caxaPackage({
input: "dist/bundles/",
output: "bin/executor",
command: [
"{{caxa}}/node_modules/.bin/node",
"{{caxa}}/qualiteer-executor.mjs",
],
uncompressionMessage: "Unpacking, please wait...",
});
}
async function rollupBundle() {
console.log("Rolling up executor into bundle");
const { options, warnings } = await loadConfigFile(
path.resolve(__dirname, "rollup.config.js")
);
console.log(`Rollup has ${warnings.count} warnings`);
warnings.flush();
for (const optionsObj of options) {
const bundle = await rollup(optionsObj);
await Promise.all(optionsObj.output.map(bundle.write));
}
}
testConfig();
await rollupBundle();
await packageBin();
console.log("Done");

View file

@ -1,53 +0,0 @@
const baseCommand = "node";
const suiteEntry = "tests/assets/suite/runner.js";
const buildCommon = (jobRequest) => {
const { isTriage, ignore, region, testNames } = jobRequest;
const command = [baseCommand, suiteEntry];
// Apply Common Flags
if (isTriage) command.push(`isTriage=${isTriage}`);
if (ignore && ignore.length > 0) console.log("Would ignore", ignore);
if (region) command.push(`region=${region}`);
// Return new request
return { ...jobRequest, command };
};
const buildManual = (jobReq) => {
const { command, testNames } = jobReq;
if (testNames.length > 1)
throw Error("Currently only 1 test can be selected!");
command.push(`test=${testNames[0]}`);
return { ...jobReq, command };
};
const buildTags = (jobReq) => {
const { command, tags, testNames } = jobReq;
if (testNames && testNames.length > 0) {
return console.log("Would run tags as manual");
}
const arg = Buffer.from(JSON.stringify(tags), "utf8").toString("base64");
command.push(`tags=${arg}`);
return { ...jobReq, command };
};
const buildPipeline = (jobReq, socketId) => {
const { command, pipeline } = jobReq;
const { __test: test } = pipeline;
if (!test) throw Error("__test is required for pipeline jobs!");
pipeline.dashboardSocketId = socketId;
const arg = Buffer.from(JSON.stringify(pipeline), "utf8").toString("base64");
command.push(`pipeline=${arg}`);
command.push(`test=${test}`);
return { ...jobReq, command };
};
export default function jobBuilder(jobRequest, id) {
const jobReq = buildCommon(jobRequest, id);
const { pipeline, testNames, tags } = jobReq;
if (pipeline) return buildPipeline(jobReq, id);
else if (tags) return buildTags(jobReq);
else if (testNames) return buildManual(jobReq); //TODO currently does nothing
else throw Error("At least 1 'pipeline or tags or testNames' is required! ");
}

View file

@ -1,9 +0,0 @@
import Executor from "../sockets/clients/Executor.js";
const args = process.argv.slice(2);
const url = args[0];
const jobId = args[1];
const command = args.slice(2);
const job = { id: jobId, command };
const exec = new Executor(url, job, command);
exec.runJob();

View file

@ -1,12 +1,19 @@
import { v4 } from "uuid";
import applyJobInternally from "./k8s/k8s-internal.js";
import applyJob from "./k8s/k8s.js";
import buildJob from "./job-builder.js";
import { getTest } from "../database/queries/catalog.js";
import applyJobInternally from "../k8s/k8s-internal.js";
import applyJob from "../k8s/k8s.js";
const maxJobs = process.env.MAX_JOBS ? parseInt(process.env.MAX_JOBS) : 3;
const internalDeploy = process.env.INTERNAL_DEPLOY === "true";
const launchJob = internalDeploy ? applyJobInternally : applyJob;
async function getTests(job) {
if (job.pipeline) return [await getTest(job.pipeline.__test)];
if (!job.testNames) return [];
const tests = await Promise.all(job.testNames.map((name) => getTest(name)));
return tests;
}
class JobManager {
constructor() {
this.clientMaxJobs = maxJobs;
@ -35,16 +42,21 @@ class JobManager {
closeJob(jobId, exitcode) {
const job = this.getJobById(jobId);
if (!job) return;
job.exitcode = exitcode;
}
newJob(jobRequest, id) {
async newJob(jobRequest, id) {
if (!jobRequest) throw Error("Request Must Be Object!");
if (!this.clients[id]) this.clients[id] = { jobs: [] };
const job = buildJob(jobRequest, id);
const job = { ...jobRequest };
job.image = "registry.dunemask.net/garden/dev/reed:latest";
job.id = v4();
job.log = [];
this.clients[id].jobs.push(job);
job.dashboardSocketId = id;
job.tests = await getTests(job);
for (var t of job.tests) if (!t) throw Error("1 or more tests not found!");
launchJob(job);
return { ...job };
}

View file

@ -7,8 +7,8 @@ import { INFO, OK, logInfo } from "../util/logging.js";
// Import Core Modules
import buildRoutes from "../routes/router.js";
import pg from "../database/postgres.js";
import injectSockets from "../sockets/socket-server.js";
import JobManager from "../jobs/JobManager.js";
import injectSockets from "./socket-server.js";
import JobManager from "./JobManager.js";
import buildRabbiteer from "../rabbit/rabbit-workers.js";
// Constants

View file

@ -1,13 +1,13 @@
import evt from "./events.js";
import evt from "../../common/sockets/events.js";
export const initiator = (socket, jobs) => {
export const initiator = async (socket, jobs) => {
const jobStr = socket.handshake.query.job;
const jobReq = JSON.parse(jobStr);
if (!jobReq || !(jobReq instanceof Object))
throw Error("No 'job' was included in the handshake query");
const job = jobs.newJob(jobReq, socket.id);
const job = await jobs.newJob(jobReq, socket.id);
socket.join(job.id);
socket.emit(evt.JOB_CRT, job.id);
};
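For context, a hedged sketch of the dashboard side that this initiator listener pairs with, based on how Initiator is used in the React hooks later in this commit (the @qltr/initiator alias comes from vite.config.js; the fourth callback is passed through unchanged because its role is not shown in this diff):

import Initiator from "@qltr/initiator";
const initiator = new Initiator("/"); // socketUrl used by the dashboard hooks
const request = { testNames: ["my-test"], isTriage: false }; // hypothetical job request
const onLog = (line) => console.log(line);
const onClose = (code) => console.log("exited with", code);
const started = initiator.newJob(request, onLog, onClose, () => {});
started.then(() => console.log("job accepted"));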

View file

@ -1,6 +1,6 @@
import { Server as Skio } from "socket.io";
import evt from "./events.js";
import modes from "./modes.js";
import evt from "../../common/sockets/events.js";
import modes from "../../common/sockets/modes.js";
import { initiator, executor, viewer } from "./client-listeners.js";
@ -13,22 +13,28 @@ const socketDrop = (io, room, id) => {
s.disconnect();
};
const socketConnect = (io, socket, jobs) => {
const socketConnect = async (io, socket, jobs) => {
const { mode } = socket.handshake.query;
switch (mode) {
case modes.INIT:
initiator(socket, jobs);
break;
case modes.EXEC:
executor(io, socket, jobs);
break;
case modes.VIEW:
viewer(socket);
break;
default:
socket.send(evt.ERR, "Invalid Mode!");
socket.disconnect();
break;
try {
switch (mode) {
case modes.INIT:
await initiator(socket, jobs);
break;
case modes.EXEC:
executor(io, socket, jobs);
break;
case modes.VIEW:
viewer(socket);
break;
default:
socket.send(evt.ERR, "Invalid Mode!");
socket.disconnect();
break;
}
} catch (err) {
console.log(err);
socket.send(evt.ERR, err);
socket.disconnect();
}
};

View file

@ -1,4 +1,6 @@
// Imports
import path from "node:path";
import { URL } from "node:url";
import { migrate } from "postgres-migrations";
import createPgp from "pg-promise";
import moment from "moment";
@ -29,7 +31,8 @@ const dbConfig = {
ensureDatabaseExists: true,
};
const migrationsDir = "lib/database/migrations";
const databaseDir = new URL(".", import.meta.url).pathname;
const migrationsDir = path.resolve(databaseDir, "migrations/");
const queryMock = (str) => INFO("POSTGRES MOCK", str);

View file

@ -5,6 +5,7 @@ import {
selectWhereAnyQuery,
onConflictUpdate,
} from "../pg-query.js";
import { WARN } from "../../util/logging.js";
import getFilteredTags from "../tags.js";
import getDelay from "../delays.js";
@ -14,6 +15,21 @@ const PG_DISABLED = process.env.POSTGRES_DISABLED;
import { testsMock, mappingsMock } from "../mocks/catalog-mock.js";
// Queries
export const removeDroppedTests = async (testNames) => {
// BUG: After dropping a test, the id jumps ridiculously high
const pgNames = testNames.map((tn) => `'${tn}'`).join(",");
const query = `DELETE FROM catalog as x where x.name not in (${pgNames});`;
return pg.query(query);
};
export const getTest = async (name) => {
const query = selectWhereAnyQuery(table, { name });
const results = await pg.query(query);
if (results.length > 1)
WARN("CATALOG", `More than 1 test found for '${name}'`);
return results[0];
};
export const getTests = async () => {
if (PG_DISABLED) return testsMock();
const query = `SELECT * from ${table}`;
@ -54,6 +70,12 @@ export const getProjects = async () => {
}
};
export const truncateTests = async () => {
if (PG_DISABLED) return console.log(`Would truncate table ${table}`);
const query = `TRUNCATE ${table} RESTART IDENTITY CASCADE;`;
return await pg.query(query);
};
export const upsertTest = async (test) => {
if (PG_DISABLED) return console.log("Would insert test", test);
const {

View file

@ -1,4 +1,5 @@
import fs from "node:fs";
import { URL } from "node:url";
import path from "node:path";
const {
QUALITEER_EXECUTOR_URL,
@ -14,34 +15,32 @@ const executorBinFetchUrl = QUALITEER_EXECUTOR_BIN_URL;
const jobsDir = "jobs/";
const jobsPath = path.resolve(jobsDir);
const defaultsFile = path.resolve("./lib/jobs/k8s/k8s-job.json");
const defaults = JSON.parse(fs.readFileSync(defaultsFile));
const k8sFolder = new URL(".", import.meta.url).pathname;
const defaultsFilePath = path.resolve(k8sFolder, "k8s-job.json");
const defaults = JSON.parse(fs.readFileSync(defaultsFilePath));
function wrapCommand(jobId, command) {
const bin = executorAsScript
? `node ${executorBin}`
: `chmod +x ${executorBin} && ./${executorBin}`;
const executorPayload = JSON.stringify({ jobId, command, url: executorUrl });
function commandBuilder(jobId, jobRequest) {
const executorPayload = JSON.stringify({
jobId,
jobRequest,
url: executorUrl,
});
const payload = Buffer.from(executorPayload, "utf8").toString("base64");
const curlCmd = `if ! [ -f qltr-executor ]; then curl -o qltr-executor ${executorBinFetchUrl}; fi || true && ${bin} ${payload}`;
return curlCmd;
return [`./${executorBin}`, payload];
}
export function jobBuilder(jobRequest) {
const { resources, name, image, command, id: jobId } = jobRequest;
const { resources, name, image, id: jobId } = jobRequest;
// Safety Checks
if (!jobId) throw Error("'jobId' required!");
if (!name) throw Error("'name' required!");
if (!command) throw Error("'command' required!");
if (!image) throw Error("'image' required!");
if (!Array.isArray(command)) throw Error("'command' must be an array!");
// Apply configuration
const job = { ...defaults };
job.metadata.name = `qltr-${name}-${jobId}`;
job.metadata.name = `qltr-${jobId}`;
const container = job.spec.template.spec.containers[0];
container.name = job.metadata.name;
container.command = wrapCommand(jobId, command);
container.command = commandBuilder(jobId, jobRequest);
container.image = image;
// Apply resources
job.resources = { ...job.resources, ...resources };
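Because commandBuilder now hands the executor a single base64 argument instead of a wrapped shell command, the executor presumably reverses the encoding along these lines (a sketch only; the argument position is an assumption, not code from this commit):

// Hypothetical decode on the executor side
const payload = process.argv[2];
const { jobId, jobRequest, url } = JSON.parse(
  Buffer.from(payload, "base64").toString("utf8")
);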

View file

@ -13,9 +13,19 @@
"name": "qltr-job-test-suite-1",
"image": "node:latest",
"imagePullPolicy": "Always",
"command": ["node", "--version"]
"command": ["node", "--version"],
"envFrom": [
{
"configMapRef": {
"name": "qualiteer-job-environment"
}
}
]
}
],
"imagePullSecrets": [
{ "name": "usw-registry-secret", "namespace": "default" }
],
"restartPolicy": "Never"
}
},
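The job template now pulls its environment from a ConfigMap named qualiteer-job-environment, so that ConfigMap must exist in the namespace the jobs are created in (dunestorm-dunemask per k8s.js); a minimal sketch, with a placeholder value that is not part of this commit:

kubectl -n dunestorm-dunemask create configmap qualiteer-job-environment \
  --from-literal=QUALITEER_EXECUTOR_URL=http://qualiteer.example.com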

View file

@ -1,13 +1,12 @@
import k8s from "@kubernetes/client-node";
import { INFO, ERR } from "../../util/logging.js";
import { INFO, ERR } from "../util/logging.js";
import { jobBuilder, createFile, deleteFile } from "./k8s-common.js";
export default async function createJob(jobRequest) {
//console.log(await jobRequest.tests);
const job = jobBuilder(jobRequest);
job.spec.template.spec.containers[0].image = "node:latest";
job.spec.template.spec.containers[0].command = ["node", "--version"];
// job.spec.template.spec.containers[0].image = "reed";
// job.spec.template.spec.containers[0].command = "python3 -m pytest -v --tb=no -p no:warnings".split(" ");
job.spec.template.spec.containers[0].image =
"registry.dunemask.net/garden/dev/reed:latest";
const kc = new k8s.KubeConfig();
kc.loadFromCluster();
const batchV1Api = kc.makeApiClient(k8s.BatchV1Api);
@ -17,8 +16,4 @@ export default async function createJob(jobRequest) {
.createNamespacedJob("dunestorm-dunemask", job)
.then((res) => INFO("K8S", `Job ${jobName} created!`))
.catch((err) => ERR("K8S", err));
/*const filePath = createFile(job);
applyFile(filePath);
deleteFile(filePath);*/
}

View file

@ -2,7 +2,7 @@
import { Worker } from "rabbiteer";
import { VERB } from "../../util/logging.js";
import { insertTestResult } from "../../database/queries/results.js";
import evt from "../../sockets/events.js";
import evt from "../../../common/sockets/events.js";
// Class
export default class TestResultsWorker extends Worker {
constructor(skio) {

View file

@ -3,6 +3,8 @@ import {
getTests,
getPipelineMappings,
upsertTest,
truncateTests,
removeDroppedTests,
} from "../database/queries/catalog.js";
const router = Router();
@ -23,9 +25,20 @@ router.get("/pipeline-mappings", (req, res) => {
// Post Routes
router.post("/update", (req, res) => {
if (!req.body) return res.status(400).send("Body required!");
if(!Array.isArray(req.body)) return res.status(400).send("Body must be an array!");
const upserts = Promise.all(req.body.map((catalogItem)=>upsertTest(catalogItem)));
upserts.then(()=>res.sendStatus(200)).catch((e)=>res.status(500).send(e));
if (!Array.isArray(req.body))
return res.status(400).send("Body must be an array!");
const wrongImage = req.body.find(({ image }) => image !== req.body[0].image);
if (wrongImage)
return res.status(400).send("Tests cannot have unique images!");
const testNames = req.body.map(({ name }) => name);
// Upsert new tests
const upserts = Promise.all(
req.body.map((catalogItem) => upsertTest(catalogItem))
);
const dropRm = upserts.then(() => removeDroppedTests(testNames));
dropRm.then(() => res.sendStatus(200)).catch((e) => res.status(500).send(e));
});
export default router;
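With the new validation, /update expects a JSON array in which every entry carries the same image; tests whose names are missing from the array are then dropped via removeDroppedTests. A hedged example request (host and mount path are not shown in this diff; extra catalog fields are omitted for brevity):

curl -X POST "$QUALITEER_URL/update" \
  -H "Content-Type: application/json" \
  -d '[{"name":"test-a","image":"node:latest"},{"name":"test-b","image":"node:latest"}]'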

13
package-lock.json generated
View file

@ -40,6 +40,7 @@
"axios": "^0.27.2",
"caxa": "^2.1.0",
"concurrently": "^7.3.0",
"lodash.merge": "^4.6.2",
"nodemon": "^2.0.19",
"prettier": "^2.7.1",
"react": "^18.2.0",
@ -4078,6 +4079,12 @@
"integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=",
"dev": true
},
"node_modules/lodash.merge": {
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
"dev": true
},
"node_modules/lodash.sortby": {
"version": "4.7.0",
"resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz",
@ -9260,6 +9267,12 @@
"integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=",
"dev": true
},
"lodash.merge": {
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
"dev": true
},
"lodash.sortby": {
"version": "4.7.0",
"resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz",

View file

@ -7,25 +7,21 @@
"type": "module",
"exports": {
".": "./lib/index.js",
"./clients": "./lib/sockets/clients/index.js",
"./web-clients": "./lib/sockets/clients/web.index.js"
"./clients": "./lib/common/sockets/clients/index.js",
"./web-clients": "./lib/common/sockets/clients/web.index.js"
},
"bin": {
"qualiteer": "./dist/app.js"
},
"scripts": {
"build:all": "concurrently \"npm run build:react\" \"npm run build:executor\" -n v,s -p -c yellow,green",
"build:executor": "node lib/jobs/executor/executor-bundler.js",
"build:executor": "node lib/common/executor/executor-bundler.js",
"build:react": "vite build",
"start": "node dist/app.js",
"dev:server": "nodemon dist/app.js",
"dev:react": "vite",
"start:dev": "concurrently -k \"QUALITEER_DEV_PORT=52025 npm run dev:server\" \" QUALITEER_VITE_DEV_PORT=52000 QUALITEER_VITE_BACKEND_URL=http://localhost:52025 npm run dev:react\" -n s,v -p -c green,yellow",
"test": "node tests/index.js",
"test:api": "node tests/api.js",
"test:dev": "nodemon tests/index.js",
"lint": "prettier -w lib/ src/",
"seed": "node lib/database/seed.js"
"lint": "prettier -w lib/ src/"
},
"browserslist": {
"production": [
@ -46,7 +42,6 @@
"dotenv": "^16.0.2",
"express": "^4.18.1",
"figlet": "^1.5.2",
"lodash": "^4.17.21",
"moment": "^2.29.4",
"path": "^0.12.7",
"pg-promise": "^10.12.0",
@ -68,6 +63,7 @@
"axios": "^0.27.2",
"caxa": "^2.1.0",
"concurrently": "^7.3.0",
"lodash.merge": "^4.6.2",
"nodemon": "^2.0.19",
"prettier": "^2.7.1",
"react": "^18.2.0",

View file

@ -1,6 +1,6 @@
// Import Contexts
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { JobProvider } from "@qltr/jobs";
import { JobProvider } from "@qltr/jobctx";
import { StoreProvider } from "@qltr/store";
import { BrowserRouter } from "react-router-dom";
// Import Views

View file

@ -1,16 +1,6 @@
import React, { useReducer, createContext, useMemo } from "react";
import Initiator from "@qltr/initiator";
const JobContext = createContext();
export const jobStatus = {
OK: "o",
QUEUED: "q",
PENDING: "p",
CANCELED: "c",
ACTIVE: "a",
ERROR: "e",
};
const ACTIONS = {
CREATE: "c",
UPDATE: "u",
@ -18,12 +8,7 @@ const ACTIONS = {
PIPELINE: "p",
};
const url = "/";
const initialState = {
jobs: [],
pipelines: [],
};
const initialState = { jobs: [], pipelines: [] };
const reducer = (state, action) => {
// Current Jobs
@ -56,220 +41,21 @@ const reducer = (state, action) => {
export const JobProvider = ({ children }) => {
const [state, dispatch] = useReducer(reducer, initialState);
const jobUpdate = (job, jobId) =>
dispatch({ type: ACTIONS.UPDATE, jobId, job });
const jobCreate = (job) =>
dispatch({ type: ACTIONS.CREATE, job: { ...job, log: [] } });
const jobDelete = (jobId) => dispatch({ type: ACTIONS.DELETE, jobId });
const updatePipelines = (pipelines) =>
dispatch({ type: ACTIONS.pipeline, pipelines });
function retryAll(failing) {
// Query Full Locator
console.log("Would retry all failing tests!");
return jobFactory({ testNames: ["single"] });
}
function pipelineComponentJob(jobPipeline, pipelineReq) {
const i = new Initiator(url);
const jobId = `j${Date.now()}`;
const job = {
name: jobId,
status: jobStatus.PENDING,
jobId,
isPipeline: true,
initiator: i,
pipelineId: jobPipeline.id,
branchId: pipelineReq.pipeline.__test,
};
const request = {
image: "node",
name: jobId,
...pipelineReq,
};
jobCreate(job);
const onLog = (d) => {
const job = state.jobs.find((j) => j.jobId === jobId);
job.log.push(d);
job.status = jobStatus.ACTIVE;
jobUpdate({ ...job }, jobId);
};
const onClose = (c) => {
const job = state.jobs.find((j) => j.jobId === jobId);
job.exitcode = c;
job.status = c === 0 ? jobStatus.OK : jobStatus.ERROR;
jobUpdate({ ...job }, jobId);
};
const onPipelineTrigger = (p) => {
const { triggers } = p;
for (var t in triggers) {
if (t === "__testDelay") continue;
const delay = triggers[t].__testDelay ?? 0;
delete triggers[t].__testDelay;
const jobReq = {
...request,
pipeline: {
...p,
triggers: triggers[t],
__test: t,
},
};
jobPipeline.pendingTriggers.push({
testName: t,
timer: setTimeout(
() => pipelineComponentJob(jobPipeline, jobReq),
delay
),
triggerAt: Date.now() + delay,
});
}
};
const started = i.newPipelineJob(
request,
onLog,
onClose,
() => {},
onPipelineTrigger
);
started.then(() => jobUpdate({ status: jobStatus.ACTIVE }, jobId));
}
function pipelineFactory(builderCache) {
const { tree, branches, selectedBranches } = builderCache;
const __test = Object.keys(tree)[0];
const pipelineReq = {
image: "node",
pipeline: { __test, triggers: { ...tree[__test] } },
isTriage: builderCache.triageFailing,
};
const id = `pij${Date.now()}`;
const pipeline = { id, branches, pendingTriggers: [], selectedBranches };
const { pipelines } = state;
pipelines.push(pipeline);
updatePipelines([...pipelines]);
pipelineComponentJob(pipeline, pipelineReq);
return pipeline;
}
function pipelineCancel(pipelineId) {
const pipeline = state.pipelines.find((p) => p.id === pipelineId);
pipeline.isCanceled = true;
pipeline.pendingTriggers.forEach((t) => clearTimeout(t));
const jobs = state.jobs.filter(
(j) => j.isPipeline && j.pipelineId === pipelineId
);
for (var j of jobs) {
if (j.initiator.sk) j.initiator.sk.close();
j.status = jobStatus.CANCELED;
jobUpdate({ ...j }, j.jobId);
}
}
function pipelineDestroy(pipelineId) {
const pipelineIndex = state.pipelines.findIndex((p) => p.id === pipelineId);
const pipeline = state.pipelines[pipelineIndex];
pipeline.pendingTriggers.forEach((t) => clearTimeout(t));
const jobs = state.jobs.filter(
(j) => j.isPipeline && j.pipelineId === pipelineId
);
for (var j of jobs) {
if (
j.initiator.sk &&
j.status !== jobStatus.OK &&
j.status !== jobStatus.ERROR &&
j.status !== jobStatus.CANCELED
) {
j.initiator.sk.close();
}
jobDelete(j.jobId);
}
state.pipelines.splice(pipelineIndex, 1);
}
function jobCancel(jobId) {
const job = state.jobs.find((j) => j.jobId === jobId);
if (job.initiator.sk) job.initiator.sk.close();
job.status = jobStatus.CANCELED;
jobUpdate({ ...job }, jobId);
}
function jobDestroy(jobId) {
const job = state.jobs.find((j) => j.jobId === jobId);
if (
job.initiator.sk &&
job.status !== jobStatus.OK &&
job.status !== jobStatus.ERROR &&
job.status !== jobStatus.CANCELED
) {
job.initiator.sk.close();
}
jobDelete(jobId);
}
function jobFactory(builderCache) {
if (builderCache.tree) return pipelineFactory(builderCache);
// Find test
const i = new Initiator(url);
const jobId = `j${Date.now()}`;
const job = {
name: jobId,
status: jobStatus.PENDING,
jobId,
isPipeline: false,
builderCache,
initiator: i,
};
const request = {
testNames: builderCache.testNames,
image: "node",
type: "single",
name: jobId,
isTriage: builderCache.isTriage,
};
jobCreate(job);
const onLog = (d) => {
const job = state.jobs.find((j) => j.jobId === jobId);
job.log.push(d);
job.status = jobStatus.ACTIVE;
jobUpdate({ ...job }, jobId);
};
const onClose = (c) => {
const job = state.jobs.find((j) => j.jobId === jobId);
job.exitcode = c;
job.status = c === 0 ? jobStatus.OK : jobStatus.ERROR;
jobUpdate({ ...job }, jobId);
};
const started = i.newJob(request, onLog, onClose, () => {});
started.then(() => jobUpdate({ status: jobStatus.ACTIVE }, jobId));
return jobId;
}
const context = {
state,
dispatch,
jobUpdate,
jobCreate,
jobDelete,
retryAll,
jobFactory,
jobCancel,
jobDestroy,
pipelineCancel,
pipelineDestroy,
updatePipelines,
};
const contextValue = useMemo(() => context, [state, dispatch]);
@ -277,5 +63,4 @@ export const JobProvider = ({ children }) => {
<JobContext.Provider value={contextValue}>{children}</JobContext.Provider>
);
};
export default JobContext;

44
src/job-core/JobCore.jsx Normal file
View file

@ -0,0 +1,44 @@
import { useContext } from "react";
import { v4 as uuidv4 } from "uuid";
import JobContext from "@qltr/jobctx";
import Initiator from "@qltr/initiator";
import { useOneshotCore } from "./OneshotCore.jsx";
import { usePipelineCore } from "./PipelineCore.jsx";
import { useJobExtra } from "./JobExtra.jsx";
import { jobStatus, socketUrl } from "./job-config.js";
export function useJobCore() {
const { state, jobUpdate, jobCreate, jobDelete } = useContext(JobContext);
const { pipelineStart, pipelineCancel, pipelineDestroy } = usePipelineCore();
const { oneshotStart, oneshotCancel, oneshotDestroy } = useOneshotCore();
const jobExtra = useJobExtra();
function retryAll(failing) {
console.log("Would retry all failing tests!");
}
function jobCompose(builderCache) {
if (builderCache.tree) return pipelineStart(builderCache);
return oneshotStart(builderCache);
}
return {
// Job Context
state,
// Job Core
jobCompose,
retryAll,
// Oneshot
oneshotStart,
oneshotCancel,
oneshotDestroy,
// Pipeline
pipelineCancel,
pipelineDestroy,
pipelineStart,
// Job Extra
...jobExtra,
};
}
export { jobStatus } from "./job-config.js";
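A hypothetical consumer of the hook above (component name and cache shape are illustrative): jobCompose routes to the pipeline flow when the builder cache has a tree and to the oneshot flow otherwise, and for oneshots it returns the new job id.

import { useJobCore } from "@qltr/jobcore";

export default function RunTestButton({ testName }) {
  const { jobCompose, toJob } = useJobCore();
  const run = () => {
    const jobId = jobCompose({ testNames: [testName], isTriage: false });
    toJob(jobId); // navigates to /qualiteer/jobs#job-<id>
  };
  return <button onClick={run}>Run {testName}</button>;
}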

81
src/job-core/JobExtra.jsx Normal file
View file

@ -0,0 +1,81 @@
import { useContext } from "react";
import { useNavigate } from "react-router-dom";
import JobContext from "@qltr/jobctx";
import { jobStatus } from "./job-config.js";
// Icons
import CheckIcon from "@mui/icons-material/Check";
import ClearIcon from "@mui/icons-material/Clear";
import ViewColumnIcon from "@mui/icons-material/ViewColumn";
import PendingIcon from "@mui/icons-material/Pending";
import VisibilityIcon from "@mui/icons-material/Visibility";
import DoNotDisturbIcon from "@mui/icons-material/DoNotDisturb";
import ReplayIcon from "@mui/icons-material/Replay";
function statusIcon(status) {
switch (status) {
case jobStatus.OK:
return <CheckIcon color="success" />;
case jobStatus.ERROR:
return <ClearIcon color="error" />;
case jobStatus.PENDING:
return <PendingIcon color="info" />;
case jobStatus.ACTIVE:
return <VisibilityIcon color="primary" />;
case jobStatus.CANCELED:
return <DoNotDisturbIcon color="warning" />;
case jobStatus.QUEUED:
return <ViewColumnIcon color="secondary" />;
default:
return <ReplayIcon />;
}
}
export function useJobExtra() {
const { state, jobUpdate, jobCreate, jobDelete } = useContext(JobContext);
const navigate = useNavigate();
function pipelineJobs(pl) {
return state.jobs.filter((j) => j.isPipeline && j.pipelineId === pl.id);
}
const jobIcon = ({ status }) => statusIcon(status);
function pipelineIcon(pl) {
const jobStatuses = pipelineJobs(pl).map(({ status }) => status);
if (jobStatuses.includes(jobStatus.ERROR))
return statusIcon(jobStatus.ERROR);
if (jobStatuses.includes(jobStatus.ACTIVE))
return statusIcon(jobStatus.ACTIVE);
if (jobStatuses.includes(jobStatus.PENDING))
return statusIcon(jobStatus.PENDING);
if (pl.isCanceled) return statusIcon(jobStatus.CANCELED);
if (jobStatuses.includes(jobStatus.OK)) return statusIcon(jobStatus.OK);
return statusIcon(jobStatus.QUEUED);
}
function selectedPipelineBranches(pl) {
return pl.branches.map((branch) =>
branch.filter((t) => pl.selectedBranches.find((sb) => sb.name == t.name))
);
}
function findPipelineJobByTestName(pl, testName) {
return pipelineJobs(pl).find((j) => j.branchId === testName);
}
// Nav
const toJob = (jobId) => navigate(`/qualiteer/jobs#job-${jobId}`);
const toPipeline = (plId) => navigate(`/qualiteer/jobs#pipeline-${plId}`);
const toJobs = () => navigate(`/qualiteer/jobs`);
return {
pipelineJobs,
jobIcon,
pipelineIcon,
selectedPipelineBranches,
findPipelineJobByTestName,
toJob,
toPipeline,
toJobs,
};
}

View file

@ -0,0 +1,63 @@
import { useContext } from "react";
import { v4 as uuidv4 } from "uuid";
import JobContext from "@qltr/jobctx";
import Initiator from "@qltr/initiator";
import { jobStatus, socketUrl } from "./job-config.js";
export function useOneshotCore() {
const { state, jobUpdate, jobCreate, jobDelete } = useContext(JobContext);
function oneshotCancel(jobId) {
const job = state.jobs.find((j) => j.jobId === jobId);
if (job.initiator.sk) job.initiator.sk.close();
job.status = jobStatus.CANCELED;
jobUpdate({ ...job }, jobId);
}
function oneshotDestroy(jobId) {
const job = state.jobs.find((j) => j.jobId === jobId);
const { status } = job;
const isTriggered = !!job.initiator.sk;
const isCompleted = status === jobStatus.OK || status === jobStatus.ERROR;
const isCanceled = status === jobStatus.CANCELED;
if (isTriggered && !isCompleted && !isCanceled) job.initiator.sk.close();
jobDelete(jobId);
}
function oneshotStart(builderCache) {
const { testNames, isTriage } = builderCache;
const initiator = new Initiator(socketUrl);
const jobId = uuidv4();
const job = {
name: jobId,
status: jobStatus.PENDING,
jobId,
isPipeline: false,
builderCache,
initiator,
};
const request = { testNames, type: "single", isTriage };
jobCreate(job);
const onLog = (d) => {
const job = state.jobs.find((j) => j.jobId === jobId);
job.log.push(d);
job.status = jobStatus.ACTIVE;
jobUpdate({ ...job }, jobId);
};
const onClose = (c) => {
const job = state.jobs.find((j) => j.jobId === jobId);
job.exitcode = c;
job.status = c === 0 ? jobStatus.OK : jobStatus.ERROR;
jobUpdate({ ...job }, jobId);
};
const started = initiator.newJob(request, onLog, onClose, () => {});
started.then(() => jobUpdate({ status: jobStatus.ACTIVE }, jobId));
return jobId;
}
return { oneshotStart, oneshotCancel, oneshotDestroy };
}

View file

@ -0,0 +1,110 @@
import { useContext } from "react";
import { v4 as uuidv4 } from "uuid";
import JobContext from "@qltr/jobctx";
import Initiator from "@qltr/initiator";
import { jobStatus, socketUrl } from "./job-config.js";
export function usePipelineCore() {
const { state, jobUpdate, jobCreate, jobDelete, updatePipelines } =
useContext(JobContext);
function pipelineJob(pl, plReq) {
const initiator = new Initiator(socketUrl);
const jobId = uuidv4();
const job = {
status: jobStatus.PENDING,
jobId,
isPipeline: true,
initiator,
pipelineId: pl.id,
branchId: plReq.pipeline.__test,
};
jobCreate(job);
const onLog = (d) => {
const job = state.jobs.find((j) => j.jobId === jobId);
job.log.push(d);
job.status = jobStatus.ACTIVE;
jobUpdate({ ...job }, jobId);
};
const onClose = (c) => {
const job = state.jobs.find((j) => j.jobId === jobId);
job.exitcode = c;
job.status = c === 0 ? jobStatus.OK : jobStatus.ERROR;
jobUpdate({ ...job }, jobId);
};
const onPipelineTrigger = (p) => {
const { triggers } = p;
for (var t in triggers) {
if (t === "__testDelay") continue;
const delay = triggers[t].__testDelay ?? 0;
delete triggers[t].__testDelay;
const plTrigger = { ...p, triggers: triggers[t], __test: t };
const jobReq = { ...plReq, pipeline: plTrigger };
const timer = setTimeout(() => pipelineJob(pl, jobReq), delay);
const triggerAt = Date.now() + delay;
pl.pendingTriggers.push({ testName: t, timer, triggerAt });
}
};
const started = initiator.newPipelineJob(
plReq,
onLog,
onClose,
() => {},
onPipelineTrigger
);
started.then(() => jobUpdate({ status: jobStatus.ACTIVE }, jobId));
}
function pipelineStart(builderCache) {
const { tree, branches, selectedBranches, triageFailing } = builderCache;
const __test = Object.keys(tree)[0];
const plReq = {
pipeline: { __test, triggers: { ...tree[__test] } },
isTriage: triageFailing,
};
const id = uuidv4();
const pipeline = { id, branches, pendingTriggers: [], selectedBranches };
const { pipelines } = state;
pipelines.push(pipeline);
updatePipelines([...pipelines]);
pipelineJob(pipeline, plReq);
return pipeline;
}
function pipelineCancel(pipelineId) {
const pipeline = state.pipelines.find((p) => p.id === pipelineId);
pipeline.isCanceled = true;
pipeline.pendingTriggers.forEach((t) => clearTimeout(t));
const jobs = state.jobs.filter(
(j) => j.isPipeline && j.pipelineId === pipelineId
);
for (var j of jobs) {
if (j.initiator.sk) j.initiator.sk.close();
j.status = jobStatus.CANCELED;
jobUpdate({ ...j }, j.jobId);
}
}
function pipelineDestroy(pipelineId) {
const pipelineIndex = state.pipelines.findIndex((p) => p.id === pipelineId);
const pipeline = state.pipelines[pipelineIndex];
pipeline.pendingTriggers.forEach((t) => clearTimeout(t));
const jobs = state.jobs.filter(
(j) => j.isPipeline && j.pipelineId === pipelineId
);
var isTriggered, isCompleted, isCanceled;
for (var j of jobs) {
isTriggered = !!j.initiator.sk;
isCompleted = j.status === jobStatus.OK || j.status === jobStatus.ERROR;
isCanceled = j.status === jobStatus.CANCELED;
if (isTriggered && !isCompleted && !isCanceled) j.initiator.sk.close();
jobDelete(j.jobId);
}
state.pipelines.splice(pipelineIndex, 1);
}
return { pipelineStart, pipelineCancel, pipelineDestroy };
}

View file

@ -0,0 +1,9 @@
export const jobStatus = {
OK: "o",
QUEUED: "q",
PENDING: "p",
CANCELED: "c",
ACTIVE: "a",
ERROR: "e",
};
export const socketUrl = "/";

View file

@ -1,5 +1,6 @@
import { useContext } from "react";
import JobContext, { jobStatus } from "@qltr/jobs";
import JobContext from "@qltr/jobctx";
import { jobStatus } from "../job-core/job-config.js";
import { useNavigate } from "react-router-dom";
import CheckIcon from "@mui/icons-material/Check";

View file

@ -1,4 +1,4 @@
import _ from "lodash";
import merge from "lodash.merge";
const nest = (arr) => {
const obj = {};
@ -8,7 +8,7 @@ const nest = (arr) => {
export const asTree = (branches) => {
const nests = branches.map((b) => nest(b));
return _.merge(...nests);
return merge(...nests);
};
export const asBranches = (array) => {

View file

@ -1,6 +1,6 @@
import { useContext, useState } from "react";
import { useCurrentlyFailing } from "@qltr/queries";
import JobContext from "@qltr/jobs";
import JobContext from "@qltr/jobctx";
import StoreContext from "@qltr/store";
import { Link, useLocation } from "react-router-dom";

View file

@ -1,6 +1,6 @@
import { useEffect, useContext } from "react";
import StoreContext from "@qltr/store";
import JobContext from "@qltr/jobs";
import JobContext from "@qltr/jobctx";
import CatalogBox from "./CatalogBox.jsx";
import CatalogSearch from "./CatalogSearch.jsx";
import { useCatalogTests } from "@qltr/queries";
@ -40,6 +40,7 @@ export default function Catalog() {
test.job = pipelineJob;
continue;
}
const job = jobState.jobs.find(
(j) => !j.isPipeline && j.builderCache.testNames.includes(test.name)
);

View file

@ -2,12 +2,7 @@ import React, { useState, useContext } from "react";
import { usePipelineMappings } from "@qltr/queries";
import StoreContext from "@qltr/store";
import JobContext, { jobStatus } from "@qltr/jobs";
import {
useJobIconState,
usePipelineIconState,
useJobNav,
} from "@qltr/util/JobTools";
import { useJobCore, jobStatus } from "@qltr/jobcore";
import useMediaQuery from "@mui/material/useMediaQuery";
import { useTheme } from "@mui/material/styles";
@ -39,25 +34,24 @@ export default function CatalogBox(props) {
const { data: pipelineMappings, isLoading } = usePipelineMappings();
const { state: store } = useContext(StoreContext);
const { jobFactory } = useContext(JobContext);
const jobNav = useJobNav();
const { jobCompose, toPipeline, toJob, jobIcon, pipelineIcon } = useJobCore();
const [open, setOpen] = useState(false);
const toggleOpen = () => setOpen(!open);
const theme = useTheme();
const minifyActions = useMediaQuery(theme.breakpoints.down("sm"));
const navigateToJob = () => {
if (pipeline) return jobNav.toPipeline(pipeline.id);
jobNav.toJob(job.jobId);
if (pipeline) return toPipeline(pipeline.id);
toJob(job.jobId);
};
const runTest = () => {
if (isPipeline) return runPipelineTest();
const jobId = jobFactory({
const jobId = jobCompose({
testNames: [testName],
isTriage: store.triageFailing,
});
if (store.focusJob) jobNav.toJob(jobId);
if (store.focusJob) toJob(jobId);
};
const runPipelineTest = () => {
@ -74,8 +68,8 @@ export default function CatalogBox(props) {
selectedBranches: as1d(primaries),
isTriage: true,
};
const pipeline = jobFactory(builderCache);
if (store.focusJob) jobNav.toPipeline(pipeline.id);
const pipeline = jobCompose(builderCache);
if (store.focusJob) toPipeline(pipeline.id);
};
const jobOnClick = (e) => {
@ -86,10 +80,10 @@ export default function CatalogBox(props) {
navigateToJob();
};
function jobIcon() {
if (pipeline) return usePipelineIconState(pipeline);
function boxIcon() {
if (pipeline) return pipelineIcon(pipeline);
if (!job) return <PlayArrowIcon />;
return useJobIconState(job);
return jobIcon(job);
}
return (
@ -130,7 +124,7 @@ export default function CatalogBox(props) {
component="span"
onClick={jobOnClick}
>
{jobIcon()}
{boxIcon()}
</IconButton>
</Stack>
</AccordionSummary>

View file

@ -1,7 +1,6 @@
import { useState, useContext } from "react";
import { useCurrentlyFailing, useSilencedAlerts } from "@qltr/queries";
import JobContext from "@qltr/jobs";
import { useJobNav } from "@qltr/util/JobTools";
import JobContext from "@qltr/jobctx";
import SilenceDialog, { useSilenceDialog } from "../alerting/SilenceDialog.jsx";
import FailingBox from "./FailingBox.jsx";
import QuickSilence, { useQuickSilence } from "./QuickSilence.jsx";

View file

@ -1,12 +1,8 @@
import React, { useState, useContext } from "react";
import { usePipelineMappings, useIgnoreResult } from "@qltr/queries";
import StoreContext from "@qltr/store";
import JobContext, { jobStatus } from "@qltr/jobs";
import {
useJobIconState,
usePipelineIconState,
useJobNav,
} from "@qltr/util/JobTools";
import { useJobCore, jobStatus } from "@qltr/jobcore";
import useMediaQuery from "@mui/material/useMediaQuery";
import { useTheme } from "@mui/material/styles";
import Accordion from "@mui/material/Accordion";
@ -63,9 +59,8 @@ export default function FailingBox(props) {
const runHistory = recentResults ? [...recentResults].reverse() : null;
const { data: pipelineMappings, isLoading } = usePipelineMappings();
const { jobFactory } = useContext(JobContext);
const { jobCompose, jobIcon, pipelineIcon, toJob, toPipeline } = useJobCore();
const { state: store, updateStore, removeFailure } = useContext(StoreContext);
const jobNav = useJobNav();
const theme = useTheme();
const minifyActions = useMediaQuery(theme.breakpoints.down("sm"));
const [open, setOpen] = useState(false);
@ -97,22 +92,22 @@ export default function FailingBox(props) {
selectedBranches: as1d(primaries),
isTriage: store.triageFailing,
};
const pipeline = jobFactory(builderCache);
if (store.focusJob) jobNav.toPipeline(pipeline.id);
const pipeline = jobCompose(builderCache);
if (store.focusJob) toPipeline(pipeline.id);
};
const retryTest = () => {
if (isPipeline) return retryPipelineTest();
const jobId = jobFactory({
const jobId = jobCompose({
testNames: [testName],
isTriage: store.triageFailing,
});
if (store.focusJob) jobNav.toJob(jobId);
if (store.focusJob) toJob(jobId);
};
const navigateToJob = () => {
if (pipeline) return jobNav.toPipeline(pipeline.id);
jobNav.toJob(job.jobId);
if (pipeline) return toPipeline(pipeline.id);
toJob(job.jobId);
};
const jobOnClick = () => {
@ -121,10 +116,10 @@ export default function FailingBox(props) {
navigateToJob();
};
function jobIcon() {
if (pipeline) return usePipelineIconState(pipeline);
function boxIcon() {
if (pipeline) return pipelineIcon(pipeline);
if (!job) return <ReplayIcon />;
return useJobIconState(job);
return jobIcon(job);
}
return (
@ -220,7 +215,7 @@ export default function FailingBox(props) {
</a>
<IconButton aria-label="retry" component="span" onClick={jobOnClick}>
{jobIcon()}
{boxIcon()}
</IconButton>
<IconButton

View file

@ -1,8 +1,7 @@
// React
import React, { useState, useContext } from "react";
import JobContext from "@qltr/jobs";
import StoreContext from "@qltr/store";
import { useJobNav } from "@qltr/util/JobTools";
import { useJobCore } from "@qltr/jobcore";
// Components
import Button from "@mui/material/Button";
@ -18,17 +17,16 @@ import ReplayIcon from "@mui/icons-material/Replay";
export default function FailingRetry(props) {
const { failing } = props;
const { state: jobState, retryAll } = useContext(JobContext);
const { state: jobState, retryAll, toJob } = useJobCore();
const { state: store } = useContext(StoreContext);
const [open, setOpen] = useState(false);
const jobNav = useJobNav();
const toggleOpen = () => setOpen(!open);
const dialogClose = (confirmed) => () => {
toggleOpen();
if (!confirmed) return;
const jobId = retryAll(failing);
if (!store.focusJob) return;
jobNav.toJob(jobId);
toJob(jobId);
};
if (!failing || failing.length === 0) return;
return (

View file

@ -1,6 +1,6 @@
import React from "react";
import { useJobIconState } from "@qltr/util/JobTools";
import { useJobCore } from "@qltr/jobcore";
import Accordion from "@mui/material/Accordion";
import AccordionSummary from "@mui/material/AccordionSummary";
import Typography from "@mui/material/Typography";
@ -10,8 +10,8 @@ import IconButton from "@mui/material/IconButton";
import Stack from "@mui/material/Stack";
export default function JobBox(props) {
const { jobIcon } = useJobCore();
const { job } = props;
const { name, status } = job;
return (
@ -30,7 +30,7 @@ export default function JobBox(props) {
</Typography>
<Stack sx={{ ml: "auto" }}>
<IconButton aria-label="retry" component="span">
{useJobIconState(job)}
{jobIcon(job)}
</IconButton>
</Stack>
</AccordionSummary>

View file

@ -1,5 +1,5 @@
import React from "react";
import { jobStatus } from "@qltr/jobs";
import { jobStatus } from "@qltr/jobcore";
import Box from "@mui/material/Box";
import Typography from "@mui/material/Typography";

View file

@ -1,7 +1,6 @@
import React, { useState, useContext } from "react";
import StoreContext from "@qltr/store";
import JobContext, { jobStatus } from "@qltr/jobs";
import { usePipelineIconState } from "@qltr/util/JobTools";
import { useJobCore, jobStatus } from "@qltr/jobcore";
import Accordion from "@mui/material/Accordion";
import AccordionDetails from "@mui/material/AccordionDetails";
@ -21,7 +20,8 @@ import Stack from "@mui/material/Stack";
export default function JobPipelineBox(props) {
const { pipeline } = props;
const pipelineIcon = usePipelineIconState(pipeline);
const { pipelineIcon } = useJobCore();
const boxIcon = pipelineIcon(pipeline);
return (
<Accordion expanded={false} disableGutters={true} square>
@ -39,7 +39,7 @@ export default function JobPipelineBox(props) {
</Typography>
<Stack sx={{ ml: "auto" }}>
<IconButton aria-label="" component="span">
{pipelineIcon}
{boxIcon}
</IconButton>
</Stack>
</AccordionSummary>

View file

@ -1,13 +1,6 @@
import React, { useContext } from "react";
import { useNavigate } from "react-router-dom";
import JobContext, { jobStatus } from "@qltr/jobs";
import {
selectedPipelineBranches,
pipelineJobs,
findPipelineJobByTestName,
useJobIconState,
useJobNav,
} from "@qltr/util/JobTools";
import { useJobCore, jobStatus } from "@qltr/jobcore";
import Box from "@mui/material/Box";
import AppBar from "@mui/material/AppBar";
@ -31,28 +24,29 @@ import DeleteIcon from "@mui/icons-material/Delete";
function JobPipelineDisplay(props) {
const { pipeline } = props;
const {
state: jobState,
pipelineCancel,
pipelineDestroy,
} = useContext(JobContext);
selectedPipelineBranches,
pipelineJobs,
findPipelineJobByTestName,
toJob,
jobIcon,
} = useJobCore();
const jobNav = useJobNav();
const nav = useNavigate();
const [anchorEl, setAnchorEl] = React.useState(null);
const open = Boolean(anchorEl);
const handleClick = (event) => {
setAnchorEl(event.currentTarget);
};
const handleClose = () => {
setAnchorEl(null);
};
const handleClick = (event) => setAnchorEl(event.currentTarget);
const handleClose = () => setAnchorEl(null);
const selectJob = (testName) => () => {
const job = findPipelineJobByTestName(pipeline, jobState.jobs, testName);
const job = findPipelineJobByTestName(pipeline, testName);
if (!job) return;
jobNav.toJob(job.jobId);
toJob(job.jobId);
};
function cancelPipeline() {
@ -69,16 +63,16 @@ function JobPipelineDisplay(props) {
};
function pipelineActive() {
return pipelineJobs(pipeline, jobState.jobs).find(
return pipelineJobs(pipeline).find(
(j) => j.status === jobStatus.ACTIVE || j.status === jobStatus.PENDING
);
}
function jobIcon(name) {
function boxIcon(name) {
if (pipeline.isCanceled) return <DoNotDisturbIcon color="warning" />;
const job = findPipelineJobByTestName(pipeline, jobState.jobs, name);
const job = findPipelineJobByTestName(pipeline, name);
if (!job) return <ViewColumnIcon color="secondary" />;
return useJobIconState(job);
return jobIcon(job);
}
return (
@ -133,7 +127,7 @@ function JobPipelineDisplay(props) {
</Typography>
<Stack sx={{ ml: "auto" }}>
<IconButton aria-label="retry" component="span">
{jobIcon(test.name)}
{boxIcon(test.name)}
</IconButton>
</Stack>
</AccordionSummary>

View file

@ -1,6 +1,6 @@
import React, { useContext, useState, useEffect } from "react";
import { useNavigate } from "react-router-dom";
import JobContext, { jobStatus } from "@qltr/jobs";
import { useJobCore, jobStatus } from "@qltr/jobcore";
import StoreContext from "@qltr/store";
import Box from "@mui/material/Box";
import AppBar from "@mui/material/AppBar";
@ -25,16 +25,12 @@ import ViewColumnIcon from "@mui/icons-material/ViewColumn";
export default function JobPipelinePendingView(props) {
const navigate = useNavigate();
const { job } = props;
const { jobFactory, jobCancel, jobDestroy } = useContext(JobContext);
const { jobCompose, jobCancel, jobDestroy } = useJobCore();
const { state: store } = useContext(StoreContext);
const [anchorEl, setAnchorEl] = React.useState(null);
const open = Boolean(anchorEl);
const handleClick = (event) => {
setAnchorEl(event.currentTarget);
};
const handleClose = () => {
setAnchorEl(null);
};
const handleClick = (event) => setAnchorEl(event.currentTarget);
const handleClose = () => setAnchorEl(null);
function download(filename, text) {
var element = document.createElement("a");
@ -50,7 +46,7 @@ export default function JobPipelinePendingView(props) {
}
function retryJob() {
const jobId = jobFactory(job.builderCache);
const jobId = jobCompose(job.builderCache);
if (store.focusJob) navigate(`/qualiteer/jobs#${jobId}`);
}

View file

@ -1,7 +1,6 @@
import React, { useContext, useState, useEffect } from "react";
import { useJobNav } from "@qltr/util/JobTools";
import { useNavigate } from "react-router-dom";
import JobContext, { jobStatus } from "@qltr/jobs";
import { useJobCore, jobStatus } from "@qltr/jobcore";
import StoreContext from "@qltr/store";
import Box from "@mui/material/Box";
import AppBar from "@mui/material/AppBar";
@ -25,18 +24,14 @@ import ViewColumnIcon from "@mui/icons-material/ViewColumn";
export default function JobView(props) {
const { job } = props;
const { jobFactory, jobCancel, jobDestroy } = useContext(JobContext);
const { jobCompose, jobCancel, jobDestroy, toPipeline, toJob, toJobs } = useJobCore();
const { state: store } = useContext(StoreContext);
const jobNav = useJobNav();
const nav = useNavigate();
const [anchorEl, setAnchorEl] = React.useState(null);
const open = Boolean(anchorEl);
const handleClick = (event) => {
setAnchorEl(event.currentTarget);
};
const handleClose = () => {
setAnchorEl(null);
};
const handleClick = (event) => setAnchorEl(event.currentTarget);
const handleClose = () => setAnchorEl(null);
function download(filename, text) {
var element = document.createElement("a");
@ -52,8 +47,8 @@ export default function JobView(props) {
}
function retryJob() {
const jobId = jobFactory(job.builderCache);
if (store.focusJob) jobNav.toJob(jobId);
const jobId = jobCompose(job.builderCache);
if (store.focusJob) toJob(jobId);
}
function downloadLog() {
@ -76,8 +71,8 @@ export default function JobView(props) {
};
function navigateToJobs() {
if (job.isPipeline) return jobNav.toPipeline(job.pipelineId);
jobNav.toJobs();
if (job.isPipeline) return toPipeline(job.pipelineId);
toJobs();
}
return (

View file

@ -1,7 +1,7 @@
import React, { useContext, useEffect } from "react";
import { useLocation, useNavigate } from "react-router-dom";
import JobContext from "@qltr/jobs";
import JobContext from "@qltr/jobctx";
import JobBox from "./JobBox.jsx";
import JobPipelineBox from "./JobPipelineBox.jsx";
import JobView from "./JobView.jsx";

View file

@ -1,7 +1,6 @@
import React, { useContext, useState } from "react";
import StoreContext from "@qltr/store";
import JobContext from "@qltr/jobs";
import { useJobNav } from "@qltr/util/JobTools";
import { useJobCore } from "@qltr/jobcore";
import Dialog from "@mui/material/Dialog";
import Toolbar from "@mui/material/Toolbar";
@ -28,8 +27,7 @@ import PipelineConfirm from "./PipelineConfirm.jsx";
export default function JobBuilder() {
const { state: store } = useContext(StoreContext);
const { jobFactory } = useContext(JobContext);
const jobNav = useJobNav();
const { jobCompose, toJob } = useJobCore();
const [quickOpen, setQuickOpen] = useState(false);
const [jobDialogOpen, setJobDialogOpen] = useState(false);
@ -52,8 +50,8 @@ export default function JobBuilder() {
const handleClose = (confirmed) => () => {
setJobDialogOpen(false);
if (!confirmed) return;
const jobId = jobFactory({ ...cache, isTriage: store.triageFailing });
if (store.focusJob) jobNav.toJob(jobId);
const jobId = jobCompose({ ...cache, isTriage: store.triageFailing });
if (store.focusJob) toJob(jobId);
};
// Pull info from url if possible?

View file

@ -1,6 +1,6 @@
export default function executorConfig(payload) {
return {
command: ({ command }) => command,
command: ({ jobRequest }) => ["echo", "hello"],
url: ({ url }) => url,
jobId: ({ jobId }) => jobId,
};

View file

@ -27,11 +27,13 @@ export default () => {
alias: {
"@qltr/util": path.resolve("./src/util/"),
"@qltr/queries": path.resolve("./src/util/queries"),
"@qltr/joins": path.resolve(`./src/util/Joins.jsx`),
"@qltr/jobs": path.resolve("./src/ctx/JobContext.jsx"),
"@qltr/jobcore": path.resolve("./src/job-core/JobCore.jsx"),
"@qltr/jobctx": path.resolve("./src/ctx/JobContext.jsx"),
"@qltr/store": path.resolve("./src/ctx/StoreContext.jsx"),
"@qltr/initiator": path.resolve("./lib/sockets/clients/Initiator.js"),
"@qltr/mocks": path.resolve("./lib/database/mocks/"),
"@qltr/initiator": path.resolve(
"./lib/common/sockets/clients/Initiator.js"
),
"@qltr/mocks": path.resolve("./lib/server/database/mocks/"),
},
},
});