Refactor backend
parent fd497c0e23
commit 7027feb8ac
20 changed files with 75 additions and 369 deletions
@@ -1,61 +0,0 @@
-import { v4 } from "uuid";
-import applyJob from "./kubernetes.js";
-import buildJob from "./job-builder.js";
-
-const maxJobs = process.env.MAX_JOBS ? parseInt(process.env.MAX_JOBS) : 3;
-
-class JobManager {
-  constructor() {
-    this.clientMaxJobs = maxJobs;
-    this.clients = {};
-  }
-
-  getJob(clientId, jobId) {
-    return this.clients[clientId].jobs.find((j) => j.id === jobId);
-  }
-
-  getJobById(jobId) {
-    for (var client of Object.values(this.clients)) {
-      const job = client.jobs.find((j) => j.id === jobId);
-      if (!job) continue;
-      return job;
-    }
-  }
-
-  pushLog(jobId, log) {
-    const job = this.getJobById(jobId);
-    if (log instanceof Array) job.log.push(...log);
-    else job.log.push(log);
-  }
-
-  closeJob(jobId, exitcode) {
-    const job = this.getJobById(jobId);
-    job.exitcode = exitcode;
-  }
-
-  newJob(jobRequest, id) {
-    if (!jobRequest) throw Error("Request Must Be Object!");
-    if (!this.clients[id]) this.clients[id] = { jobs: [] };
-    const client = this.clients[id];
-    if (
-      client.jobs.filter((j) => j.exitcode === undefined).length >=
-      this.clientMaxJobs
-    )
-      throw Error("Client's Active Jobs Exceeded!");
-
-    const job = buildJob(jobRequest, id);
-    job.id = v4();
-    job.log = [];
-    this.clients[id].jobs.push(job);
-    applyJob(job);
-    return { ...job };
-  }
-
-  removeJob(clientId, id) {
-    this.clients[clientId].jobs = this.clients[clientId].jobs.filter(
-      (j) => j.id !== id
-    );
-  }
-}
-
-export default new JobManager();
@@ -1,13 +1,15 @@
 // Imports
 import fig from "figlet";
 import http from "http";
+import express from "express";
 import { INFO, OK, logInfo } from "../util/logging.js";
 
 // Import Core Modules
-import expressApp from "./server.js";
-import applySockets from "../sockets/handler.js";
-import jobManager from "./JobManager.js";
-import getRabbiteer from "../rabbit/rabbit-workers.js";
+import buildRoutes from "../routes/router.js";
+import buildPostgres from "../database/postgres.js";
+import injectSockets from "../sockets/socket-server.js";
+import JobManager from "../jobs/JobManager.js";
+import buildRabbiteer from "../rabbit/rabbit-workers.js";
 
 // Constants
 const title = "QLTR";
@@ -17,24 +19,32 @@ const port = process.env.QUALITEER_DEV_PORT ?? 52000;
 export default class Qualiteer {
   constructor(options = {}) {
     for (var k in options) this[k] = options[k];
-    this.jobs = jobManager;
+    this.jobs = JobManager;
     this.port = options.port ?? port;
   }
 
   async _preinitialize() {
     logInfo(fig.textSync(title, "Cyberlarge"));
     INFO("INIT", "Initializing...");
-    this.app = expressApp;
+    this.app = express();
+    this.pg = buildPostgres();
     this.server = http.createServer(this.app);
-    this.sockets = applySockets(this.server, this.jobs);
-    this.app.set("socketio", this.sockets);
-    this.rabbiteer = getRabbiteer(this.sockets);
+    this.sockets = injectSockets(this.server, this.jobs);
+    this.routes = buildRoutes(this.pg, this.sockets);
+    this.rabbiteer = buildRabbiteer(this.pg, this.sockets);
+    this.app.use(this.routes);
+  }
+
+  async _connect() {
+    await this.pg.connect();
+    // await this.rabbiteer.connect();
   }
 
   start() {
     const qt = this;
     return new Promise(async function init(res) {
-      await qt._preinitialize();
+      qt._preinitialize();
+      await qt._connect();
       qt.server.listen(qt.port, function onStart() {
        OK("SERVER", `Running on ${qt.port}`);
        res();
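Usage sketch (not part of this commit): after the refactor the class wires up express, postgres, routes, sockets, and Rabbiteer internally, so a caller only constructs it and awaits start(). The entry-point import path below is an assumption for illustration.

import Qualiteer from "./lib/core/Qualiteer.js"; // import path assumed; not shown in this extract

const qt = new Qualiteer({ port: 52000 }); // options are copied onto the instance; port falls back to QUALITEER_DEV_PORT ?? 52000
await qt.start();                          // resolves once qt.server.listen() invokes onStart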
@@ -1,9 +0,0 @@
-import Executor from "../sockets/clients/Executor.js";
-
-const args = process.argv.slice(2);
-const url = args[0];
-const jobId = args[1];
-const command = args.slice(2);
-const job = { id: jobId, command };
-const exec = new Executor(url, job, command);
-exec.runJob();
@@ -1,15 +0,0 @@
-import { INFO, ERR, OK, VERB } from "../util/logging.js";
-import Executor from "../sockets/clients/Executor.js";
-import cp from "node:child_process";
-
-const jobStr = process.argv.slice(2)[0];
-const job = JSON.parse(jobStr);
-const { command } = job.spec.template.spec.containers[0];
-INFO("EXEC", "Internal Executor Starting!");
-cp.exec(command, (error, stdout, stderr) => {
-  if (error) ERR("EXEC", error);
-  //if(stdout) VERB("EXEC-STDOUT", stdout);
-  //if(stderr) VERB("EXEC-STDERR", stderr);
-  OK("EXEC", "Internal Executor Finished!");
-  process.exit(error ? 1 : 0);
-});
@@ -1,59 +0,0 @@
-const baseCommand = "node";
-const suiteEntry = "dev/suite/runner.js";
-const pipelineMapping = [
-  {
-    id: 0,
-    pipeline: [{ name: "primary" }, { name: "secondary", delay: 5000 }],
-  },
-];
-
-const buildCommon = (jobRequest) => {
-  const { testName } = jobRequest;
-  if (!testName) throw Error("'testName' must be provided!");
-  const command = [baseCommand, suiteEntry, `test=${testName}`];
-
-  // Apply Common Flags
-  command.push("isRetry=false");
-
-  // Return new request
-  return { ...jobRequest, command };
-};
-
-const buildSingle = (jobReq) => jobReq;
-
-const buildMarker = (jobReq) => {};
-
-const buildProject = (jobReq) => {};
-
-const pipelineMaxLife = (testName) => {
-  const pipelines = pipelineMapping
-    .filter((m) => m.pipeline.find((t) => t.name === testName))
-    .map((m) => m.pipeline);
-  return Math.max(pipelines.map((p) => p.length)) + 1;
-};
-
-const buildCompound = (jobReq, socketId) => {
-  const { testName, command } = jobReq;
-  const pipelineTriggers = jobReq.pipelineTriggers;
-  if (pipelineTriggers) command.push(`pipelineTriggers=${pipelineTriggers}`);
-  command.push(`pipelineDashboardSocket=${socketId}`);
-  return { ...jobReq, command };
-};
-
-const nextCompound = (previousTest) => {};
-
-export default function jobBuilder(jobRequest, id) {
-  const jobReq = buildCommon(jobRequest, id);
-  switch (jobRequest.type) {
-    case "single":
-      return buildSingle(jobReq);
-    case "marker":
-      return buildMarker(jobReq);
-    case "project":
-      return buildProject(jobReq);
-    case "compound":
-      return buildCompound(jobReq, id);
-    default:
-      throw Error("No Job Request Type Specified!");
-  }
-}
@@ -1,33 +0,0 @@
-{
-  "apiVersion": "batch/v1",
-  "kind": "Job",
-  "metadata": {
-    "name": "qltr-job-test-suite-1"
-  },
-  "spec": {
-    "template": {
-      "spec": {
-        "containers": [
-          {
-            "resources": {
-              "requests": {
-                "memory": "64MI",
-                "cpu": "250m"
-              },
-              "limits": {
-                "memory": "128MI",
-                "cpu": "500m"
-              }
-            },
-            "name": "qltr-job-test-suite-1",
-            "image": "node",
-            "imagePullPolicy": "Always",
-            "command": ["node", "--version"]
-          }
-        ],
-        "restartPolicy": "Never"
-      }
-    },
-    "backoffLimit": 4
-  }
-}
@@ -1,84 +0,0 @@
-import cp from "child_process";
-import fs from "fs";
-import path from "path";
-
-const internalDeploy = process.env.INTERNAL_DEPLOY === "true";
-const executorUrl = process.env.EXECUTOR_URL;
-const executorScriptOnly = process.env.EXECUTOR_SCRIPT_ONLY === "true";
-const executorBin =
-  process.env.EXECUTOR_BIN ?? `qltr-executor${executorScriptOnly ? ".js" : ""}`;
-
-const qualiteerUrl =
-  process.env.QUALITEER_URL ?? "file:///home/runner/Qualiteer/bin/executor";
-
-const kubCmd = "kubectl apply -f";
-const jobsDir = "jobs/";
-const defaults = JSON.parse(
-  fs.readFileSync(path.resolve("./lib/core/k8s-job.json"))
-);
-
-const wrapCommand = (jobId, command) => {
-  const bin = executorScriptOnly
-    ? `node ${executorBin}`
-    : `chmod +x ${executorBin} && ./${executorBin}`;
-  const cmd = command.map((arg) => JSON.stringify(arg));
-  const curlCmd = `curl -o qltr-executor ${executorUrl} && ${bin} ${qualiteerUrl} ${jobId} ${cmd.join(
-    " "
-  )}`;
-  return curlCmd;
-};
-
-const createFile = (job) => {
-  const { name } = job.metadata;
-  const jobsPath = path.resolve(jobsDir);
-  if (!fs.existsSync(jobsPath)) fs.mkdirSync(jobsPath);
-  const filePath = path.resolve(jobsDir, `${name}.json`);
-  fs.writeFileSync(filePath, JSON.stringify(job));
-  return filePath;
-};
-
-const applyFileInternally = (filePath) => {
-  const job = fs.readFileSync(filePath, { encoding: "utf8" });
-  cp.fork(path.resolve("./lib/core/internal-deploy.js"), [job]);
-};
-
-const applyFile = async (filePath) => {
-  const command = `${kubCmd} ${filePath}`;
-  return new Promise((res, rej) =>
-    cp.exec(command, (err, stdout, stderr) => (err && rej(err)) || res(stdout))
-  );
-};
-
-const deleteFile = (filePath) => fs.unlinkSync(filePath);
-
-const jobBuilder = (jobRequest) => {
-  const { resources, name, image, command, id: jobId } = jobRequest;
-
-  // Safety Checks
-  if (!jobId) throw Error("'jobId' required!");
-  if (!name) throw Error("'name' required!");
-  if (!command) throw Error("'command' required!");
-  if (!image) throw Error("'image' required!");
-
-  if (!Array.isArray(command)) throw Error("'command' must be an array!");
-
-  // Apply configuration
-  const job = { ...defaults };
-  job.metadata.name = `qltr-${name}-${jobId}`;
-  const container = job.spec.template.spec.containers[0];
-  container.name = job.metadata.name;
-  container.command = wrapCommand(jobId, command);
-  container.image = JSON.stringify(image);
-
-  // Apply resources
-  job.resources = { ...job.resources, ...resources };
-  return job;
-};
-
-export default async function createJob(jobRequest) {
-  const job = jobBuilder(jobRequest);
-  const filePath = createFile(job);
-  if (!internalDeploy) await applyFile(filePath);
-  else await applyFileInternally(filePath);
-  deleteFile(filePath);
-}
@@ -1,28 +0,0 @@
-// Imports
-import express from "express";
-
-// Routes
-import results from "../routes/results-route.js";
-import alerting from "../routes/alerting-route.js";
-import react from "../routes/react-route.js";
-import catalog from "../routes/catalog-route.js";
-import jobs from "../routes/jobs-route.js";
-
-import mock from "../routes/mock-route.js";
-import dev from "../routes/dev-route.js";
-const app = express();
-// Special Routes
-app.all("/", (req, res) => res.redirect("/qualiteer"));
-if (process.env.MOCK_ROUTES === "true") app.use(mock);
-if (process.env.USE_DEV_ROUTER === "true") app.use("/api/dev", dev);
-
-// Middlewares
-
-// Routes
-app.use(react); // Static Build Route
-app.use("/api/results", results);
-app.use("/api/alerting", alerting);
-app.use("/api/catalog", catalog);
-app.use("/api/jobs", jobs);
-
-export default app;
@@ -31,17 +31,23 @@ const dbConfig = {
 
 const migrationsDir = "lib/database/migrations";
 
-const configure = async () => {
+const queryMock = (str) => INFO("POSTGRES MOCK", str);
+
+const connect = (pg) => async () => {
   if (pgDisabled) {
     WARN("POSTGRES", "Postgres Disabled!");
-    return { query: (str) => INFO("POSTGRES MOCK", str) };
+    return { query: queryMock };
   }
   await migrate(dbConfig, migrationsDir);
   // Override the global variable DB
-  const pg = pgp(dbConfig);
+  pg = pgp(dbConfig);
   await pg.connect();
   OK("POSTGRES", `Connected to database ${database}!`);
+};
+
+const buildPostgres = () => {
+  var pg = { query: queryMock, connect: connect(pg) };
   return pg;
 };
 
-export default await configure();
+export default buildPostgres;
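Consumption sketch for the new buildPostgres factory, mirroring how the Qualiteer hunk above uses it; the import path and query string are illustrative assumptions.

import buildPostgres from "./lib/database/postgres.js"; // path assumed from the surrounding hunks

const pg = buildPostgres(); // immediately returns { query: queryMock, connect: connect(pg) }
await pg.connect();         // runs migrations and creates the real client via pgp(dbConfig), unless pgDisabled
pg.query("SELECT 1");       // illustrative query; routed through queryMock until a real client is in place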
@@ -1,5 +1,5 @@
 import Rabbiteer from "rabbiteer";
-import getWorkers from "./workers/index.js";
+import buildWorkers from "./workers/index.js";
 
 // Pull Environment Variables
 const { RABBIT_HOST: host, RABBIT_USER: user, RABBIT_PASS: pass } = process.env;
@@ -11,7 +11,7 @@ const rabbitConfig = {
   pass: pass ?? "rabbit",
 };
 
-const getRabbiteer = (skio) =>
-  new Rabbiteer(null, getWorkers(skio), { autoRabbit: rabbitConfig });
+const buildRabbiteer = (skio) =>
+  new Rabbiteer(null, buildWorkers(skio), { autoRabbit: rabbitConfig });
 
-export default getRabbiteer;
+export default buildRabbiteer;
@@ -1,4 +1,4 @@
 import TestResultsWorker from "./TestResultsWorker.js";
 
-const getWorkers = (skio) => [new TestResultsWorker(skio)];
-export default getWorkers;
+const buildWorkers = (skio) => [new TestResultsWorker(skio)];
+export default buildWorkers;
@@ -1,13 +1,13 @@
 import { Router, json as jsonMiddleware } from "express";
 import TestResultsWorker from "../rabbit/workers/TestResultsWorker.js";
 
+export default function buildDevRoute(pg, skio) {
   const router = Router();
   router.use(jsonMiddleware());
   router.post("/rabbit/TestResults", (req, res) => {
     const { testResult } = req.body;
-    var io = req.app.get("socketio");
-    new TestResultsWorker(io).onMessage(testResult);
+    new TestResultsWorker(skio).onMessage(testResult);
     res.sendStatus(200);
   });
-
-export default router;
+  return router;
+}
@@ -1,12 +0,0 @@
-import { Router, json as jsonMiddleware } from "express";
-import jobs from "../core/JobManager.js";
-
-const router = Router();
-
-router.get("/jobs", (req, res) => {
-  const { clients } = jobs;
-  const allJobs = [];
-  for (var c of clients) allJobs.push(...c.jobs);
-  res.json(allJobs);
-});
-export default router;
@@ -1,37 +0,0 @@
-import { Router } from "express";
-import { readFileSync } from "fs";
-
-const router = Router();
-
-const catalog = "lib/routes/mocks/catalog.json";
-const alerting = "lib/routes/mocks/alerting.json";
-const results = "lib/routes/mocks/results.json";
-
-const query = async (mock) => JSON.parse(readFileSync(mock));
-
-// Queries
-router.get("/api/catalog/tests", (req, res) => {
-  query(catalog).then((catalog) => {
-    res.json(req.get("full") ? catalog["tests:full"] : catalog.tests);
-  });
-});
-
-router.get("/api/results/failing", async (req, res) => {
-  query(results).then(async (results) => {
-    if (req.get("count")) res.json({ failing: results.results.length });
-    else if (!req.get("full")) res.json(results.results);
-    else
-      query(catalog).then((catalog) => {
-        res.json(
-          results.results.map((r) => ({
-            ...catalog["tests:full"].find((t) => t.name === r.name),
-            ...r,
-          }))
-        );
-      });
-  });
-});
-
-// Mutations
-
-export default router;
lib/routes/router.js (new file, 28 lines)
@@ -0,0 +1,28 @@
+// Imports
+import express from "express";
+
+// Routes
+import results from "../routes/results-route.js";
+import alerting from "../routes/alerting-route.js";
+import react from "../routes/react-route.js";
+import catalog from "../routes/catalog-route.js";
+
+import buildDevRoute from "../routes/dev-route.js";
+
+export default function buildRoutes(pg, skio) {
+  const router = express.Router();
+  // Special Routes
+  router.all("/", (req, res) => res.redirect("/qualiteer"));
+  if (process.env.USE_DEV_ROUTER === "true")
+    router.use("/api/dev", buildDevRoute(pg, skio));
+
+  // Middlewares
+
+  // Routes
+  router.use(react); // Static Build Route
+  router.use("/api/results", results);
+  router.use("/api/alerting", alerting);
+  router.use("/api/catalog", catalog);
+
+  return router;
+}
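Orientation sketch: buildRoutes replaces the deleted top-level express app (server.js hunk above) with a factory, as wired in the Qualiteer hunk; pg and io are stubbed here so the example stands alone.

import express from "express";
import buildRoutes from "./lib/routes/router.js"; // path per the new-file header above

const pg = { query: () => {} }; // stand-in for buildPostgres()
const io = {};                  // stand-in for injectSockets()

const app = express();
app.use(buildRoutes(pg, io)); // mounts the Router returned by the factory
app.listen(52000);            // same default port used elsewhere in this diff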
@@ -2,7 +2,7 @@ import { Server as Skio } from "socket.io";
 import evt from "./events.js";
 import modes from "./modes.js";
 
-import { initiator, executor, viewer } from "./modifiers.js";
+import { initiator, executor, viewer } from "./client-listeners.js";
 
 const socketDrop = (io, room, id) => {
   const { rooms } = io.of("/").adapter;