Upgrades, people!

Dunemask 2022-08-09 04:29:10 +00:00
parent f84234150f
commit 8ad56e8d38
40 changed files with 483 additions and 379 deletions

View file

@@ -10,13 +10,15 @@ const table = "silenced_tests";
const PG_DISABLED = process.env.POSTGRES_DISABLED;
const silencedMock = () => {
return [{
name: `failing`,
class: `failing.js`,
method: "FAKEMETHOD",
id: 0,
silencedUntil: new Date().toJSON(),
}]
return [
{
name: `failing`,
class: `failing.js`,
method: "FAKEMETHOD",
id: 0,
silencedUntil: new Date().toJSON(),
},
];
};
// Queries

View file

@@ -20,12 +20,12 @@ const testsMock = () => {
isCompound: false,
type: "api",
description: "This is a single test",
tags: ["cron_1hour","reg_us", "env_ci", "proj_core", "skip_alt"],
tags: ["cron_1hour", "reg_us", "env_ci", "proj_core", "skip_alt"],
path: "tests/assets/suite/single.js",
created: Date.now(),
mergeRequest: "https://example.com"
mergeRequest: "https://example.com",
},
{
{
id: 1,
name: "failing",
class: "failing.js",
@@ -33,12 +33,12 @@ const testsMock = () => {
isCompound: false,
type: "ui",
description: "This is a failing test",
tags: ["cron_1hour","reg_us", "env_ci", "proj_core"],
tags: ["cron_1hour", "reg_us", "env_ci", "proj_core"],
path: "tests/assets/suite/failing.js",
created: Date.now(),
mergeRequest: "https://example.com"
mergeRequest: "https://example.com",
},
{
{
id: 2,
name: "primary",
class: "primary.js",
@@ -46,11 +46,18 @@ const testsMock = () => {
isCompound: true,
type: "api",
description: "This is a primary test",
tags: ["cron_1hour","reg_us", "proj_core", "skip_alt", "compound_secondary"],
tags: [
"cron_1hour",
"reg_us",
"proj_core",
"skip_alt",
"compound_secondary",
],
path: "tests/assets/suite/primary.js",
created: Date.now(),
mergeRequest: "https://example.com"
}, {
mergeRequest: "https://example.com",
},
{
id: 3,
name: "secondary",
class: "secondary.js",
@@ -58,12 +65,12 @@ const testsMock = () => {
isCompound: true,
type: "api",
description: "This is a secondary test",
tags: ["cron_1hour","reg_us", "proj_core", "compound_tertiary"],
tags: ["cron_1hour", "reg_us", "proj_core", "compound_tertiary"],
path: "tests/assets/suite/secondary.js",
created: Date.now(),
mergeRequest: "https://example.com"
mergeRequest: "https://example.com",
},
{
{
id: 4,
name: "tertiary",
class: "tertiary.js",
@@ -71,21 +78,21 @@ const testsMock = () => {
isCompound: true,
type: "api",
description: "This is a single test",
tags: ["cron_1hour","reg_us", "proj_core"],
tags: ["cron_1hour", "reg_us", "proj_core"],
path: "tests/assets/suite/tertiary.js",
created: Date.now(),
mergeRequest: "https://example.com"
mergeRequest: "https://example.com",
},
];
};
const mappingsMock = () => {
return [
["primary", "secondary1", "tertiary1"],
["primary", "secondary1", "tertiary2"],
["primary", "secondary2", "tertiary3"],
];
}
["primary", "secondary1", "tertiary1"],
["primary", "secondary1", "tertiary2"],
["primary", "secondary2", "tertiary3"],
];
};
export const getTests = async () => {
if (PG_DISABLED) return testsMock();

View file

@@ -10,33 +10,35 @@ import {
const table = "test_results";
const PG_DISABLED = process.env.POSTGRES_DISABLED;
const failingMock = () => {
return [{
name: "failing",
class: "failing.js",
timestamp: new Date().toJSON(),
method: "FAKEMETHOD",
cron: "1hour",
type: "api",
dailyFails: 12,
screenshot: "https://picsum.photos/1920/1080",
recentResults: [1, 0, 0, 1, 0],
isCompound: false,
failedMessage: `Some Test FailureMessage`,
},{
name: "secondary",
class: "secondary.js",
timestamp: new Date().toJSON(),
method: "FAKEMETHOD",
cron: "1hour",
type: "api",
dailyFails: 1,
screenshot: "https://picsum.photos/1920/1080",
recentResults: [1, 0, 0, 1, 0],
isCompound: true,
failedMessage: `Some Test FailureMessage from Secondary`,
}]
return [
{
name: "failing",
class: "failing.js",
timestamp: new Date().toJSON(),
method: "FAKEMETHOD",
cron: "1hour",
type: "api",
dailyFails: 12,
screenshot: "https://picsum.photos/1920/1080",
recentResults: [1, 0, 0, 1, 0],
isCompound: false,
failedMessage: `Some Test FailureMessage`,
},
{
name: "secondary",
class: "secondary.js",
timestamp: new Date().toJSON(),
method: "FAKEMETHOD",
cron: "1hour",
type: "api",
dailyFails: 1,
screenshot: "https://picsum.photos/1920/1080",
recentResults: [1, 0, 0, 1, 0],
isCompound: true,
failedMessage: `Some Test FailureMessage from Secondary`,
},
];
};
// Queries
export const insertTestResult = (testResult) => {

View file

@@ -7,7 +7,8 @@ const { default: executorConfig } = await import(
// Load config and args
const args = process.argv.slice(2);
const config = normalize(executorConfig(args));
const payload = JSON.parse(Buffer.from(args[0], "base64").toString("utf8"));
const config = normalize(executorConfig(payload));
// Start Executor
const exec = new Executor(args, config);
const exec = new Executor(config, payload);
exec.runJob();
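
For reference, a sketch of the new invocation, assuming the { jobId, command, url } payload shape that wrapCommand (further down in this commit) encodes; every value below is made up:

// node qltr-executor <base64>, where <base64> is built roughly like this:
const examplePayload = { jobId: "qltr-0", command: ["node", "suite.js"], url: "http://localhost:3000" };
const encoded = Buffer.from(JSON.stringify(examplePayload), "utf8").toString("base64");
// ...and the entry point above reverses it from argv:
const decoded = JSON.parse(Buffer.from(encoded, "base64").toString("utf8"));
console.log(decoded.command.join(" ")); // "node suite.js"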

View file

@@ -34,9 +34,15 @@ const pipelineMaxLife = (testName) => {
const buildCompound = (jobReq, socketId) => {
const { testName, command } = jobReq;
const pipelineTriggers = jobReq.pipelineTriggers;
if (pipelineTriggers) command.push(`pipelineTriggers=${pipelineTriggers}`);
command.push(`pipelineDashboardSocket=${socketId}`);
const { pipeline } = jobReq;
if (pipeline) {
pipeline.dashboardSocketId = socketId;
const pipelineArg = Buffer.from(JSON.stringify(pipeline), "utf8").toString(
"base64"
);
command.push(`pipeline=${pipelineArg}`);
}
return { ...jobReq, command };
};
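
In short, the whole pipeline object (with the dashboard socket id attached) now travels as a single base64 command argument instead of separate pipelineTriggers/pipelineDashboardSocket flags. A small sketch, with placeholder names and values:

const pipeline = { triggers: { secondary: {} }, dashboardSocketId: "socket-abc" };
const pipelineArg = Buffer.from(JSON.stringify(pipeline), "utf8").toString("base64");
const command = ["node", "primary.js", `pipeline=${pipelineArg}`];
// The receiving side can restore the object with:
// JSON.parse(Buffer.from(pipelineArg, "base64").toString("utf8"))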

View file

@@ -7,8 +7,8 @@ const { command } = job.spec.template.spec.containers[0];
INFO("EXEC", "Internal Executor Starting!");
cp.exec(command, (error, stdout, stderr) => {
if (error) ERR("EXEC", error);
//if(stdout) VERB("EXEC-STDOUT", stdout);
//if(stderr) VERB("EXEC-STDERR", stderr);
//if (stdout) VERB("EXEC-STDOUT", stdout);
//if (stderr) VERB("EXEC-STDERR", stderr);
OK("EXEC", "Internal Executor Finished!");
process.exit(error ? 1 : 0);
});

View file

@@ -22,9 +22,11 @@ const wrapCommand = (jobId, command) => {
? `node ${executorBin}`
: `chmod +x ${executorBin} && ./${executorBin}`;
const cmd = command.map((arg) => JSON.stringify(arg));
const curlCmd = `curl -o qltr-executor ${executorUrl} && ${bin} ${qualiteerUrl} ${jobId} ${cmd.join(
" "
)}`;
const payload = Buffer.from(
JSON.stringify({ jobId, command, url: qualiteerUrl }),
"utf8"
).toString("base64");
const curlCmd = `curl -o qltr-executor ${executorUrl} && ${bin} ${payload}`;
return curlCmd;
};
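
Net effect on the generated command string (a sketch; the bracketed values are placeholders, not real endpoints or ids):

// Before: curl -o qltr-executor <executorUrl> && node <executorBin> <qualiteerUrl> <jobId> "<arg1>" "<arg2>" ...
// After:  curl -o qltr-executor <executorUrl> && node <executorBin> <base64 of {"jobId": ..., "command": [...], "url": <qualiteerUrl>}>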

View file

@@ -7,25 +7,25 @@ const nest = (arr) => {
};
export const asTree = (branches) => {
const nests = branches.map((b)=>nest(b));
return _.merge(...nests);
};
const nests = branches.map((b) => nest(b));
return _.merge(...nests);
};
export const asBranches = (array) => {
const merged = [];
array.forEach((p, i) => {
p.forEach((v, i) => {
if (!merged[i]) merged[i] = [];
if (!merged[i].includes(v)) merged[i].push(v);
});
const merged = [];
array.forEach((p, i) => {
p.forEach((v, i) => {
if (!merged[i]) merged[i] = [];
if (!merged[i].includes(v)) merged[i].push(v);
});
return merged;
}
});
return merged;
};
export const as1d = (a) => [].concat.apply([], a);
export const selectBranch = (map,test) => {
const pipeline = map.find((pm)=>pm.includes(test));
const testIndex = pipeline.findIndex((t) => t === test);
return pipeline.slice(0, testIndex + 1);
}
export const selectBranch = (map, test) => {
const pipeline = map.find((pm) => pm.includes(test));
const testIndex = pipeline.findIndex((t) => t === test);
return pipeline.slice(0, testIndex + 1);
};
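
A quick usage sketch of the fully visible helpers, using the mapping shape from mappingsMock earlier in this commit (asTree is omitted because nest is defined above this hunk):

const map = [
  ["primary", "secondary1", "tertiary1"],
  ["primary", "secondary1", "tertiary2"],
  ["primary", "secondary2", "tertiary3"],
];
selectBranch(map, "secondary1"); // ["primary", "secondary1"]
asBranches(map); // [["primary"], ["secondary1", "secondary2"], ["tertiary1", "tertiary2", "tertiary3"]]
as1d(asBranches(map)); // ["primary", "secondary1", "secondary2", "tertiary1", "tertiary2", "tertiary3"]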

View file

@@ -24,21 +24,18 @@ export default class TestResultsWorker extends Worker {
}
*/
onMessage(testResult) {
const { pipelineData, pipelineTriggers, pipelineDelay } = testResult;
const pipelineTrigger = { pipelineData, pipelineTriggers, pipelineDelay };
const { pipeline } = testResult;
// Alter to start next test
// TODO the delay should be autopopulated either by the suite, or filled in by the server
if (pipelineTriggers)
return this.pipelineTrigger(
pipelineTrigger,
testResult.pipelineDashboardSocket
);
this.pipelineClose(testResult.pipelineDashboardSocket);
if (pipeline) return this.pipelineTrigger(pipeline);
// No pipeline attached: assume the socket id rides on the result itself and close it out.
const { dashboardSocketId: dsi } = testResult;
this.pipelineClose(dsi);
}
pipelineTrigger(pipelineTrigger, socketId) {
pipelineTrigger.pipelineDelay = 1000 * 5;
this.skio.to(socketId).emit(evt.PPL_TRG, pipelineTrigger);
pipelineTrigger(pipeline) {
const { dashboardSocketId: dsi } = pipeline;
this.skio.to(dsi).emit(evt.PPL_TRG, pipeline);
}
pipelineClose(socketId) {

View file

@@ -7,7 +7,7 @@ router.use(jsonMiddleware());
// Get Routes
router.get("/silenced", (req, res) => {
getSilencedTests().then((t)=>res.send(t));
getSilencedTests().then((t) => res.send(t));
});
// Post Routes

View file

@@ -9,12 +9,12 @@ router.use(jsonMiddleware({ limit: maxSize }));
// Get Routes
router.get("/tests", (req, res) => {
getTests().then((t)=>res.json(t));
getTests().then((t) => res.json(t));
});
router.get("/pipeline-mappings", (req,res)=>{
getPipelineMappings().then((m)=>res.json(m));
})
router.get("/pipeline-mappings", (req, res) => {
getPipelineMappings().then((m) => res.json(m));
});
// Post Routes
router.post("/update", (req, res) => {

View file

@@ -7,7 +7,7 @@ router.use(jsonMiddleware());
// Get Routes
router.get("/failing", (req, res) => {
getCurrentlyFailing().then((f)=>res.json(f));
getCurrentlyFailing().then((f) => res.json(f));
});
// Post Routes

View file

@@ -12,10 +12,10 @@ const ERR = "e";
const OUT = "o";
export default class Executor {
constructor(args, config, options = {}) {
this.url = config.url(args) ?? process.env.QUALITEER_URL;
this.jobId = config.jobId(args) ?? process.env.QUALITEER_JOB_ID;
this.command = config.command(args) ?? process.env.QUALITEER_COMMAND;
constructor(config, payload) {
this.url = config.url(payload) ?? process.env.QUALITEER_URL;
this.jobId = config.jobId(payload) ?? process.env.QUALITEER_JOB_ID;
this.command = config.command(payload) ?? process.env.QUALITEER_COMMAND;
this.mode = modes.EXEC;
// Internal Buffer
@@ -24,10 +24,10 @@ export default class Executor {
this.buf[OUT] = "";
// Methods
this.spawn = options.spawn ?? this.spawn.bind(this);
this.report = options.report ?? this.report.bind(this);
this.onProcClose = options.onProcClose ?? this.onProcClose.bind(this);
this.onClose = options.onClose ?? this.onClose.bind(this);
this.spawn = this.spawn.bind(this);
this.report = this.report.bind(this);
this.onProcClose = this.onProcClose.bind(this);
this.onClose = this.onClose.bind(this);
}
spawn() {

View file

@@ -54,29 +54,32 @@ export default class Initiator {
onCreate = onCreate ?? this.onCreate.bind(this);
onPipelineTrigger =
onPipelineTrigger ??
((trigger) => {
console.log("job trg:", trigger);
const testName = trigger.pipelineTriggers;
const pipelineData = trigger.pipelineData;
const pipelineTriggers = trigger.newPipelineTriggers;
const jobReq = {
...jobRequest,
testName,
pipelineData,
pipelineTriggers,
};
setTimeout(
() =>
this.newPipelineJob(
jobReq,
onLog,
onClose,
onCreate,
onPipelineTrigger,
onPipelineClose
),
trigger.pipelineDelay
);
((pipeline) => {
console.log("job trg:", pipeline);
const { triggers } = pipeline;
if (!Object.keys(triggers).length) onPipelineClose();
// For each trigger
for (var testName in triggers) {
const delay = triggers[testName].__testDelay ?? 0;
delete triggers[testName].__testDelay;
const jobReq = {
...jobRequest,
pipeline: { ...pipeline, triggers: triggers[testName] },
testName,
};
setTimeout(
() =>
this.newPipelineJob(
jobReq,
onLog,
onClose,
onCreate,
onPipelineTrigger,
onPipelineClose
),
delay
);
}
});
onPipelineClose = onPipelineClose ?? this.onPipelineClose.bind(this);
this.sk = mgr.socket("/");
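
Reconstructed from the handler above, the pipeline object the dashboard now receives looks roughly like this (only __testDelay, triggers, and dashboardSocketId come from the code; the test names and socket id are placeholders):

const pipeline = {
  dashboardSocketId: "socket-abc", // attached server-side by buildCompound
  triggers: {
    secondary: { __testDelay: 5000, tertiary: {} },
  },
};
// For the "secondary" key a job is scheduled after 5000 ms, and that job's
// pipeline.triggers becomes { tertiary: {} }; an empty triggers object means
// the pipeline is finished and onPipelineClose fires.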