Minor Adjustments
This commit is contained in:
parent
1084f5d937
commit
f486d50efa
60 changed files with 1965 additions and 127 deletions
23
libold/server/database/delays.js
Normal file
23
libold/server/database/delays.js
Normal file
|
@ -0,0 +1,23 @@
|
|||
// Millisecond time units.
const seconds = 1000;
const minutes = 60 * seconds;
const hours = 60 * minutes;

// Named delay presets: label -> duration in milliseconds.
export const DELAYS = {
  "1sec": 1 * seconds,
  "5sec": 5 * seconds,
  "10sec": 10 * seconds,
  "30sec": 30 * seconds,
  "1min": 1 * minutes,
  "5min": 5 * minutes,
  "10min": 10 * minutes,
  "15min": 15 * minutes,
  "30min": 30 * minutes,
  "1hour": 1 * hours,
  "2hour": 2 * hours,
  "3hour": 3 * hours,
  "4hour": 4 * hours,
};

/**
 * Resolve a delay label to a duration in milliseconds.
 * @param {string} delayStr - a key of DELAYS (e.g. "5min")
 * @returns {number} the duration in ms, or 0 for unknown labels
 */
export default function getDelay(delayStr) {
  return DELAYS[delayStr] ?? 0;
}
|
23
libold/server/database/migrations/1_create_catalog_table.sql
Normal file
23
libold/server/database/migrations/1_create_catalog_table.sql
Normal file
|
@ -0,0 +1,23 @@
|
|||
-- Migration 1: the test catalog. One row per known test; "name" carries a
-- UNIQUE constraint so application-level upserts can target it.
CREATE SEQUENCE catalog_id_seq;
CREATE TABLE catalog (
  id bigint NOT NULL DEFAULT nextval('catalog_id_seq') PRIMARY KEY,
  name varchar(255) DEFAULT NULL,
  class varchar(255) DEFAULT NULL,
  image varchar(255) DEFAULT NULL,
  -- quoted so "path" is read as a plain column identifier
  "path" varchar(255) DEFAULT NULL,
  description varchar(1023) DEFAULT NULL,
  type varchar(31) DEFAULT NULL,
  created TIMESTAMP NOT NULL DEFAULT now(),
  mr varchar(255) DEFAULT NULL,
  -- raw tag strings plus the per-category lists derived from them
  tags varchar(255)[] DEFAULT NULL,
  crons varchar(127)[] DEFAULT NULL,
  env varchar(31)[] DEFAULT NULL,
  regions varchar(15)[] DEFAULT NULL,
  triggers varchar(255)[] DEFAULT NULL,
  pipeline BOOLEAN DEFAULT FALSE,
  coverage varchar(255)[] DEFAULT NULL,
  projects varchar(255)[] DEFAULT NULL,
  delay varchar(31) DEFAULT NULL,
  CONSTRAINT unique_name UNIQUE(name)
);
ALTER SEQUENCE catalog_id_seq OWNED BY catalog.id;
|
15
libold/server/database/migrations/2_create_results_table.sql
Normal file
15
libold/server/database/migrations/2_create_results_table.sql
Normal file
|
@ -0,0 +1,15 @@
|
|||
-- Migration 2: raw test results, one row per run.
CREATE SEQUENCE results_id_seq;
CREATE TABLE results (
  id bigint NOT NULL DEFAULT nextval('results_id_seq') PRIMARY KEY,
  name varchar(255) DEFAULT NULL,
  class varchar(255) DEFAULT NULL,
  -- quoted: "method"/"timestamp" are reserved-word-adjacent identifiers
  "method" varchar(255) DEFAULT NULL,
  env varchar(31) DEFAULT NULL,
  "timestamp" TIMESTAMP NOT NULL DEFAULT now(),
  triage BOOLEAN DEFAULT FALSE,
  failed BOOLEAN DEFAULT FALSE,
  message varchar(2047) DEFAULT NULL,
  -- screenshot/console hold URLs to stored artifacts, not the data itself
  screenshot varchar(255) DEFAULT NULL,
  console varchar(255) DEFAULT NULL
);
ALTER SEQUENCE results_id_seq OWNED BY results.id;
|
|
@ -0,0 +1,9 @@
|
|||
-- Alerting silences: a row suppresses alerts for the named test/method
-- until "expires". (Migration filename not visible in this view.)
CREATE SEQUENCE alerting_id_seq;
CREATE TABLE alerting (
  id bigint NOT NULL DEFAULT nextval('alerting_id_seq') PRIMARY KEY,
  name varchar(255) DEFAULT NULL,
  class varchar(255) DEFAULT NULL,
  "method" varchar(255) DEFAULT NULL,
  expires TIMESTAMP NOT NULL DEFAULT now()
);
ALTER SEQUENCE alerting_id_seq OWNED BY alerting.id;
|
11
libold/server/database/mocks/alerting-mock.js
Normal file
11
libold/server/database/mocks/alerting-mock.js
Normal file
|
@ -0,0 +1,11 @@
|
|||
// Mock rows returned in place of the alerting table when Postgres is disabled.
export const silencedMock = () => [
  {
    name: `failing`,
    class: `failing.js`,
    method: "FAKEMETHOD",
    id: 0,
    // Silence window ends "now" — an already-expired silence.
    silencedUntil: new Date().toJSON(),
  },
];
|
153
libold/server/database/mocks/catalog-mock.js
Normal file
153
libold/server/database/mocks/catalog-mock.js
Normal file
|
@ -0,0 +1,153 @@
|
|||
// Mock catalog rows returned when Postgres is disabled.
// FIX: the original reused id 5 (tertiary1/tertiary3) and id 6
// (tertiary2/single-alt); ids are now unique and sequential (0-8).
export const testsMock = () => {
  return [
    {
      id: 0,
      name: "single",
      class: "single.js",
      image: "node:latest",
      isPipeline: false,
      type: "api",
      description: "This is a single test",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_core", "skip_alt"],
      path: "tests/assets/suite/single.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 1,
      name: "failing",
      class: "failing.js",
      image: "node:latest",
      isPipeline: false,
      type: "ui",
      description: "This is a failing test",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_core"],
      path: "tests/assets/suite/failing.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 2,
      name: "primary",
      class: "primary.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a primary test",
      tags: [
        "cron_1hour",
        "reg_us",
        "proj_core",
        "skip_alt",
        "pipeline_secondary1",
      ],
      path: "tests/assets/suite/primary.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 3,
      name: "secondary1",
      class: "secondary1.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a secondary test",
      tags: [
        "cron_1hour",
        "reg_us",
        "proj_core",
        "compound_tertiary1",
        "compound_tertiary2",
      ],
      path: "tests/assets/suite/secondary1.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 4,
      name: "secondary2",
      class: "secondary2.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a secondary2 test",
      tags: ["cron_1hour", "reg_us", "proj_core", "compound_tertiary3"],
      path: "tests/assets/suite/secondary2.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 5,
      name: "tertiary1",
      class: "tertiary1.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a third test",
      tags: ["cron_1hour", "reg_us", "proj_core"],
      path: "tests/assets/suite/tertiary1.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 6,
      name: "tertiary2",
      class: "tertiary2.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a third2 test",
      tags: ["cron_1hour", "reg_us", "proj_core"],
      path: "tests/assets/suite/tertiary2.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 7,
      name: "tertiary3",
      class: "tertiary3.js",
      image: "node:latest",
      isPipeline: true,
      type: "api",
      description: "This is a third3 test",
      tags: ["cron_1hour", "reg_us", "proj_core"],
      path: "tests/assets/suite/tertiary3.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
    {
      id: 8,
      name: "single-alt",
      class: "single-alt.js",
      image: "node:latest",
      isPipeline: false,
      type: "ui",
      description: "This is an alternative test",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_alt"],
      path: "tests/assets/suite/single-alt.js",
      created: Date.now(),
      mergeRequest: "https://example.com",
    },
  ];
};
|
||||
|
||||
// Mock pipeline trigger chains (root-first), mirroring the shape produced
// by getPipelineMappings: each step is { name, delay } in milliseconds.
export const mappingsMock = () => {
  const step = (name, delay) => ({ name, delay });
  return [
    [step("primary", 0), step("secondary1", 1000), step("tertiary1", 0)],
    [step("primary", 0), step("secondary1", 1000), step("tertiary2", 8000)],
    [step("primary", 0), step("secondary2", 0), step("tertiary3", 3000)],
  ];
};
|
30
libold/server/database/mocks/results-mock.js
Normal file
30
libold/server/database/mocks/results-mock.js
Normal file
|
@ -0,0 +1,30 @@
|
|||
// Mock "currently failing" rows returned when Postgres is disabled.
export const failingMock = () => {
  // Builds one failing-test row; per-row fields vary, the rest are fixed.
  const fakeFailure = (name, dailyFails, isPipeline, failedMessage) => ({
    name,
    class: `${name}.js`,
    timestamp: new Date().toJSON(),
    method: "FAKEMETHOD",
    cron: "1hour",
    type: "api",
    dailyFails,
    screenshot: "https://picsum.photos/1920/1080",
    recentResults: [1, 0, 0, 1, 0],
    isPipeline,
    failedMessage,
  });
  return [
    fakeFailure("failing", 12, false, `Some Test FailureMessage`),
    fakeFailure(
      "secondary1",
      1,
      true,
      `Some Test FailureMessage from Secondary1`
    ),
  ];
};
|
121
libold/server/database/pg-query.js
Normal file
121
libold/server/database/pg-query.js
Normal file
|
@ -0,0 +1,121 @@
|
|||
// Shallow-copy an entry, omitting keys whose value is undefined
// (undefined has no SQL representation; null is kept and rendered as NULL).
const buildPostgresEntry = (entry) => {
  const pgEntry = {};
  for (const [key, value] of Object.entries(entry)) {
    if (value !== undefined) pgEntry[key] = value;
  }
  return pgEntry;
};
|
||||
|
||||
/**
 * Render a JS value as a SQL literal fragment.
 * null -> "null"; strings are quoted with '' doubled (empty string -> null);
 * empty arrays -> "null"; string arrays -> "ARRAY ['a','b']";
 * everything else passes through unchanged.
 */
export const buildPostgresValue = (jsVar) => {
  if (jsVar === null) return "null";
  if (typeof jsVar === "string") return buildPostgresString(jsVar);
  if (Array.isArray(jsVar)) {
    if (jsVar.length === 0) return "null";
    if (isTypeArray(jsVar, "string")) return buildPostgresStringArray(jsVar);
  }
  return jsVar;
};

// Render a string array as a Postgres ARRAY literal with quoted elements.
const buildPostgresStringArray = (jsonArray) => {
  if (jsonArray.length === 0) return null;
  const quoted = jsonArray.map((element) => `'${element}'`);
  return `ARRAY [${quoted.join(",")}]`;
};

// True when every element of the array has the given typeof.
const isTypeArray = (jsonArray, type) =>
  jsonArray.every((element) => typeof element === type);

// Quote a string for SQL, doubling embedded single quotes.
// Falsy input (the empty string) yields null, not "''".
const buildPostgresString = (jsonString) =>
  (jsonString && `'${jsonString.replaceAll("'", "''")}'`) || null;
|
||||
|
||||
/**
 * Build an INSERT statement for `table` from a plain object.
 * Keys become double-quoted column names; values are rendered via
 * buildPostgresValue. undefined-valued keys are dropped.
 * @throws when jsEntry is not an object
 */
export const insertQuery = (table, jsEntry) => {
  if (typeof jsEntry !== "object") throw Error("PG Inserts must be objects!");
  const entry = buildPostgresEntry(jsEntry);
  const columns = Object.keys(entry);
  const values = columns.map((col) => buildPostgresValue(entry[col]));
  const quotedCols = columns.map((col) => `"${col}"`);
  return `INSERT INTO ${table}(${quotedCols.join(",")})\nVALUES(${values.join(
    ","
  )})`;
};
|
||||
|
||||
/**
 * Build a DELETE statement for `table` from an object of AND-ed equality
 * conditions. Values rendering to "null" become `IS NULL` checks.
 * @throws when jsEntry is not an object
 */
export const deleteQuery = (table, jsEntry) => {
  if (typeof jsEntry !== "object")
    throw Error("PG Delete conditionals must be an object!");
  const entry = buildPostgresEntry(jsEntry);
  const conditionals = Object.keys(entry).map((col) => {
    const value = buildPostgresValue(entry[col]);
    return value === "null" ? `x.${col} IS NULL` : `x.${col}=${value}`;
  });
  return `DELETE FROM ${table} x WHERE ${conditionals.join(" AND ")}`;
};
|
||||
/**
 * Build an `ON CONFLICT (...) DO UPDATE SET ...` suffix for an upsert.
 * @param {string[]} conflicts - conflict-target column names
 * @param {object} updates - column -> new value assignments
 * @throws when conflicts is not an array or updates is not an object
 */
export const onConflictUpdate = (conflicts, updates) => {
  if (!Array.isArray(conflicts)) throw Error("PG Conflicts must be an array!");
  if (typeof updates !== "object") throw Error("PG Updates must be objects!");
  const entry = buildPostgresEntry(updates);
  const assignments = Object.keys(entry)
    .map((col) => `${col}=${buildPostgresValue(entry[col])}`)
    .join(",");
  return `ON CONFLICT (${conflicts.join(",")}) DO UPDATE SET\n${assignments}`;
};
|
||||
// Build a TRUNCATE statement that empties the given table.
export const clearTableQuery = (table) => `TRUNCATE ${table}`;
|
||||
|
||||
/**
 * Build a SELECT with equality conditions joined by `joinWith`
 * (" AND " / " OR "); the table is aliased as x.
 * @throws when jsEntry is not an object
 */
export const selectWhereQuery = (table, jsEntry, joinWith) => {
  if (typeof jsEntry !== "object") throw Error("PG Where must be an object!");
  const where = buildPostgresEntry(jsEntry);
  const conditions = Object.keys(where)
    .map((col) => `x.${col}=${buildPostgresValue(where[col])}`)
    .join(joinWith);
  return `SELECT * FROM ${table} AS x WHERE\n${conditions}`;
};
|
||||
/**
 * Build an UPDATE statement: SET assignments from `updates`, WHERE equality
 * conditions from `wheres` joined by `joinWith` (" AND " / " OR ").
 * @throws when updates or wheres is not an object
 */
export const updateWhereQuery = (table, updates, wheres, joinWith) => {
  if (typeof updates !== "object") throw Error("PG Updates must be an object!");
  if (typeof wheres !== "object") throw Error("PG Wheres must be an object!");
  const update = buildPostgresEntry(updates);
  const where = buildPostgresEntry(wheres);
  const setClause = Object.keys(update)
    .map((col) => `${col} = ${buildPostgresValue(update[col])}`)
    .join(",");
  const whereClause = Object.keys(where)
    .map((col) => `${col} = ${buildPostgresValue(where[col])}`)
    .join(joinWith);
  return `UPDATE ${table}\nSET ${setClause} WHERE ${whereClause}`;
};
|
||||
// Convenience wrappers that fix the condition join operator:
// *Any* variants OR the conditions together, *All* variants AND them.
export const updateWhereAnyQuery = (table, updates, wheres) =>
  updateWhereQuery(table, updates, wheres, " OR ");
export const updateWhereAllQuery = (table, updates, wheres) =>
  updateWhereQuery(table, updates, wheres, " AND ");
export const selectWhereAnyQuery = (table, where) =>
  selectWhereQuery(table, where, " OR ");
export const selectWhereAllQuery = (table, where) =>
  selectWhereQuery(table, where, " AND ");
|
||||
|
||||
// Aggregate export so callers can import the whole query-builder API as a
// single default object in addition to the named exports above.
export default {
  selectWhereAnyQuery,
  selectWhereAllQuery,
  updateWhereAnyQuery,
  updateWhereAllQuery,
  insertQuery,
  deleteQuery,
  buildPostgresValue,
  onConflictUpdate,
  clearTableQuery,
};
|
62
libold/server/database/postgres.js
Normal file
62
libold/server/database/postgres.js
Normal file
|
@ -0,0 +1,62 @@
|
|||
// Imports
import path from "node:path";
import { URL } from "node:url";
import { migrate } from "postgres-migrations";
import createPgp from "pg-promise";
import moment from "moment";
import { INFO, WARN, OK, VERB } from "../util/logging.js";
// Environment Variables
const {
  QUALITEER_POSTGRES_DATABASE: database,
  QUALITEER_POSTGRES_ENABLED: pgEnabled,
  QUALITEER_POSTGRES_HOST: host,
  QUALITEER_POSTGRES_PASSWORD: password,
  QUALITEER_POSTGRES_PORT: port,
  QUALITEER_POSTGRES_USER: user,
} = process.env;

// Postgres-promise Configuration
// Ensure dates get saved as UTC date strings
// This prevents the parser from doing strange datetime operations
// NOTE(review): 1114 is presumably the OID for TIMESTAMP WITHOUT TIME ZONE —
// confirm against the pg type registry.
const pgp = createPgp();
pgp.pg.types.setTypeParser(1114, (str) => moment.utc(str).format());

// Database Config — local-development defaults when env vars are unset.
const dbConfig = {
  database: database ?? "qualiteer",
  user: user ?? "postgres",
  password: password ?? "postgres",
  host: host ?? "localhost",
  port: port ?? 5432,
  ensureDatabaseExists: true,
};

// Migration SQL files live next to this module in ./migrations
const databaseDir = new URL(".", import.meta.url).pathname;
const migrationsDir = path.resolve(databaseDir, "migrations/");

// Stand-in query function used before connect() and when Postgres is disabled.
const queryMock = (str) => INFO("POSTGRES MOCK", str);

// Returns an async connector bound to `pg`. On success it runs migrations,
// then copies the real pg-promise instance's members onto `pg` in place so
// existing references to the exported object become live.
const connect = (pg) => async () => {
  if (pgEnabled === "false") {
    WARN("POSTGRES", "Postgres Disabled!");
    return { query: queryMock };
  }
  VERB("POSTGRES", "Migrating...");
  await migrate(dbConfig, migrationsDir);
  // Override fake methods
  // NOTE(review): relies on for..in enumerating the pg-promise instance's
  // members (including connect) so they overwrite the mocks — confirm.
  const pgInstance = pgp(dbConfig);
  for (var k in pgInstance) pg[k] = pgInstance[k];
  VERB("POSTGRES", "Migrated Successfully");
  await pg.connect();
  VERB("POSTGRES", "Postgres connected Successfully!");

  OK("POSTGRES", `Connected to database ${dbConfig.database}!`);
};

// Build the lazily-connecting singleton: starts as a mock whose query just
// logs; becomes a real client once connect() succeeds.
const buildPostgres = () => {
  var pg = { query: queryMock };
  pg.connect = connect(pg);
  return pg;
};

export default buildPostgres();
|
56
libold/server/database/queries/alerting.js
Normal file
56
libold/server/database/queries/alerting.js
Normal file
|
@ -0,0 +1,56 @@
|
|||
import pg from "../postgres.js";
import { silencedMock } from "../mocks/alerting-mock.js";
import moment from "moment";
// Imports
import {
  insertQuery,
  selectWhereAnyQuery,
  updateWhereAnyQuery,
  deleteQuery,
} from "../pg-query.js";
// Constants
const table = "alerting";
// Truthy when Postgres access is disabled; query functions then serve mocks.
const PG_DISABLED = process.env.POSTGRES_DISABLED;

// Create (no id) or update (id present) an alert silence.
// `silence.expires` is NOT a timestamp here: it is a { h, m } duration
// added to "now" to compute the stored expiry.
export const upsertAlertSilence = async (silence) => {
  const {
    id,
    name,
    class: className,
    method,
    expires: duration,
    keepExpires,
  } = silence;
  const { h, m } = duration;
  const expires = moment().add(h, "hours").add(m, "minutes").utc().format();
  const entry = {
    name,
    class: className,
    method,
    // undefined is dropped by the query builder, leaving expires untouched.
    expires: keepExpires ? undefined : expires,
  };
  // For updates, "*" wildcard fields are stored as NULL.
  const asUpdate = {};
  for (var k of Object.keys(entry))
    asUpdate[k] = entry[k] === "*" ? null : entry[k];
  var query = id
    ? updateWhereAnyQuery(table, asUpdate, { id })
    : insertQuery(table, entry);
  return pg.query(query);
};

// Remove a silence by id.
export const deleteAlertSilence = async (silence) => {
  const { id } = silence;
  const query = deleteQuery(table, { id });
  return pg.query(query);
};

// Queries
// Fetch all silences, presenting NULL columns to callers as "*" wildcards
// (the inverse of the mapping applied on upsert).
export const getSilencedTests = async () => {
  if (PG_DISABLED) return silencedMock();
  const query = `SELECT * from ${table}`;
  const silenced = await pg.query(query);
  silenced.forEach((t, i) => {
    for (var k of Object.keys(t)) silenced[i][k] = t[k] === null ? "*" : t[k];
  });
  return silenced;
};
|
121
libold/server/database/queries/catalog.js
Normal file
121
libold/server/database/queries/catalog.js
Normal file
|
@ -0,0 +1,121 @@
|
|||
import pg from "../postgres.js";
// Imports
import {
  insertQuery,
  selectWhereAnyQuery,
  onConflictUpdate,
} from "../pg-query.js";
import { WARN } from "../../util/logging.js";

import getFilteredTags from "../tags.js";
import getDelay from "../delays.js";
// Constants
const table = "catalog"; // Postgres table backing the test catalog
// Truthy when Postgres access is disabled; query functions then serve mocks.
const PG_DISABLED = process.env.POSTGRES_DISABLED;
import { testsMock, mappingsMock } from "../mocks/catalog-mock.js";
// Queries
|
||||
|
||||
// Delete catalog rows whose name is NOT in `testNames`.
// NOTE(review): names are interpolated into the SQL string directly —
// callers must supply trusted names (no parameterization here).
export const removeDroppedTests = async (testNames) => {
  // BUG: After dropping a test, the id jumps ridiculously high
  const quoted = testNames.map((name) => `'${name}'`);
  const query = `DELETE FROM catalog as x where x.name not in (${quoted.join(
    ","
  )});`;
  return pg.query(query);
};
|
||||
|
||||
// Fetch a single catalog row by name; warns if the name is ambiguous and
// returns the first match (undefined when none exist).
export const getTest = async (name) => {
  const results = await pg.query(selectWhereAnyQuery(table, { name }));
  if (results.length > 1)
    WARN("CATALOG", `More than 1 test found for '${name}'`);
  return results[0];
};
|
||||
|
||||
// Fetch every catalog row (mock data when Postgres is disabled).
export const getTests = async () => {
  if (PG_DISABLED) return testsMock();
  return pg.query(`SELECT * from ${table}`);
};
|
||||
|
||||
// Build trigger chains ("mappings") for pipeline tests. Each mapping is an
// array of { name, delay, delayStr } steps ordered root-first. A chain is
// seeded from every pipeline test that triggers nothing, then walked upward
// via the `triggers` column and reversed.
export const getPipelineMappings = async () => {
  if (PG_DISABLED) return mappingsMock();
  const query = `SELECT * from ${table} WHERE pipeline`;
  const tests = await pg.query(query);
  const mappings = [];
  for (const test of tests) {
    // Chains are seeded only from tests that do not trigger others.
    if (test.triggers) continue;
    const { name, delay: delayStr } = test;
    const triggerStack = [{ name, delay: getDelay(delayStr), delayStr }];
    // Walk upward: find the test whose `triggers` include the current name.
    // FIX: Array.prototype.find yields undefined (never null) on no match,
    // so the original `!== null` condition was always true and terminated
    // only via an inner break; test truthiness instead.
    let current = { name };
    while (
      (current = tests.find(
        (te) => te.triggers && te.triggers.includes(current.name)
      ))
    ) {
      triggerStack.push({
        name: current.name,
        delay: getDelay(current.delay),
        delayStr: current.delay,
      });
    }
    mappings.push(triggerStack.reverse());
  }
  return mappings;
};
|
||||
|
||||
// NOTE(review): incomplete stub — when Postgres is disabled it builds the
// mock list into an unused local and returns nothing; the live-DB path is
// missing entirely. Callers currently always receive undefined.
export const getProjects = async () => {
  if (PG_DISABLED) {
    const tests = testsMock();
  }
};
|
||||
|
||||
// Empty the catalog table, resetting ids and cascading to dependents.
export const truncateTests = async () => {
  if (PG_DISABLED) return console.log(`Would truncate table ${table}`);
  return await pg.query(`TRUNCATE ${table} RESTART IDENTITY CASCADE;`);
};
|
||||
|
||||
// Insert a catalog row for `test`, or update it in place when a row with the
// same name already exists (name carries a UNIQUE constraint — see the
// catalog migration).
export const upsertTest = async (test) => {
  if (PG_DISABLED) return console.log("Would insert test", test);
  const {
    name,
    class: className,
    image,
    path,
    description,
    type,
    created,
    mergeRequest,
    tags,
  } = test;

  // Split raw tags (cron_*, env_*, reg_*, ...) into per-category lists.
  const filteredTags = getFilteredTags(tags);

  // Drop env entries that are explicitly excluded via ignore_* tags.
  const env =
    filteredTags.ignore && filteredTags.env
      ? filteredTags.env.filter((e) => !filteredTags.ignore.includes(e))
      : filteredTags.env;
  // Map the test plus its derived tag lists onto catalog columns.
  const catalogEntry = {
    name,
    class: className,
    image,
    path,
    description: description ? description : null,
    type,
    created,
    mr: mergeRequest,
    tags,
    crons: filteredTags.crons,
    env,
    regions: filteredTags.regions,
    triggers: filteredTags.triggers,
    pipeline: filteredTags.pipeline ? true : false,
    coverage: filteredTags.coverage,
    projects: filteredTags.projects,
    // Only the first delay_* tag is stored.
    delay: filteredTags.delay ? filteredTags.delay[0] : null,
  };
  const query =
    insertQuery(table, catalogEntry) + onConflictUpdate(["name"], catalogEntry);
  return await pg.query(query);
};
|
93
libold/server/database/queries/results.js
Normal file
93
libold/server/database/queries/results.js
Normal file
|
@ -0,0 +1,93 @@
|
|||
import pg from "../postgres.js";
import { failingMock } from "../mocks/results-mock.js";
// Imports
import {
  insertQuery,
  selectWhereAnyQuery,
  selectWhereAllQuery,
  updateWhereAnyQuery,
} from "../pg-query.js";
// Constants
const table = "results";
const recentResultsMax = 5; // cap on run-history rows fetched for failing tests
// Truthy when Postgres access is disabled; query functions then serve mocks.
const PG_DISABLED = process.env.POSTGRES_DISABLED;
|
||||
|
||||
// Queries
|
||||
// Persist a single test-result row; RETURNING * hands the stored row
// (including its generated id) back to the caller.
export const insertTestResult = (testResult) => {
  const {
    name,
    class: className,
    method,
    env,
    timestamp,
    triage,
    failed,
    message,
    screenshot,
    console: cs,
  } = testResult;
  const row = {
    name,
    class: className,
    method,
    env,
    timestamp,
    triage,
    failed,
    message,
    screenshot,
    console: cs,
  };
  return pg.query(`${insertQuery(table, row)}\n RETURNING *`);
};
|
||||
|
||||
// Collect tests whose most recent non-triaged run in the last 24h failed,
// annotated with the day's fail count and a short recent-run history.
export const getCurrentlyFailing = async () => {
  if (PG_DISABLED) return failingMock();
  /* This can probably be changed into a super query, but perhaps faster/smaller */
  // CTE fragments, composed below via WITH:
  // recent: last-24h runs, excluding rows that are both failed and triaged.
  const recent = `SELECT * FROM ${table} WHERE (timestamp BETWEEN NOW() - INTERVAL '24 HOURS' AND NOW()) AND NOT(failed AND triage)`;
  // slim_catalog: catalog columns needed to annotate a failure row.
  const slimCatalog = `SELECT name, crons, class, type, pipeline, env AS enabled_env FROM catalog`;
  // failing: per test, the newest recent run — kept only if it failed.
  const failing = `SELECT * FROM recent INNER JOIN slim_catalog USING(name) WHERE timestamp = (SELECT MAX(timestamp) FROM recent r2 WHERE recent.name = r2.name) AND failed`;
  // applicableFailing: per failing test, how many recent runs it has.
  const applicableFailing = `SELECT name, count(*) as fails FROM recent WHERE recent.name IN (SELECT name FROM failing) GROUP BY name`;
  /*const runHistory = `SELECT name, timestamp, failed FROM (SELECT *, ROW_NUMBER() OVER(PARTITION BY name ORDER BY timestamp) as n
  FROM ${table} WHERE name IN (SELECT name FROM failing)) as ord WHERE n <= ${recentResultsMax} ORDER BY name DESC`;*/
  // NOTE(review): this LIMIT caps the history across ALL failing tests
  // combined, not per test (unlike the commented-out window-function
  // version above) — confirm intent.
  const runHistory = `SELECT name, timestamp, failed FROM results WHERE NOT triage AND name IN (SELECT name FROM failing) ORDER BY timestamp DESC LIMIT ${recentResultsMax}`;
  // const recentQuery = pg.query(recent);
  const failingQuery = pg.query(
    `WITH recent as (${recent}), slim_catalog as (${slimCatalog}) ${failing}`
  );
  const applicableQuery = pg.query(
    `WITH recent as (${recent}), slim_catalog as (${slimCatalog}), failing as (${failing}) ${applicableFailing}`
  );
  const historyQuery = pg.query(
    `WITH recent as (${recent}), slim_catalog as (${slimCatalog}), failing as (${failing}) ${runHistory}`
  );

  // Run the three derived queries concurrently.
  const [currentlyFailing, applicableFails, failHistory] = await Promise.all([
    failingQuery,
    applicableQuery,
    historyQuery,
  ]);
  // Merge counts and history into each failing row, renaming DB columns
  // (enabled_env -> enabledEnv, pipeline -> isPipeline) for callers.
  for (var i = 0; i < currentlyFailing.length; i++) {
    currentlyFailing[i].dailyFails = parseInt(
      applicableFails.find((af) => af.name === currentlyFailing[i].name).fails
    );
    currentlyFailing[i].recentResults = [];
    currentlyFailing[i].enabledEnv = currentlyFailing[i].enabled_env;
    currentlyFailing[i].isPipeline = currentlyFailing[i].pipeline;
    delete currentlyFailing[i].enabled_env;
    delete currentlyFailing[i].pipeline;
    for (var fh of failHistory) {
      if (fh.name !== currentlyFailing[i].name) continue;
      currentlyFailing[i].recentResults.push(fh);
    }
  }
  return currentlyFailing;
};
|
||||
|
||||
// Mark a single result row as not-failed so it no longer surfaces as failing.
export const ignoreResult = async ({ id }) =>
  pg.query(updateWhereAnyQuery(table, { failed: false }, { id }));
|
11
libold/server/database/seeds/alerting-seed.js
Normal file
11
libold/server/database/seeds/alerting-seed.js
Normal file
|
@ -0,0 +1,11 @@
|
|||
// Seed data for the alerting table: one silence entry whose expiry is set
// to the moment of seeding (i.e. already expired).
export const table = "alerting";
export const seed = () => {
  return [
    {
      name: `failing`,
      class: `failing.js`,
      method: "FAKEMETHOD",
      expires: new Date().toJSON(),
    },
  ];
};
|
126
libold/server/database/seeds/catalog-seed.js
Normal file
126
libold/server/database/seeds/catalog-seed.js
Normal file
|
@ -0,0 +1,126 @@
|
|||
// Seed data for the catalog table: a small suite exercising standalone,
// failing, and chained pipeline tests (primary -> secondary* -> tertiary*).
// NOTE(review): these rows use a bare "pipeline" tag, but tags.js matches
// the pipeline category by the "is_pipeline" prefix — "pipeline" would not
// match it and would be warned about as invalid. Confirm which is canonical.
export const table = "catalog";
export const seed = () => {
  return [
    {
      name: "single",
      class: "single.js",
      image: "node:latest",
      path: "tests/assets/suite/single.js",
      description: "This is a single test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_core", "ignore_alt"],
    },
    {
      name: "failing",
      class: "failing.js",
      image: "node:latest",
      path: "tests/assets/suite/failing.js",
      description: "This is a failing test",
      type: "ui",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_core"],
    },
    {
      name: "primary",
      class: "primary.js",
      image: "node:latest",
      path: "tests/assets/suite/primary.js",
      description: "This is a primary test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: [
        "pipeline",
        "cron_1hour",
        "reg_us",
        "proj_core",
        "ignore_alt",
        "triggers_secondary1",
        "triggers_secondary2",
      ],
    },
    {
      name: "secondary1",
      class: "secondary1.js",
      image: "node:latest",
      path: "tests/assets/suite/secondary1.js",
      description: "This is a secondary test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: [
        "pipeline",
        "cron_1hour",
        "reg_us",
        "proj_core",
        "triggers_tertiary1",
        "triggers_tertiary2",
        "delay_1sec",
      ],
    },
    {
      name: "secondary2",
      class: "secondary2.js",
      image: "node:latest",
      path: "tests/assets/suite/secondary2.js",
      description: "This is a secondary2 test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: [
        "pipeline",
        "cron_1hour",
        "reg_us",
        "proj_core",
        "triggers_tertiary3",
      ],
    },
    {
      name: "tertiary1",
      class: "tertiary1.js",
      image: "node:latest",
      path: "tests/assets/suite/tertiary1.js",
      description: "This is a third test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["pipeline", "cron_1hour", "reg_us", "proj_core"],
    },
    {
      name: "tertiary2",
      class: "tertiary2.js",
      image: "node:latest",
      path: "tests/assets/suite/tertiary2.js",
      description: "This is a third2 test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["pipeline", "cron_1hour", "reg_us", "proj_core", "delay_10sec"],
    },
    {
      name: "tertiary3",
      class: "tertiary3.js",
      image: "node:latest",
      path: "tests/assets/suite/tertiary3.js",
      description: "This is a third3 test",
      type: "api",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["pipeline", "cron_1hour", "reg_us", "proj_core", "delay_5sec"],
    },
    {
      name: "single-alt",
      class: "single-alt.js",
      image: "node:latest",
      path: "tests/assets/suite/single-alt.js",
      description: "This is an alternative test",
      type: "ui",
      created: new Date().toJSON(),
      mergeRequest: "https://example.com",
      tags: ["cron_1hour", "reg_us", "env_ci", "proj_alt"],
    },
  ];
};
|
29
libold/server/database/seeds/results-seed.js
Normal file
29
libold/server/database/seeds/results-seed.js
Normal file
|
@ -0,0 +1,29 @@
|
|||
// Seed data for the results table: two fresh, untriaged failures matching
// the "failing" and "secondary1" catalog seeds.
export const table = "results";
export const seed = () => {
  return [
    {
      name: "failing",
      class: "failing.js",
      method: "FAKEMETHOD",
      env: "prod",
      timestamp: new Date().toJSON(),
      triage: false,
      failed: true,
      message: "Some Test FailureMessage",
      screenshot: "https://picsum.photos/1920/1080",
      console: "https://example.com",
    },
    {
      name: "secondary1",
      class: "secondary1.js",
      method: "FAKEMETHOD",
      env: "prod",
      timestamp: new Date().toJSON(),
      triage: false,
      failed: true,
      message: "Some Test FailureMessage from Secondary1",
      screenshot: "https://picsum.photos/1920/1080",
      console: "https://example.com",
    },
  ];
};
|
26
libold/server/database/tags.js
Normal file
26
libold/server/database/tags.js
Normal file
|
@ -0,0 +1,26 @@
|
|||
import { WARN } from "../util/logging.js";
|
||||
// Known tag categories. `tag` is the raw-string prefix to match; `name` is
// the bucket key in the filtered result; `value` post-processes the suffix.
// NOTE(review): PIPELINE matches the "is_pipeline" prefix, while the seed
// data uses a bare "pipeline" tag — confirm which form is canonical.
export const TAGS = {
  IGNORE: { name: "ignore", tag: "ignore_", value: (t) => t },
  CRON: { name: "crons", tag: "cron_", value: (t) => t },
  ENV: { name: "env", tag: "env_", value: (t) => t },
  REGIONS: { name: "regions", tag: "reg_", value: (t) => t },
  PIPELINE: { name: "pipeline", tag: "is_pipeline", value: (t) => t },
  COVERAGE: { name: "coverage", tag: "coverage_", value: (t) => t },
  PROJECT: { name: "projects", tag: "proj_", value: (t) => t },
  DELAY: { name: "delay", tag: "delay_", value: (t) => t },
  TRIGGERS: { name: "triggers", tag: "triggers_", value: (t) => t },
};

/**
 * Bucket raw tag strings by category prefix.
 * @param {string[]} tags - raw tags such as "cron_1hour"
 * @returns {object} category name -> array of suffix values; unmatched
 *   tags are warned about and skipped
 */
export default function getFilteredTags(tags) {
  const filtered = {};
  for (const rawTag of tags) {
    const category = Object.values(TAGS).find((candidate) =>
      rawTag.startsWith(candidate.tag)
    );
    if (!category) {
      WARN("CATALOG", `Tag '${rawTag}' did not have a valid prefix!`);
      continue;
    }
    const suffix = category.value(rawTag.replace(category.tag, ""));
    (filtered[category.name] ??= []).push(suffix);
  }
  return filtered;
}
|
Loading…
Add table
Add a link
Reference in a new issue