Merge branch 'ep/Nov17/DbConsolidation' into 'master'

Migrate to Postgres over Mongo

See merge request Dunemask/nubian!3
This commit is contained in:
Elijah Dunemask 2021-11-18 00:21:58 +00:00
commit 3e45c7efca
23 changed files with 534 additions and 466 deletions

View file

@ -1,2 +1 @@
src/uploads/
uploads/
buckets/

3
.gitignore vendored
View file

@ -9,5 +9,4 @@ src/uploads/
.env
.env.dev
.env.prod
newsrc/
uploads/
buckets/

View file

@ -19,19 +19,15 @@
"homepage": "https://gitlab.com/Dunemask/nubian#readme",
"dependencies": {
"axios": "^0.21.1",
"body-parser": "^1.19.0",
"chalk": "^4.1.2",
"express": "^4.17.1",
"express-bearer-token": "^2.4.0",
"figlet": "^1.5.2",
"moment": "^2.29.1",
"mongoose": "^5.13.3",
"multer": "^1.4.2",
"multer-gridfs-storage": "^5.0.2",
"path": "^0.12.7",
"pg-promise": "^10.11.1",
"postgres-migrations": "^5.3.0",
"rimraf": "^3.0.2"
"postgres-migrations": "^5.3.0"
},
"devDependencies": {
"nodemon": "^2.0.14"

View file

@ -1,144 +0,0 @@
//Module Imports
const { resolve: resolvePath } = require("path");
const {
existsSync: fexists,
mkdirSync: mkdir,
readdirSync: readdir,
unlinkSync: fremove,
} = require("fs");
const config = require("../config.json");
const mongoose = require("mongoose");
mongoose.connect(
`mongodb://${process.env.NUBIAN_MONGO_HOST}/nubian?authSource=admin`,
{
useUnifiedTopology: true,
useNewUrlParser: true,
user: process.env.NUBIAN_MONGO_USERNAME,
pass: process.env.NUBIAN_MONGO_PASSWORD,
}
);
const users = require("../schemas/user");
const files = require("../schemas/file");
// Returns true when `userId` may read `file`: the file is public, the user
// owns it, or the user appears in the file's view/edit share lists.
// Ids are stringified so Mongo ObjectIds and plain strings compare equally.
// (The original mutated the `userId` parameter inside the condition.)
function authorizedToView(userId, file) {
  const id = `${userId}`;
  if (file.public || `${file.owner}` === id) return true;
  return file.view.includes(id) || file.edit.includes(id);
}
/**
 * True when `userId` owns `file` or is listed in its edit share list.
 * Ids are stringified so ObjectIds and plain strings compare equally.
 */
function authorizedToEdit(userId, file) {
  const id = `${userId}`;
  if (`${file.owner}` == id) return true;
  return file.edit.includes(id);
}
// Looks up a single file by id and resolves with it only if `userId` is
// allowed to view it; resolves to null otherwise.
function getFile(userId, fileId) {
  return files.findOne({ _id: fileId }).then((file) => {
    // BUG FIX: findOne resolves null for unknown ids; the original passed
    // that null straight into authorizedToView and crashed on file.owner.
    if (file && authorizedToView(userId, file)) return file;
    return null;
  });
}
// Deletes the given file ids on behalf of `userId`.
// Files are grouped by owner so each owner's storage accounting is updated
// with one query. Resolves to:
//   { files: <docs actually removed, for disk cleanup>, failed: <ids denied> }
function deleteFiles(userId, fileIds) {
  return files.find({ _id: { $in: fileIds } }).then((databaseFiles) => {
    const failed = [];
    const filesByOwner = {}; // BUG FIX: was an implicit global
    databaseFiles.forEach((file) => {
      if (!(file.owner in filesByOwner)) filesByOwner[file.owner] = [];
      filesByOwner[file.owner].push(file);
    });
    // First pass: drop files the user may not edit, tally reclaimable bytes.
    const deleteSizeByOwner = {};
    for (const owner in filesByOwner) {
      let deleteSize = 0;
      filesByOwner[owner] = filesByOwner[owner].filter((file) => {
        if (authorizedToEdit(userId, file)) {
          deleteSize += file.size;
          return true;
        }
        failed.push(`${file._id}`);
        return false;
      });
      deleteSizeByOwner[owner] = deleteSize;
    }
    const allowedIds = fileIds.filter((fileId) => !failed.includes(fileId));
    // BUG FIX: the original called toRemove.concat(...) and discarded the
    // result, so the resolved `files` list was always empty and callers
    // never unlinked anything from disk.
    const toRemove = databaseFiles.filter((file) =>
      allowedIds.includes(`${file._id}`)
    );
    for (const owner in filesByOwner) {
      // Detach the deleted files from the owner and reclaim their storage.
      users
        .updateOne(
          { cairoId: owner, usedStorage: { $gte: deleteSizeByOwner[owner] } },
          {
            $pull: { owned: { $in: allowedIds } },
            $inc: { usedStorage: -deleteSizeByOwner[owner] },
          }
        )
        .exec();
    }
    // Remove the file documents themselves.
    files.deleteMany({ _id: { $in: allowedIds } }).exec();
    return { files: toRemove, failed };
  });
}
// Toggles the `public` flag on each target file the user may edit.
// Resolves with the list of file ids the user was NOT allowed to change.
function publicfyFiles(userId, targetFiles) {
  return files.find({ _id: { $in: targetFiles } }).then((databaseFiles) => {
    const failed = [];
    for (const file of databaseFiles) {
      if (!authorizedToEdit(userId, file)) {
        failed.push(`${file._id}`);
        continue;
      }
      files.updateOne({ _id: file._id }, { public: !file.public }).exec();
    }
    return failed;
  });
}
// Creates a fresh user record keyed by cairoId with the configured default
// storage quota and no owned/shared files.
function createUser(cairoId) {
  const quota =
    config.Storage.UserStorageSize * config.Storage.UserStorageUnit;
  return users.create({
    cairoId,
    usedStorage: 0,
    storage: quota,
    owned: [],
    shared: [],
  });
}
// Fetches the user with the given cairoId, lazily creating the record the
// first time a new user is seen.
function getUserByCairoId(cairoId) {
  return users
    .findOne({ cairoId })
    .then((user) => (user ? user : createUser(cairoId)));
}
// Records `fileData` as a new file owned by the user behind `cairoId`.
// Resolves with the created file document, or null when the upload would
// exceed the user's storage quota.
function uploadFile(cairoId, fileData) {
  return getUserByCairoId(cairoId).then((user) => {
    if (user.usedStorage + fileData.size > user.storage) return null;
    return users
      .updateOne({ cairoId }, { $inc: { usedStorage: fileData.size } })
      .then(() => createFile(cairoId, fileData))
      .then((file) => {
        if (file == null) return null;
        // BUG FIX: the original fired this update without awaiting it
        // (empty `.then()`), so callers could observe the file before it
        // appeared in the user's `owned` list.
        return users
          .updateOne({ cairoId }, { $push: { owned: file._id } })
          .then(() => file);
      });
  });
}
// Inserts a file document for a multer upload.
function createFile(userId, fileData) {
  return files.create({
    path: fileData.path,
    owner: userId,
    name: fileData.originalname,
    // Stored names follow "<epoch millis>-<original name>"; keep the millis.
    date: fileData.filename.substring(0, fileData.filename.indexOf("-")),
    size: fileData.size,
    public: false,
    edit: [],
    view: [],
  });
}
// Mongo-backed storage API consumed by api/user.js.
module.exports = {
  deleteFiles,
  getFile,
  createUser,
  getUserByCairoId,
  publicfyFiles,
  uploadFile,
};

View file

@ -1,41 +0,0 @@
//Module Imports
const fs = require("fs");
const { resolve: resolvePath } = require("path");
const multer = require("multer");
//Local Imports
const config = require("../config.json");
//Multer Configs
// Disk storage that drops each upload into a per-user directory and
// prefixes the stored name with the upload timestamp.
const userUploadStorage = multer.diskStorage({
  destination: (req, file, cb) =>
    cb(null, userUploadDestination(req.user.cairoId)),
  filename: (req, file, cb) => {
    // Spaces in names are normalized to underscores.
    const n = file.originalname.replaceAll(" ", "_");
    const fileName = `${Date.now()}-${n}`;
    // If the client disconnects mid-upload, remove the partial file.
    req.on("aborted", () =>
      cancelUpload(
        resolvePath(userUploadDestination(req.user.cairoId), fileName)
      )
    );
    cb(null, fileName);
  },
});
// Express middleware accepting one file from the "user-selected-file" field.
const userUpload = multer({
  storage: userUploadStorage,
}).single("user-selected-file");
//Helper Methods
// Ensures uploads/<user_id> exists and returns its absolute path.
function userUploadDestination(user_id) {
  if (!fs.existsSync("uploads")) fs.mkdirSync("uploads");
  const destination = resolvePath(`uploads/${user_id}`);
  if (!fs.existsSync(destination)) fs.mkdirSync(destination);
  return destination;
}
// Best-effort removal of a (possibly partial) uploaded file.
function cancelUpload(path) {
  if (path != null && fs.existsSync(path)) fs.unlinkSync(path);
}
module.exports = {
  userUpload,
  cancelUpload,
};

View file

@ -1,89 +0,0 @@
//Module Imports
const { resolve: resolvePath } = require("path");
const { existsSync: fexists, unlinkSync: fremove } = require("fs");
//Local Imports
const storage = require("./storage");
const config = require("../config.json");
// Resolves the user record for a cairoId, creating it on first use
// (see storage.getUserByCairoId).
function load(cairoId) {
  return storage.getUserByCairoId(cairoId);
}
/**
 * Create a user with a cairoId (should use Dunestorm API to login).
 * BUG FIX: the original discarded the promise from storage.createUser, so
 * callers could neither await the insert nor observe failures; returning
 * it is backward-compatible (previously returned undefined).
 */
function createUser(cairoId) {
  return storage.createUser(cairoId);
}
/**
 * Creates a file entry for an uploaded file's metadata.
 * Resolves with the stored file document, or null when the user's quota
 * would be exceeded (see storage.uploadFile).
 */
function uploadFile(cairoId, fileData) {
  return storage.uploadFile(cairoId, fileData);
}
/**
 * Deletes files.
 * Requires cairoId to guarantee permission to delete a file.
 * Sorts files by user before deleting to speed up reference updates.
 * Resolves to `false` when everything was removed, otherwise the list of
 * file ids that failed (permission denied or disk unlink error).
 */
function deleteFiles(cairoId, targetFiles) {
  return storage.deleteFiles(cairoId, targetFiles).then((deleteData) => {
    var files = deleteData.files;
    var deleteFails = deleteData.failed;
    files.forEach((file) => {
      try {
        // DB entries are already gone; now remove the bytes on disk.
        fremove(file.path);
      } catch (e) {
        console.error("Error deleting file ", file.name, "\nPath:", file.path);
        deleteFails.push(`${file._id}`);
      }
    });
    return deleteFails.length > 0 && deleteFails;
  });
}
/**
 * Returns the file documents for `fileList` that the user may view
 * (entries the user cannot see resolve to null via storage.getFile).
 */
function getOwnedFiles(cairoId, fileList) {
  // The original wrapped each existing promise in `new Promise(...)` — the
  // explicit-construction anti-pattern; mapping directly is equivalent.
  return Promise.all(fileList.map((file) => storage.getFile(cairoId, file)));
}
/**
 * TODO: Implement Advanced Sharing.
 * Shares file with various people, and various permissions.
 * NOTE(review): currently a stub — it only logs and returns undefined.
 */
function shareFile(cairoId, targetFile) {
  console.log(cairoId, "requesting to share file");
  console.log(targetFile);
}
/**
 * TODO: Implement Advanced Sharing.
 * Returns all files shared with a user.
 * NOTE(review): the storage module shown here does not export
 * getSharedFileList, so calling this throws a TypeError at runtime —
 * confirm before wiring it to a route.
 */
function getSharedFiles(cairoId) {
  return storage.getSharedFileList(cairoId);
}
/**
 * Checks if the user is the owner and then toggles the list of files to public.
 * NOTE(review): storage.publicfyFiles resolves with the ids that failed,
 * but that promise is ignored here, so this always returns false ("no
 * failures") regardless of outcome — likely a bug; fixing it would make
 * the return value a Promise, which callers don't currently expect.
 */
function publicfyFiles(cairoId, files) {
  var publicfyFails = [];
  storage.publicfyFiles(cairoId, files);
  return publicfyFails.length > 0 && publicfyFails;
}
module.exports = {
  createUser,
  uploadFile,
  deleteFiles,
  getOwnedFiles,
  publicfyFiles,
  shareFile,
  getSharedFiles,
  load,
};

View file

@ -1,12 +0,0 @@
{
"Storage": {
"UserStorageSize": 25,
"UserStorageUnit": 1048576,
"UploadMaxSize": ""
},
"Server": {
"Port": 52001,
"Debug": false,
"BodyLimit": "5mb"
}
}

53
src/crud/files.js Normal file
View file

@ -0,0 +1,53 @@
// Imports
const {
insertQuery,
selectWhereAllQuery,
selectWhereAnyQuery,
updateWhereAnyQuery,
deleteQuery,
} = require("../utils/postgres-common");
// Constants
const table = "files";
// Queries
/**
 * Inserts a file row owned by `owner`, resolving with the inserted row(s)
 * via RETURNING *.
 */
const insertFile = (owner, filename, original_filename, filesize) => {
  const entry = {
    owner,
    filename,
    original_filename,
    filesize,
    public: false,
    edit: [],
    view: [],
  };
  const query = `${insertQuery(table, entry)}\nRETURNING *;`;
  return PG.query(query);
};
// Resolves with every file row owned by `owner`.
const queryOwnedFiles = (owner) =>
  PG.query(selectWhereAllQuery(table, { owner }));
// Deletes a file row by id, resolving with the deleted row via RETURNING.
const deleteFile = (fileId) => {
  const query = `${deleteQuery(table, { id: fileId })}\nRETURNING *`;
  return PG.query(query);
};
// Resolves with the (zero- or one-element) row set for the given file id.
const getFile = (fileId) =>
  PG.query(selectWhereAllQuery(table, { id: fileId }));
// Toggles a file's `public` flag in place.
// SECURITY FIX: fileId came straight from the request body and was
// interpolated into the SQL string verbatim (injection). Escape embedded
// single quotes the same way utils/postgres-common escapes strings.
const setPublic = (fileId) => {
  const safeId = String(fileId).replaceAll("'", "''");
  const query = `UPDATE ${table} SET public = NOT public WHERE id='${safeId}'`;
  return PG.query(query);
};
// Files-table CRUD surface used by routes/stash-route.js.
module.exports = {
  insertFile,
  queryOwnedFiles,
  deleteFile,
  getFile,
  setPublic,
};

36
src/crud/users.js Normal file
View file

@ -0,0 +1,36 @@
// Imports
const {
insertQuery,
selectWhereAnyQuery,
updateWhereAnyQuery,
} = require("../utils/postgres-common");
// Constants
const table = "users";
const defaultStorage = process.env.NUBIAN_DEFAULT_STORAGE ?? 0;
// Queries
// Inserts the user if unseen (new users get the default quota), then
// selects the row in the same round trip.
// NOTE(review): presumably pg-promise returns the rows of the trailing
// SELECT for this multi-statement query — confirm against pg-promise docs.
const upsertUser = (cairo_id) => {
  var query = insertQuery(table, {
    cairo_id,
    storage: defaultStorage,
    used_storage: 0,
  });
  query += `ON CONFLICT DO NOTHING;\n`;
  query += selectWhereAnyQuery(table, { cairo_id });
  return PG.query(query);
};
// Resolves with the (zero- or one-row) user set for a cairo_id.
const queryUserByCairoId = (cairo_id) => {
  const query = selectWhereAnyQuery(table, { cairo_id });
  return PG.query(query);
};
// Overwrites a user's used_storage tally (bytes).
const updateUsedStorage = (cairo_id, used_storage) => {
  const query = updateWhereAnyQuery(table, { used_storage }, { cairo_id });
  return PG.query(query);
};
// Users-table CRUD surface.
module.exports = {
  upsertUser,
  queryUserByCairoId,
  updateUsedStorage,
};

View file

@ -0,0 +1,6 @@
-- Users: one row per Cairo (auth service) account.
-- NOTE(review): DEFAULT NULL on a PRIMARY KEY column is contradictory (the
-- PK implies NOT NULL), and unique_cairo_id duplicates the PK's implicit
-- unique index — both harmless but redundant.
CREATE TABLE users (
  cairo_id varchar(31) DEFAULT NULL PRIMARY KEY,
  storage bigint DEFAULT 0,      -- quota in bytes
  used_storage bigint DEFAULT 0, -- bytes currently consumed
  CONSTRAINT unique_cairo_id UNIQUE(cairo_id)
);

View file

@ -0,0 +1,15 @@
-- Files: metadata for stored uploads; the bytes live on disk under the
-- uploads bucket (`filename` is the on-disk name, `original_filename` the
-- client-supplied one).
CREATE SEQUENCE files_id_seq;
CREATE TABLE files (
  id bigint NOT NULL DEFAULT nextval('files_id_seq') PRIMARY KEY,
  owner varchar(31) DEFAULT NULL,              -- users.cairo_id of uploader
  filename varchar(255) DEFAULT NULL,
  original_filename varchar(255) DEFAULT NULL,
  filesize bigint DEFAULT 0,                   -- bytes
  created timestamp DEFAULT timezone('utc', now()),
  modified timestamp DEFAULT timezone('utc', now()),
  public boolean DEFAULT FALSE,                -- anyone may download if true
  edit varchar(31)[] DEFAULT NULL,             -- cairo_ids with edit access
  view varchar(31)[] DEFAULT NULL,             -- cairo_ids with read access
  -- NOTE(review): redundant — the PRIMARY KEY already enforces uniqueness.
  CONSTRAINT unique_file_id UNIQUE(id)
);
ALTER SEQUENCE files_id_seq OWNED BY files.id;

View file

@ -1,27 +1,20 @@
// Global configuration
const globals = require("./utils/globals");
// Imports
const { cyanBright, magentaBright } = require("chalk");
const express = require("express");
const bodyParser = require("body-parser");
const bearerToken = require("express-bearer-token");
// Local Imports
const { Server } = require("./config.json");
// Import Routers
const stashRouter = require("./routes/stash-route");
// Define Constants & Setup Database
const figlet = require("figlet");
// Local imports
const postgres = require("./utils/postgres.js");
// Constants
const port = process.env.NUBIAN_DEV_PORT ?? 52001;
const app = express();
const port = Server.Port;
const timeout = 10 * 60 * 1000; // 10 minutes
// Set Up Express session and View engine
app.use(bearerToken());
app.use(bodyParser.json({ limit: Server.BodyLimit }));
app.use(bodyParser.urlencoded({ limit: Server.BodyLimit, extended: false }));
// Routes
app.use(require("./routes/vitals-router"));
app.use("/api/stash", stashRouter);
const startServer = () => {
var server = app.listen(port, () => {
console.log("Node version:" + process.versions.node);
console.log(`Duneserver listening on port ${port}!`);
});
server.timeout = timeout;
server.on("connection", (socket) => socket.setTimeout(timeout));
};
startServer();
app.use("/api/stash", require("./routes/stash-route"));
console.log(cyanBright(figlet.textSync("Nubian", "Dr Pepper")));
console.log(magentaBright(`Up at: ${new Date()}`));
postgres.configure().then(async () => {
const upMsg = `Webserver listening on port ${port}!`;
app.listen(port, () => logSuccess("EXPRESS", upMsg));
});

View file

@ -0,0 +1,51 @@
// Imports
const axios = require("axios");
const express = require("express");
const authMiddleware = express.Router();
const authOptMiddleware = express.Router();
const bearerTokenMiddleware = require("express-bearer-token")();
const crudUsers = require("../crud/users");
// Constants
const { CAIRO_URL } = process.env;
// Methods
// Resolves with Cairo's user-info payload for a bearer token; rejects on
// any non-2xx response or network failure.
const cairoAuthenticate = async (token) => {
  const headers = { Authorization: `Bearer ${token}` };
  const res = await axios.get(`${CAIRO_URL}/api/user/info`, { headers });
  return res.data;
};
// Optional-auth middleware: attaches req.user when a valid token is
// present, but always lets the request continue.
const cairoAuthOptMiddleware = (req, res, next) => {
  if (!req.token) return next();
  cairoAuthenticate(req.token)
    .then((authData) => crudUsers.upsertUser(authData.id))
    .then((users) => (req.user = users[0]))
    // BUG FIX: `.catch()` with NO handler does not swallow rejections, so a
    // bad token skipped the final .then and left the request hanging.
    // Ignore auth errors explicitly — this path is best-effort by design.
    .catch(() => {})
    .then(() => next());
};
// Middleware
// Required-auth middleware: 401 without a token; otherwise resolves the
// token against Cairo, upserts the local user row, and attaches it as
// req.user before continuing.
const cairoAuthMiddleware = (req, res, next) => {
  if (!req.token) return res.status(401).send("Cairo token required!");
  logVerbose("AUTH", `${CAIRO_URL}/api/user/info`);
  cairoAuthenticate(req.token)
    .then((authData) => crudUsers.upsertUser(authData.id))
    .then((users) => (req.user = users[0]))
    .then(() => next())
    .catch((err) => {
      logError("AUTH", err.response ? err.response.data : err.message);
      // No response object means Cairo was never reached (network error).
      if (!err.response) return res.status(500).send(`Auth failure ${err}`);
      // Otherwise mirror Cairo's status/body back to the client.
      return res.status(err.response.status).send(err.response.data);
    });
};
// Routers pre-wired with bearer-token extraction + the auth handlers;
// exposed as globals via utils/globals.js.
module.exports = {
  authMiddleware: authMiddleware.use([
    bearerTokenMiddleware,
    cairoAuthMiddleware,
  ]),
  authOptMiddleware: authOptMiddleware.use([
    bearerTokenMiddleware,
    cairoAuthOptMiddleware,
  ]),
};

View file

@ -0,0 +1,2 @@
// express.json() with a raised body-size limit, exposed globally.
// NOTE(review): the url-encoded middleware uses "500mb" — confirm whether
// these two limits were meant to match.
const bodyParserMiddleware = require("express").json({ limit: "50mb" });
module.exports = bodyParserMiddleware;

View file

@ -0,0 +1,2 @@
// express.urlencoded() with a raised body-size limit, exposed globally.
// NOTE(review): the JSON middleware uses "50mb" — possibly a typo here;
// confirm the intended limit.
const urlEncodedMiddleware = require("express").urlencoded({ limit: "500mb" });
module.exports = urlEncodedMiddleware;

View file

@ -1,86 +1,107 @@
const express = require("express");
const axios = require("axios");
// Local Imports & Configs
const asUser = require("../api/user");
const upload = require("../api/upload");
const storage = require("../api/storage");
const config = require("../config.json");
// Establish path and create router
// Absolute Router Path /api/stash
const router = express.Router();
const cairoMiddleware = (req, res, next) => {
if (req.token == null) return next();
else
axios
.get(`${process.env.CAIRO_URL}/api/user/info`, {
headers: { authorization: `Bearer ${req.token}` },
})
.then((authRes) => {
if (authRes.data && authRes.data.id) {
asUser.load(authRes.data.id).then((user) => {
req.user = user;
next();
});
} else res.status(500).json(authRes.data);
})
.catch((e) => {
if (e.response)
return res
.status(401)
.send(`Auth server responded with: ${e.response.status}`);
console.error(e);
res.sendStatus(500);
});
};
const authMiddleware = (req, res, next) => {
if (req.token == null) return res.sendStatus(401);
next();
};
router.use(cairoMiddleware);
router.get("/files", authMiddleware, (req, res) =>
asUser.getOwnedFiles(req.user.cairoId, req.user.owned).then((files) => {
res.status(200).json(files);
})
);
router.post("/upload", authMiddleware, (req, res) => {
upload.userUpload(req, res, (err) => {
if (err || req.file == null) return res.sendStatus(500);
asUser.uploadFile(req.user.cairoId, req.file).then((file) => {
if (file != null) return res.json(file);
upload.cancelUpload(req.file.path);
return res.sendStatus(500);
});
});
// Imports
const router = require("express").Router();
const storageUtil = require("../utils/storage-util");
const crudFiles = require("../crud/files");
const crudUsers = require("../crud/users");
// Constants — uploads land in buckets/uploads/<cairo_id>/ on disk.
const bucket = "uploads";
const container = (req) => req.user.cairo_id;
const uploadField = "user-selected-file";
const upload = storageUtil.buildBucketUpload(bucket, uploadField, container);
// authMiddleware / bodyParserMiddleware / urlEncodedMiddleware are globals
// installed by utils/globals.js.
const upsertMiddleware = [authMiddleware, bodyParserMiddleware];
const uploadMiddleware = [authMiddleware, urlEncodedMiddleware, upload];
// GET /files — list every file row owned by the authenticated user.
router.get("/files", authMiddleware, (req, res) => {
  crudFiles.queryOwnedFiles(req.user.cairo_id).then((files) => res.json(files));
});
router.post("/delete", authMiddleware, (req, res) => {
if (!req.body || !(req.body instanceof Array)) return res.sendStatus(400);
asUser.deleteFiles(req.user.cairoId, req.body).then((failed) => {
if (!failed) return res.sendStatus(200);
res.status(500).json(failed);
});
// POST /upload — store one multipart file for the authenticated user,
// enforcing the user's storage quota before recording it.
router.post("/upload", uploadMiddleware, async (req, res) => {
  // BUG FIX: multer leaves req.file unset when the field is missing; the
  // original crashed reading req.file.size.
  if (!req.file) return res.sendStatus(400);
  let users;
  try {
    users = await crudUsers.queryUserByCairoId(req.user.cairo_id);
  } catch (err) {
    logError("STASH", "User lookup failed", err);
    return res.sendStatus(500);
  }
  const user = users[0];
  // bigint columns arrive from postgres as strings; coerce before math.
  user.used_storage = parseInt(user.used_storage, 10);
  user.storage = parseInt(user.storage, 10);
  if (req.file.size + user.used_storage > user.storage) {
    // Over quota: drop the bytes multer already wrote to disk.
    storageUtil.cancelUpload(req, req.file.filename);
    return res.sendStatus(500);
  }
  const fileInsert = crudFiles.insertFile(
    req.user.cairo_id,
    req.file.filename,
    req.file.originalname,
    req.file.size
  );
  const userInsert = crudUsers.updateUsedStorage(
    req.user.cairo_id,
    req.file.size + user.used_storage
  );
  Promise.all([fileInsert, userInsert])
    .then((results) => res.json(results[0][0]))
    .catch((err) => {
      // Log instead of silently swallowing the failure reason.
      logError("STASH", "Upload bookkeeping failed", err);
      res.sendStatus(500);
    });
});
router.get("/download", (req, res) => {
if (!req.query || (!req.query.target && !req.query.zipTarget))
return res.sendStatus(404);
const userId = req.user == null ? null : req.user.cairoId;
if (req.query.target)
return storage.getFile(userId, req.query.target).then((file) => {
if (file) return res.download(file.path);
return res.sendStatus(404);
});
return res.sendStatus(404);
// POST /delete — delete a JSON array of file ids: remove DB rows and
// on-disk bytes, then reclaim the user's storage tally.
router.post("/delete", upsertMiddleware, (req, res) => {
  if (!req.body || !Array.isArray(req.body))
    // BUG FIX: res.sendStatus(400).send(...) sends twice ("headers already
    // sent"); status().send() is the intended form.
    return res.status(400).send("Files must be an array!");
  const files = req.body;
  // Technically this should be checked to see if they own it/can edit it
  // but timecrunch so will fix later
  const queries = files.map((fileId) =>
    crudFiles.deleteFile(fileId).then((rows) => {
      try {
        storageUtil.deleteUpload(bucket, req.user.cairo_id, rows[0].filename);
        return rows[0];
      } catch (err) {
        logError("STASH", `Could not delete file(s)`, rows);
        // Pretend zero bytes were reclaimed so accounting stays safe.
        return { filesize: 0 };
      }
    })
  );
  Promise.all(queries)
    .then((results) => {
      // BUG FIX: bigint filesize arrives as a string; the original summed
      // with += and produced string concatenation, corrupting the total.
      let deletedData = 0;
      results.forEach(
        (file) => (deletedData += parseInt(file.filesize, 10) || 0)
      );
      crudUsers
        .updateUsedStorage(
          req.user.cairo_id,
          Math.max(parseInt(req.user.used_storage, 10) - deletedData, 0)
        )
        .catch((err) =>
          logError(
            "STASH",
            "Could not update storage for ",
            req.user.cairo_id,
            err
          )
        );
      res.sendStatus(200);
    })
    .catch((err) => res.status(500).json([]));
});
router.post("/public", authMiddleware, async (req, res) => {
if (!req.body || !(req.body instanceof Array)) return res.sendStatus(400);
const failed = asUser.publicfyFiles(req.user.cairoId, req.body);
if (!failed) return res.sendStatus(200);
res.status(500).json(failed);
// GET /download?target=<fileId> — stream a file when the requester may see
// it: public files for anyone, otherwise owner or view/edit share lists.
router.get("/download", authOptMiddleware, async (req, res) => {
  if (!req.query || !req.query.target) return res.sendStatus(404);
  const files = await crudFiles.getFile(req.query.target);
  if (files.length !== 1) return res.sendStatus(404);
  const file = files[0];
  // Empty share lists are stored as SQL NULL; normalize for .includes().
  file.view = file.view ?? [];
  file.edit = file.edit ?? [];
  var fpath = storageUtil.fetchUpload(bucket, file.owner, file.filename);
  if (file.public) return res.download(fpath); // Send if public
  if (!req.user) return res.sendStatus(401);
  const userId = req.user.cairo_id;
  if (file.owner === userId) return res.download(fpath); // Send if owner
  if (file.edit.includes(userId) || file.view.includes(userId))
    return res.download(fpath); // Send if view/edit
  return res.sendStatus(401);
});
// POST /public — toggle the public flag on each id in the JSON array body.
// NOTE(review): ownership is not verified here (same gap as /delete).
router.post("/public", upsertMiddleware, (req, res) => {
  if (!req.body || !Array.isArray(req.body))
    // BUG FIX: res.sendStatus(400).send(...) sends the response twice.
    return res.status(400).send("Files must be an array!");
  // BUG FIX: the toggles were fire-and-forget, so 200 could be sent before
  // the DB updates ran and failures were unobservable.
  Promise.all(req.body.map((fileId) => crudFiles.setPublic(fileId)))
    .then(() => res.sendStatus(200))
    .catch(() => res.sendStatus(500));
});
module.exports = router;

View file

@ -1,40 +0,0 @@
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const ObjId = mongoose.Types.ObjectId;
// Mongoose schema for stored file metadata (superseded by the Postgres
// `files` table in this migration).
const file = new Schema(
  {
    // Location of the bytes on disk.
    path: {
      type: String,
      required: true,
    },
    // cairoId of the uploader (not required, unlike the other fields).
    owner: { type: String},
    // Original client-supplied filename.
    name: {
      type: String,
      required: true,
    },
    // Epoch-millis prefix of the stored filename, kept as a string.
    date: {
      type: String,
      required: true,
    },
    // Size in bytes.
    size: {
      type: Number,
      required: true,
    },
    // Anyone may download when true.
    public: {
      type: Boolean,
      required: true,
    },
    // userIds granted edit access.
    edit: {
      type: [],
      required: true,
    },
    // userIds granted view access.
    view: {
      type: [],
      required: true,
    },
  },
  { collection: "files" }
);
module.exports = mongoose.model("file", file);

View file

@ -1,30 +0,0 @@
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const ObjId = mongoose.Types.ObjectId;
// Mongoose schema for user accounts (superseded by the Postgres `users`
// table in this migration).
const user = new Schema(
  {
    // Id from the Cairo auth service.
    cairoId: {
      type: String,
      required: true,
    },
    // Storage quota in bytes.
    storage: {
      type: Number,
      required: true,
    },
    // Bytes currently consumed by owned files.
    usedStorage: {
      type: Number,
      required: true,
    },
    // ObjectIds of files this user owns.
    owned: {
      type: [ObjId],
      required: true,
    },
    // Files shared with this user.
    shared: {
      type: [],
      required: true,
    },
  },
  { collection: "users" }
);
module.exports = mongoose.model("user", user);

18
src/utils/globals.js Normal file
View file

@ -0,0 +1,18 @@
// Envar Overrides
// chalk honors FORCE_COLOR; keep colored logs even when stdout isn't a TTY.
process.env.FORCE_COLOR = 1;
// Utils
const logging = require("./logging");
// Middlewares
const bodyParserMiddleware = require("../middlewares/body-parser");
const urlEncodedMiddleware = require("../middlewares/url-encoded");
const authMiddlewares = require("../middlewares/auth-middleware");
// Logging Globals — exposed on `global` so routes/utils can log without
// importing the logging module everywhere.
global.logInfo = logging.logInfo;
global.logError = logging.logError;
global.logSuccess = logging.logSuccess;
global.logVerbose = logging.logVerbose;
// Middlewares — likewise global (used by routes/stash-route.js).
global.bodyParserMiddleware = bodyParserMiddleware;
global.authMiddleware = authMiddlewares.authMiddleware;
global.authOptMiddleware = authMiddlewares.authOptMiddleware;
global.urlEncodedMiddleware = urlEncodedMiddleware;

20
src/utils/logging.js Normal file
View file

@ -0,0 +1,20 @@
// Imports
const { redBright, greenBright, cyanBright, magentaBright } = require("chalk");
// Force terminal to use color
process.env.FORCE_COLOR = "1";
// Methods
// Prints "[HEADER]" in the given chalk color followed by the message args.
const logCustom = (color, header, ...info) =>
  console.log(color(`[${header}]`), ...info);
// Extensions — one helper per severity/color.
const logInfo = (header, ...args) => logCustom(cyanBright, header, ...args);
const logError = (header, ...args) => logCustom(redBright, header, ...args);
const logSuccess = (header, ...args) => logCustom(greenBright, header, ...args);
const logVerbose = (header, ...args) =>
  logCustom(magentaBright, header, ...args);
module.exports = {
  logInfo,
  logError,
  logSuccess,
  logVerbose,
};

View file

@ -0,0 +1,127 @@
// Shallow copy of `entry` with undefined-valued columns dropped, so
// optional fields never reach the generated SQL.
const buildPostgresEntry = (entry) => {
  const pgEntry = { ...entry };
  Object.keys(pgEntry).forEach((col) => {
    if (pgEntry[col] === undefined) delete pgEntry[col];
  });
  return pgEntry;
};
// Converts a JS value to its SQL-literal text: strings are quoted/escaped,
// string arrays become ARRAY literals, null and empty arrays map to SQL
// NULL, and everything else (numbers, booleans) passes through verbatim.
const buildPostgresValue = (jsVar) => {
  if (jsVar === null) return "null";
  if (typeof jsVar === "string") return buildPostgresString(jsVar);
  if (Array.isArray(jsVar) && jsVar.length === 0) return "null";
  if (Array.isArray(jsVar) && isTypeArray(jsVar, "string"))
    return buildPostgresStringArray(jsVar);
  return jsVar;
};
// "ARRAY ['a','b']" literal for a non-empty string array.
// NOTE(review): elements are not quote-escaped here — do not feed
// untrusted strings through this path.
const buildPostgresStringArray = (jsonArray) => {
  if (jsonArray.length === 0) return null;
  const quoted = jsonArray.map((e) => `'${e}'`);
  return `ARRAY [${quoted.join(",")}]`;
};
// True when every element of the array has the given typeof.
const isTypeArray = (jsonArray, type) =>
  jsonArray.every((e) => typeof e === type);
// Single-quoted SQL string with embedded quotes doubled.
// NOTE: an empty string yields null (i.e. SQL NULL), not "''".
const buildPostgresString = (jsonString) =>
  (jsonString && `'${jsonString.replaceAll("'", "''")}'`) || null;
// INSERT INTO table("c1","c2")\nVALUES(v1,v2) — no trailing semicolon so
// callers may append RETURNING / ON CONFLICT clauses.
const insertQuery = (table, jsEntry) => {
  if (typeof jsEntry !== "object") throw Error("PG Inserts must be objects!");
  const entry = buildPostgresEntry(jsEntry);
  const cols = Object.keys(entry);
  cols.forEach((col, i) => {
    entry[col] = buildPostgresValue(entry[col]);
    // BUG FIX: the original wrote `col[i] = ...` — a silent no-op on a
    // string — so column names were never actually double-quoted.
    cols[i] = `"${col}"`;
  });
  let query = `INSERT INTO ${table}(${cols.join(",")})\n`;
  query += `VALUES(${Object.values(entry).join(",")})`;
  return query;
};
// DELETE FROM table x WHERE x.c1=v1 AND x.c2=v2
const deleteQuery = (table, jsEntry) => {
  if (typeof jsEntry !== "object")
    throw Error("PG Delete conditionals must be an object!");
  const entry = buildPostgresEntry(jsEntry);
  const conditionals = Object.keys(entry).map((col) => {
    entry[col] = buildPostgresValue(entry[col]);
    return `x.${col}=${entry[col]}`;
  });
  return `DELETE FROM ${table} x WHERE ${conditionals.join(" AND ")}`;
};
// ON CONFLICT (c1,...) DO UPDATE SET c=v,... — appended to an insertQuery.
const onConflictUpdate = (conflicts, updates) => {
  if (!Array.isArray(conflicts)) throw Error("PG Conflicts must be an array!");
  if (typeof updates !== "object") throw Error("PG Updates must be objects!");
  const entry = buildPostgresEntry(updates);
  let query = `ON CONFLICT (${conflicts.join(",")}) DO UPDATE SET\n`;
  const cols = Object.keys(entry);
  for (const col of cols) {
    entry[col] = buildPostgresValue(entry[col]);
  }
  query += cols.map((c) => `${c}=${entry[c]}`).join(",");
  return query;
};
// TRUNCATE table
const clearTableQuery = (table) => {
  return `TRUNCATE ${table}`;
};
// SELECT * FROM table AS x WHERE x.c1=v1 <joinWith> x.c2=v2 ...
const selectWhereQuery = (table, jsEntry, joinWith) => {
  if (typeof jsEntry !== "object") throw Error("PG Where must be an object!");
  const where = buildPostgresEntry(jsEntry);
  const cols = Object.keys(where);
  let query = `SELECT * FROM ${table} AS x WHERE\n`;
  for (const col of cols) {
    where[col] = buildPostgresValue(where[col]);
  }
  return (query += cols.map((c) => `x.${c}=${where[c]}`).join(joinWith));
};
// UPDATE table SET c=v,... WHERE c=v <joinWith> ...
const updateWhereQuery = (table, updates, wheres, joinWith) => {
  if (typeof updates !== "object") throw Error("PG Updates must be an object!");
  if (typeof wheres !== "object") throw Error("PG Wheres must be an object!");
  const update = buildPostgresEntry(updates);
  const where = buildPostgresEntry(wheres);
  const updateQuery = Object.keys(update)
    .map((c) => `${c} = ${buildPostgresValue(update[c])}`)
    .join(",");
  const whereQuery = Object.keys(where)
    .map((c) => `${c} = ${buildPostgresValue(where[c])}`)
    .join(joinWith);
  return `UPDATE ${table}\nSET ${updateQuery} WHERE ${whereQuery}`;
};
// Convenience variants: OR-joined vs AND-joined conditions.
const updateWhereAnyQuery = (table, updates, wheres) =>
  updateWhereQuery(table, updates, wheres, " OR ");
const updateWhereAllQuery = (table, updates, wheres) =>
  updateWhereQuery(table, updates, wheres, " AND ");
const selectWhereAnyQuery = (table, where) =>
  selectWhereQuery(table, where, " OR ");
const selectWhereAllQuery = (table, where) =>
  selectWhereQuery(table, where, " AND ");
// SQL string builders shared by the crud modules.
module.exports = {
  selectWhereAnyQuery,
  selectWhereAllQuery,
  updateWhereAnyQuery,
  updateWhereAllQuery,
  insertQuery,
  deleteQuery,
  buildPostgresValue,
  onConflictUpdate,
  clearTableQuery,
};

31
src/utils/postgres.js Normal file
View file

@ -0,0 +1,31 @@
// Imports
const { migrate } = require("postgres-migrations");
const pgp = require("pg-promise")();
const moment = require("moment");
// Ensure dates get saved as UTC date strings
// This prevents the parser from doing strange datetime operations
pgp.pg.types.setTypeParser(1114, (str) => moment.utc(str).format());
// Constants
// NOTE(review): env vars are strings, so NUBIAN_POSTGRES_DISABLED="false"
// still disables Postgres (any non-empty value is truthy) — confirm intent.
const NUBIAN_POSTGRES_DISABLED = process.env.NUBIAN_POSTGRES_DISABLED ?? false;
const database = "nubian";
const dbConfig = {
  database,
  user: process.env.NUBIAN_POSTGRES_USER,
  password: process.env.NUBIAN_POSTGRES_PASSWORD,
  host: process.env.NUBIAN_POSTGRES_HOST,
  port: 5432,
  ensureDatabaseExists: true, // postgres-migrations creates the DB if absent
};
// Runs pending SQL migrations, then connects and publishes the pg-promise
// handle as the global `PG` used by the crud modules.
const configure = async () => {
  if (NUBIAN_POSTGRES_DISABLED) return logInfo("POSTGRES", "Postgres Disabled");
  await migrate(dbConfig, "src/db-migrations");
  // Override the global variable DB
  global.PG = pgp(dbConfig);
  await PG.connect();
  logSuccess("POSTGRES", `Connected to database ${database}!`);
};
module.exports = { configure };

55
src/utils/storage-util.js Normal file
View file

@ -0,0 +1,55 @@
// Imports
const fs = require("fs");
const { resolve: rsPath } = require("path");
const multer = require("multer");
// Constants
// Root directory for all buckets (overridable via NUBIAN_BUCKET_PATH).
const bucketsPath = process.env.NUBIAN_BUCKET_PATH ?? "buckets/";
// Methods
// multer disk storage writing into <buckets>/<bucket>/<container>, naming
// files "<epoch millis>-<original name with spaces underscored>" and
// removing the partial file if the client aborts mid-upload.
const bucketStorage = (bucketStoragePath, containerCb) =>
  multer.diskStorage({
    destination: (req, file, cb) =>
      cb(null, upsertDestination(req, bucketStoragePath, containerCb)),
    filename: (req, file, cb) => {
      const safeName = file.originalname.replaceAll(" ", "_");
      const storedName = `${Date.now()}-${safeName}`;
      req.on("aborted", () => cancelUpload(req, storedName));
      cb(null, storedName);
    },
  });
// Single-file upload middleware for the given bucket and form field.
const buildBucketUpload = (bucketStoragePath, fieldName, containerCb) => {
  const storage = bucketStorage(bucketStoragePath, containerCb);
  return multer({ storage }).single(fieldName);
};
// Removes a file from the destination recorded on the request by
// upsertDestination (used for aborted or over-quota uploads).
const cancelUpload = (req, fileName) => {
  deletePath(rsPath(req.bucketStoragePath, fileName));
};
// Unlinks `path` when it exists; returns false otherwise.
const deletePath = (path) => fs.existsSync(path) && fs.unlinkSync(path);
// Deletes a stored upload addressed by bucket/container/filename.
const deleteUpload = (bucketStoragePath, container, filename) =>
  deletePath(fetchUpload(bucketStoragePath, container, filename));
// Absolute path of a stored upload.
const fetchUpload = (bucketStoragePath, container, filename) =>
  rsPath(bucketsPath, bucketStoragePath, container, filename);
// Ensures <buckets>/<bucket>/<container> exists, records it on the request
// (read later by cancelUpload), and returns it for multer's destination cb.
const upsertDestination = (req, bucketStoragePath, containerCb) => {
  var container = "";
  if (containerCb) {
    if (typeof containerCb !== "function")
      throw Error("Container Callback must be a function!");
    container = containerCb(req);
  }
  req.bucketStoragePath = rsPath(bucketsPath, bucketStoragePath, container);
  fs.mkdirSync(req.bucketStoragePath, { recursive: true });
  return req.bucketStoragePath;
};
// Bucket-storage helpers for multer-backed uploads.
module.exports = {
  buildBucketUpload,
  cancelUpload,
  deleteUpload,
  fetchUpload,
};