From 94e416fd35a21466eafe31bf9d39d1341aa85731 Mon Sep 17 00:00:00 2001
From: Elijah Dunemask
Date: Thu, 18 Nov 2021 00:21:58 +0000
Subject: [PATCH] Migrate to Postgres over Mongo

---
 .dockerignore                              |   3 +-
 .gitignore                                 |   3 +-
 package.json                               |   6 +-
 src/api/storage.js                         | 144 -----------------
 src/api/upload.js                          |  41 -----
 src/api/user.js                            |  89 -----------
 src/config.json                            |  12 --
 src/crud/files.js                          |  53 ++++++
 src/crud/users.js                          |  36 +++++
 src/db-migrations/1_create_users_table.sql |   6 +
 src/db-migrations/2_create_files_table.sql |  15 ++
 src/index.js                               |  39 ++---
 src/middlewares/auth-middleware.js         |  51 ++++++
 src/middlewares/body-parser.js             |   2 +
 src/middlewares/url-encoded.js             |   2 +
 src/routes/stash-route.js                  | 177 ++++++++++++---------
 src/schemas/file.js                        |  40 -----
 src/schemas/user.js                        |  30 ----
 src/utils/globals.js                       |  18 +++
 src/utils/logging.js                       |  20 +++
 src/utils/postgres-common.js               | 127 +++++++++++++
 src/utils/postgres.js                      |  31 ++++
 src/utils/storage-util.js                  |  55 +++++++
 23 files changed, 534 insertions(+), 466 deletions(-)
 delete mode 100644 src/api/storage.js
 delete mode 100644 src/api/upload.js
 delete mode 100644 src/api/user.js
 delete mode 100644 src/config.json
 create mode 100644 src/crud/files.js
 create mode 100644 src/crud/users.js
 create mode 100644 src/db-migrations/1_create_users_table.sql
 create mode 100644 src/db-migrations/2_create_files_table.sql
 create mode 100644 src/middlewares/auth-middleware.js
 create mode 100644 src/middlewares/body-parser.js
 create mode 100644 src/middlewares/url-encoded.js
 delete mode 100644 src/schemas/file.js
 delete mode 100644 src/schemas/user.js
 create mode 100644 src/utils/globals.js
 create mode 100644 src/utils/logging.js
 create mode 100644 src/utils/postgres-common.js
 create mode 100644 src/utils/postgres.js
 create mode 100644 src/utils/storage-util.js

diff --git a/.dockerignore b/.dockerignore
index 3a554d4..6248135 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,2 +1 @@
-src/uploads/
-uploads/
+buckets/
diff --git a/.gitignore b/.gitignore
index f155490..152e81d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,5 +9,4 @@ src/uploads/
 .env
 .env.dev
 .env.prod
-newsrc/
-uploads/
+buckets/
diff --git a/package.json b/package.json
index 04ade14..f1735a9 100644
--- a/package.json
+++ b/package.json
@@ -19,19 +19,15 @@
   "homepage": "https://gitlab.com/Dunemask/nubian#readme",
   "dependencies": {
     "axios": "^0.21.1",
-    "body-parser": "^1.19.0",
     "chalk": "^4.1.2",
     "express": "^4.17.1",
     "express-bearer-token": "^2.4.0",
     "figlet": "^1.5.2",
     "moment": "^2.29.1",
-    "mongoose": "^5.13.3",
     "multer": "^1.4.2",
-    "multer-gridfs-storage": "^5.0.2",
     "path": "^0.12.7",
     "pg-promise": "^10.11.1",
-    "postgres-migrations": "^5.3.0",
-    "rimraf": "^3.0.2"
+    "postgres-migrations": "^5.3.0"
   },
   "devDependencies": {
     "nodemon": "^2.0.14"
diff --git a/src/api/storage.js b/src/api/storage.js
deleted file mode 100644
index 36a5b2f..0000000
--- a/src/api/storage.js
+++ /dev/null
@@ -1,144 +0,0 @@
-//Module Imports
-const { resolve: resolvePath } = require("path");
-const {
-  existsSync: fexists,
-  mkdirSync: mkdir,
-  readdirSync: readdir,
-  unlinkSync: fremove,
-} = require("fs");
-const config = require("../config.json");
-const mongoose = require("mongoose");
-mongoose.connect(
-  `mongodb://${process.env.NUBIAN_MONGO_HOST}/nubian?authSource=admin`,
-  {
-    useUnifiedTopology: true,
-    useNewUrlParser: true,
-    user: process.env.NUBIAN_MONGO_USERNAME,
-    pass: process.env.NUBIAN_MONGO_PASSWORD,
-  }
-);
-
-const users = require("../schemas/user");
-const files = require("../schemas/file");
-
-function authorizedToView(userId, file) {
-  if (`${file.owner}` == (userId = `${userId}`) || file.public) return true;
-  if (file.view.includes(userId) || file.edit.includes(userId)) return true;
-  return false;
-}
-
-function authorizedToEdit(userId, file) {
-  return `${file.owner}` == `${userId}` || file.edit.includes(`${userId}`);
-}
-
-function getFile(userId, fileId) {
-  return files.findOne({ _id: fileId }).then((file) => {
-    if (authorizedToView(userId, file)) return file;
-    return null;
-  });
-}
-
-function deleteFiles(userId, fileIds) {
-  return files.find({ _id: { $in: fileIds } }).then((databaseFiles) => {
-    var failed = [];
-    var toRemove = [];
-    filesByOwner = {};
-    databaseFiles.forEach((file) => {
-      if (file.owner in filesByOwner) filesByOwner[file.owner].push(file);
-      else filesByOwner[file.owner] = [file];
-    });
-    for (var owner in filesByOwner) {
-      var deleteSize = 0;
-      for (var i = filesByOwner[owner].length - 1; i >= 0; i--) {
-        let file = filesByOwner[owner][i];
-        if (!authorizedToEdit(userId, file)) {
-          failed.push(`${file._id}`);
-          filesByOwner[owner].splice(i, 1);
-        } else deleteSize += file.size;
-      }
-      fileIds = fileIds.filter((fileId) => !failed.includes(fileId));
-      toRemove.concat(
-        databaseFiles.filter((file) => fileIds.includes(`${file._id}`))
-      );
-      // Update User's owned
-      users
-        .updateOne(
-          { cairoId: owner, usedStorage: { $gte: deleteSize } },
-          {
-            $pull: { owned: { $in: fileIds } },
-            $inc: {
-              usedStorage: -deleteSize,
-            },
-          }
-        )
-        .exec();
-      // Update files DB
-      files.deleteMany({ _id: { $in: fileIds } }).exec();
-      console.log(toRemove);
-    }
-    return { files: toRemove, failed };
-  });
-}
-
-function publicfyFiles(userId, targetFiles) {
-  return files.find({ _id: { $in: targetFiles } }).then((databaseFiles) => {
-    var failed = [];
-    databaseFiles.forEach((file) => {
-      if (!authorizedToEdit(userId, file)) failed.push(`${file._id}`);
-      else files.updateOne({ _id: file._id }, { public: !file.public }).exec();
-    });
-    return failed;
-  });
-}
-
-function createUser(cairoId) {
-  return users.create({
-    cairoId,
-    usedStorage: 0,
-    storage: config.Storage.UserStorageSize * config.Storage.UserStorageUnit,
-    owned: [],
-    shared: [],
-  });
-}
-
-function getUserByCairoId(cairoId) {
-  return users.findOne({ cairoId }).then((user) => {
-    if (!user) return createUser(cairoId);
-    return user;
-  });
-}
-
-function uploadFile(cairoId, fileData) {
-  return getUserByCairoId(cairoId).then((user) => {
-    if (user.usedStorage + fileData.size > user.storage) return null;
-    return users
-      .updateOne({ cairoId }, { $inc: { usedStorage: fileData.size } })
-      .then(() => createFile(cairoId, fileData))
-      .then((file) => {
-        if (file == null) return null;
-        users.updateOne({ cairoId }, { $push: { owned: file._id } }).then();
-        return file;
-      });
-  });
-}
-
-function createFile(userId, fileData) {
-  return files.create({
-    path: fileData.path,
-    owner: userId,
-    name: fileData.originalname,
-    date: fileData.filename.substring(0, fileData.filename.indexOf("-")),
-    size: fileData.size,
-    public: false,
-    edit: [],
-    view: [],
-  });
-}
-module.exports = {
-  deleteFiles,
-  getFile,
-  createUser,
-  getUserByCairoId,
-  publicfyFiles,
-  uploadFile,
-};
diff --git a/src/api/upload.js b/src/api/upload.js
deleted file mode 100644
index bd5b640..0000000
--- a/src/api/upload.js
+++ /dev/null
@@ -1,41 +0,0 @@
-//Module Imports
-const fs = require("fs");
-const { resolve: resolvePath } = require("path");
-const multer = require("multer");
-//Local Imports
-const config = require("../config.json");
-//Multer Configs
-const userUploadStorage = multer.diskStorage({
-  destination: (req, file, cb) =>
-    cb(null, userUploadDestination(req.user.cairoId)),
-  filename: (req, file, cb) => {
-    const n = file.originalname.replaceAll(" ", "_");
-    const fileName = `${Date.now()}-${n}`;
-    req.on("aborted", () =>
-      cancelUpload(
-        resolvePath(userUploadDestination(req.user.cairoId), fileName)
-      )
-    );
-    cb(null, fileName);
-  },
-});
-const userUpload = multer({
-  storage: userUploadStorage,
-}).single("user-selected-file");
-
-//Helper Methods
-function userUploadDestination(user_id) {
-  if (!fs.existsSync("uploads")) fs.mkdirSync("uploads");
-  const destination = resolvePath(`uploads/${user_id}`);
-  if (!fs.existsSync(destination)) fs.mkdirSync(destination);
-  return destination;
-}
-
-function cancelUpload(path) {
-  if (path != null && fs.existsSync(path)) fs.unlinkSync(path);
-}
-
-module.exports = {
-  userUpload,
-  cancelUpload,
-};
diff --git a/src/api/user.js b/src/api/user.js
deleted file mode 100644
index a7d7993..0000000
--- a/src/api/user.js
+++ /dev/null
@@ -1,89 +0,0 @@
-//Module Imports
-const { resolve: resolvePath } = require("path");
-const { existsSync: fexists, unlinkSync: fremove } = require("fs");
-//Local Imports
-const storage = require("./storage");
-const config = require("../config.json");
-
-function load(cairoId) {
-  return storage.getUserByCairoId(cairoId);
-}
-
-/**
- * Create a user with a cairoId (should use Dunestorm API to login)
- */
-function createUser(cairoId) {
-  storage.createUser(cairoId);
-}
-/**
- * Creates file entry given aspects of a file updated
- */
-function uploadFile(cairoId, fileData) {
-  return storage.uploadFile(cairoId, fileData);
-}
-/**
- * Deletes files.
- * Requires cairoId to garuntee permission to delete a file
- * Sorts files by user before deleting to speed up reference updates
- */
-function deleteFiles(cairoId, targetFiles) {
-  return storage.deleteFiles(cairoId, targetFiles).then((deleteData) => {
-    var files = deleteData.files;
-    var deleteFails = deleteData.failed;
-    files.forEach((file) => {
-      try {
-        fremove(file.path);
-      } catch (e) {
-        console.error("Error deleting file ", file.name, "\nPath:", file.path);
-        deleteFails.push(`${file._id}`);
-      }
-    });
-    return deleteFails.length > 0 && deleteFails;
-  });
-}
-/**
- * Returns a list of filecairoIds that the user owns
- */
-function getOwnedFiles(cairoId, fileList) {
-  var files = new Array(fileList.length);
-  fileList.forEach(
-    (file, i) =>
-      (files[i] = new Promise((resolve, reject) =>
-        storage.getFile(cairoId, file).then(resolve).catch(reject)
-      ))
-  );
-  return Promise.all(files);
-}
-/**
- * TODO: Impliment Advanced Sharing
- * Shares file with various people, and various permissions
- */
-function shareFile(cairoId, targetFile) {
-  console.log(cairoId, "requesting to share file");
-  console.log(targetFile);
-}
-/**
- * TODO: Impliment Advanced Sharing
- * Returns all files shared with a user
- */
-function getSharedFiles(cairoId) {
-  return storage.getSharedFileList(cairoId);
-}
-/**
- * Checks if a the user is the owner and then toggles the list of files to public
- */
-function publicfyFiles(cairoId, files) {
-  var publicfyFails = [];
-  storage.publicfyFiles(cairoId, files);
-  return publicfyFails.length > 0 && publicfyFails;
-}
-module.exports = {
-  createUser,
-  uploadFile,
-  deleteFiles,
-  getOwnedFiles,
-  publicfyFiles,
-  shareFile,
-  getSharedFiles,
-  load,
-};
diff --git a/src/config.json b/src/config.json
deleted file mode 100644
index 3cd6c4d..0000000
--- a/src/config.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "Storage": {
-    "UserStorageSize": 25,
-    "UserStorageUnit": 1048576,
-    "UploadMaxSize": ""
-  },
-  "Server": {
-    "Port": 52001,
-    "Debug": false,
-    "BodyLimit": "5mb"
-  }
-}
diff --git a/src/crud/files.js b/src/crud/files.js
new file mode 100644
index 0000000..ec29f06
--- /dev/null
+++ b/src/crud/files.js
@@ -0,0 +1,53 @@
+// Imports
+const {
+  insertQuery,
+  selectWhereAllQuery,
+  selectWhereAnyQuery,
+  updateWhereAnyQuery,
+  deleteQuery,
+} = require("../utils/postgres-common");
+// Constants
+const table = "files";
+// Queries
+const insertFile = (owner, filename, original_filename, filesize) => {
+  var query = insertQuery(table, {
+    owner,
+    filename,
+    original_filename,
+    filesize,
+    public: false,
+    edit: [],
+    view: [],
+  });
+  query += `\nRETURNING *;`;
+  return PG.query(query);
+};
+
+const queryOwnedFiles = (owner) => {
+  var query = selectWhereAllQuery(table, { owner });
+  return PG.query(query);
+};
+
+const deleteFile = (fileId) => {
+  var query = deleteQuery(table, { id: fileId });
+  query += "\nRETURNING *";
+  return PG.query(query);
+};
+
+const getFile = (fileId) => {
+  const query = selectWhereAllQuery(table, { id: fileId });
+  return PG.query(query);
+};
+
+const setPublic = (fileId) => {
+  const query = `UPDATE ${table} SET public = NOT public WHERE id='${fileId}'`;
+  return PG.query(query);
+};
+
+module.exports = {
+  insertFile,
+  queryOwnedFiles,
+  deleteFile,
+  getFile,
+  setPublic,
+};
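
Example: the SQL text these helpers hand to PG.query, assuming the query builders
from src/utils/postgres-common.js further down in this patch (identifiers, names,
and sizes are illustrative):

    insertFile("cairo123", "1637193718000-notes.txt", "notes.txt", 1024);
    // INSERT INTO files("owner","filename","original_filename","filesize","public","edit","view")
    // VALUES('cairo123','1637193718000-notes.txt','notes.txt',1024,false,null,null)
    // RETURNING *;

    queryOwnedFiles("cairo123");
    // SELECT * FROM files AS x WHERE
    // x.owner='cairo123'
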
diff --git a/src/crud/users.js b/src/crud/users.js
new file mode 100644
index 0000000..3616673
--- /dev/null
+++ b/src/crud/users.js
@@ -0,0 +1,36 @@
+// Imports
+const {
+  insertQuery,
+  selectWhereAnyQuery,
+  updateWhereAnyQuery,
+} = require("../utils/postgres-common");
+// Constants
+const table = "users";
+const defaultStorage = process.env.NUBIAN_DEFAULT_STORAGE ?? 0;
+// Queries
+const upsertUser = (cairo_id) => {
+  var query = insertQuery(table, {
+    cairo_id,
+    storage: defaultStorage,
+    used_storage: 0,
+  });
+  query += ` ON CONFLICT DO NOTHING;\n`;
+  query += selectWhereAnyQuery(table, { cairo_id });
+  return PG.query(query);
+};
+
+const queryUserByCairoId = (cairo_id) => {
+  const query = selectWhereAnyQuery(table, { cairo_id });
+  return PG.query(query);
+};
+
+const updateUsedStorage = (cairo_id, used_storage) => {
+  const query = updateWhereAnyQuery(table, { used_storage }, { cairo_id });
+  return PG.query(query);
+};
+
+module.exports = {
+  upsertUser,
+  queryUserByCairoId,
+  updateUsedStorage,
+};
diff --git a/src/db-migrations/1_create_users_table.sql b/src/db-migrations/1_create_users_table.sql
new file mode 100644
index 0000000..0f23c2e
--- /dev/null
+++ b/src/db-migrations/1_create_users_table.sql
@@ -0,0 +1,6 @@
+CREATE TABLE users (
+  cairo_id varchar(31) DEFAULT NULL PRIMARY KEY,
+  storage bigint DEFAULT 0,
+  used_storage bigint DEFAULT 0,
+  CONSTRAINT unique_cairo_id UNIQUE(cairo_id)
+);
diff --git a/src/db-migrations/2_create_files_table.sql b/src/db-migrations/2_create_files_table.sql
new file mode 100644
index 0000000..d06061b
--- /dev/null
+++ b/src/db-migrations/2_create_files_table.sql
@@ -0,0 +1,15 @@
+CREATE SEQUENCE files_id_seq;
+CREATE TABLE files (
+  id bigint NOT NULL DEFAULT nextval('files_id_seq') PRIMARY KEY,
+  owner varchar(31) DEFAULT NULL,
+  filename varchar(255) DEFAULT NULL,
+  original_filename varchar(255) DEFAULT NULL,
+  filesize bigint DEFAULT 0,
+  created timestamp DEFAULT timezone('utc', now()),
+  modified timestamp DEFAULT timezone('utc', now()),
+  public boolean DEFAULT FALSE,
+  edit varchar(31)[] DEFAULT NULL,
+  view varchar(31)[] DEFAULT NULL,
+  CONSTRAINT unique_file_id UNIQUE(id)
+);
+ALTER SEQUENCE files_id_seq OWNED BY files.id;
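
Example: upsertUser() issues a two-statement query, so pg-promise resolves with the
rows of the trailing SELECT. The sketch assumes NUBIAN_DEFAULT_STORAGE=26214400,
the 25 MiB quota the deleted config.json expressed as 25 * 1048576:

    upsertUser("cairo123");
    // INSERT INTO users("cairo_id","storage","used_storage")
    // VALUES('cairo123','26214400',0) ON CONFLICT DO NOTHING;
    // SELECT * FROM users AS x WHERE
    // x.cairo_id='cairo123'
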
diff --git a/src/index.js b/src/index.js
index 2f066cc..4c62199 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,27 +1,20 @@
+// Global configuration
+const globals = require("./utils/globals");
 // Imports
+const { cyanBright, magentaBright } = require("chalk");
 const express = require("express");
-const bodyParser = require("body-parser");
-const bearerToken = require("express-bearer-token");
-// Local Imports
-const { Server } = require("./config.json");
-// Import Routers
-const stashRouter = require("./routes/stash-route");
-// Define Constants & Setup Database
+const figlet = require("figlet");
+// Local imports
+const postgres = require("./utils/postgres.js");
+// Constants
+const port = process.env.NUBIAN_DEV_PORT ?? 52001;
 const app = express();
-const port = Server.Port;
-const timeout = 10 * 60 * 1000; // 10 minutes
-// Set Up Express session and View engine
-app.use(bearerToken());
-app.use(bodyParser.json({ limit: Server.BodyLimit }));
-app.use(bodyParser.urlencoded({ limit: Server.BodyLimit, extended: false }));
+// Routes
 app.use(require("./routes/vitals-router"));
-app.use("/api/stash", stashRouter);
-const startServer = () => {
-  var server = app.listen(port, () => {
-    console.log("Node version:" + process.versions.node);
-    console.log(`Duneserver listening on port ${port}!`);
-  });
-  server.timeout = timeout;
-  server.on("connection", (socket) => socket.setTimeout(timeout));
-};
-startServer();
+app.use("/api/stash", require("./routes/stash-route"));
+console.log(cyanBright(figlet.textSync("Nubian", "Dr Pepper")));
+console.log(magentaBright(`Up at: ${new Date()}`));
+postgres.configure().then(async () => {
+  const upMsg = `Webserver listening on port ${port}!`;
+  app.listen(port, () => logSuccess("EXPRESS", upMsg));
+});
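
With src/config.json deleted, configuration now comes entirely from the environment.
The variables this patch reads, with the defaults taken from the code:

    // NUBIAN_DEV_PORT            HTTP port (defaults to 52001)
    // NUBIAN_POSTGRES_HOST       Postgres host (port is fixed at 5432)
    // NUBIAN_POSTGRES_USER       Postgres credentials
    // NUBIAN_POSTGRES_PASSWORD
    // NUBIAN_POSTGRES_DISABLED   any non-empty value skips migrations and connection
    // NUBIAN_DEFAULT_STORAGE     per-user quota in bytes (defaults to 0)
    // NUBIAN_BUCKET_PATH         upload root on disk (defaults to "buckets/")
    // CAIRO_URL                  auth server queried by the middlewares
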
diff --git a/src/middlewares/auth-middleware.js b/src/middlewares/auth-middleware.js
new file mode 100644
index 0000000..c2c00a9
--- /dev/null
+++ b/src/middlewares/auth-middleware.js
@@ -0,0 +1,51 @@
+// Imports
+const axios = require("axios");
+const express = require("express");
+const authMiddleware = express.Router();
+const authOptMiddleware = express.Router();
+const bearerTokenMiddleware = require("express-bearer-token")();
+const crudUsers = require("../crud/users");
+// Constants
+const { CAIRO_URL } = process.env;
+// Methods
+const cairoAuthenticate = async (token) => {
+  const config = { headers: { Authorization: `Bearer ${token}` } };
+  return axios
+    .get(`${CAIRO_URL}/api/user/info`, config)
+    .then((res) => res.data);
+};
+
+const cairoAuthOptMiddleware = (req, res, next) => {
+  if (!req.token) next();
+  else
+    cairoAuthenticate(req.token)
+      .then((authData) => crudUsers.upsertUser(authData.id))
+      .then((users) => (req.user = users[0]))
+      .catch(() => null) // Auth is optional here; swallow failures so next() still runs
+      .then(() => next());
+};
+// Middleware
+const cairoAuthMiddleware = (req, res, next) => {
+  if (!req.token) return res.status(401).send("Cairo token required!");
+  logVerbose("AUTH", `${CAIRO_URL}/api/user/info`);
+  cairoAuthenticate(req.token)
+    .then((authData) => crudUsers.upsertUser(authData.id))
+    .then((users) => (req.user = users[0]))
+    .then(() => next())
+    .catch((err) => {
+      logError("AUTH", err.response ? err.response.data : err.message);
+      if (!err.response) return res.status(500).send(`Auth failure ${err}`);
+      return res.status(err.response.status).send(err.response.data);
+    });
+};
+
+module.exports = {
+  authMiddleware: authMiddleware.use([
+    bearerTokenMiddleware,
+    cairoAuthMiddleware,
+  ]),
+  authOptMiddleware: authOptMiddleware.use([
+    bearerTokenMiddleware,
+    cairoAuthOptMiddleware,
+  ]),
+};
diff --git a/src/middlewares/body-parser.js b/src/middlewares/body-parser.js
new file mode 100644
index 0000000..7f7914c
--- /dev/null
+++ b/src/middlewares/body-parser.js
@@ -0,0 +1,2 @@
+const bodyParserMiddleware = require("express").json({ limit: "50mb" });
+module.exports = bodyParserMiddleware;
diff --git a/src/middlewares/url-encoded.js b/src/middlewares/url-encoded.js
new file mode 100644
index 0000000..c6bd849
--- /dev/null
+++ b/src/middlewares/url-encoded.js
@@ -0,0 +1,2 @@
+const urlEncodedMiddleware = require("express").urlencoded({ limit: "500mb", extended: true });
+module.exports = urlEncodedMiddleware;
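
These middlewares are published as globals by src/utils/globals.js (further down),
so routes can reference authMiddleware without an import. A minimal sketch of a
protected route under that assumption (the /whoami endpoint is hypothetical):

    router.get("/whoami", authMiddleware, (req, res) => {
      // express-bearer-token filled req.token; the Cairo upsert filled req.user
      res.json({ cairoId: req.user.cairo_id, storage: req.user.storage });
    });
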
diff --git a/src/routes/stash-route.js b/src/routes/stash-route.js
index 8303a60..ff3cebb 100644
--- a/src/routes/stash-route.js
+++ b/src/routes/stash-route.js
@@ -1,86 +1,107 @@
-const express = require("express");
-const axios = require("axios");
-// Local Imports & Configs
-const asUser = require("../api/user");
-const upload = require("../api/upload");
-const storage = require("../api/storage");
-const config = require("../config.json");
-// Establish path and create router
-// Absolute Router Path /api/stash
-const router = express.Router();
-
-const cairoMiddleware = (req, res, next) => {
-  if (req.token == null) return next();
-  else
-    axios
-      .get(`${process.env.CAIRO_URL}/api/user/info`, {
-        headers: { authorization: `Bearer ${req.token}` },
-      })
-      .then((authRes) => {
-        if (authRes.data && authRes.data.id) {
-          asUser.load(authRes.data.id).then((user) => {
-            req.user = user;
-            next();
-          });
-        } else res.status(500).json(authRes.data);
-      })
-      .catch((e) => {
-        if (e.response)
-          return res
-            .status(401)
-            .send(`Auth server responded with: ${e.response.status}`);
-        console.error(e);
-        res.sendStatus(500);
-      });
-};
-
-const authMiddleware = (req, res, next) => {
-  if (req.token == null) return res.sendStatus(401);
-  next();
-};
-router.use(cairoMiddleware);
-
-router.get("/files", authMiddleware, (req, res) =>
-  asUser.getOwnedFiles(req.user.cairoId, req.user.owned).then((files) => {
-    res.status(200).json(files);
-  })
-);
-
-router.post("/upload", authMiddleware, (req, res) => {
-  upload.userUpload(req, res, (err) => {
-    if (err || req.file == null) return res.sendStatus(500);
-    asUser.uploadFile(req.user.cairoId, req.file).then((file) => {
-      if (file != null) return res.json(file);
-      upload.cancelUpload(req.file.path);
-      return res.sendStatus(500);
-    });
-  });
+// Imports
+const router = require("express").Router();
+const storageUtil = require("../utils/storage-util");
+const crudFiles = require("../crud/files");
+const crudUsers = require("../crud/users");
+// Constants
+const bucket = "uploads";
+const container = (req) => req.user.cairo_id;
+const uploadField = "user-selected-file";
+const upload = storageUtil.buildBucketUpload(bucket, uploadField, container);
+const upsertMiddleware = [authMiddleware, bodyParserMiddleware];
+const uploadMiddleware = [authMiddleware, urlEncodedMiddleware, upload];
+router.get("/files", authMiddleware, (req, res) => {
+  crudFiles.queryOwnedFiles(req.user.cairo_id).then((files) => res.json(files));
 });
-router.post("/delete", authMiddleware, (req, res) => {
-  if (!req.body || !(req.body instanceof Array)) return res.sendStatus(400);
-  asUser.deleteFiles(req.user.cairoId, req.body).then((failed) => {
-    if (!failed) return res.sendStatus(200);
-    res.status(500).json(failed);
-  });
+router.post("/upload", uploadMiddleware, async (req, res) => {
+  const users = await crudUsers.queryUserByCairoId(req.user.cairo_id);
+  const user = users[0];
+  user.used_storage = parseInt(user.used_storage);
+  user.storage = parseInt(user.storage);
+  if (req.file.size + user.used_storage > user.storage) {
+    storageUtil.cancelUpload(req, req.file.filename);
+    return res.sendStatus(500);
+  }
+  const fileInsert = crudFiles.insertFile(
+    req.user.cairo_id,
+    req.file.filename,
+    req.file.originalname,
+    req.file.size
+  );
+
+  const userInsert = crudUsers.updateUsedStorage(
+    req.user.cairo_id,
+    req.file.size + user.used_storage
+  );
+
+  Promise.all([fileInsert, userInsert])
+    .then((results) => res.json(results[0][0]))
+    .catch((err) => res.sendStatus(500));
 });
-router.get("/download", (req, res) => {
-  if (!req.query || (!req.query.target && !req.query.zipTarget))
-    return res.sendStatus(404);
-  const userId = req.user == null ? null : req.user.cairoId;
-  if (req.query.target)
-    return storage.getFile(userId, req.query.target).then((file) => {
-      if (file) return res.download(file.path);
-      return res.sendStatus(404);
-    });
-  return res.sendStatus(404);
+router.post("/delete", upsertMiddleware, (req, res) => {
+  if (!req.body || !Array.isArray(req.body))
+    return res.status(400).send("Files must be an array!");
+  const files = req.body;
+  // Technically this should be checked to see if they own it/can edit it
+  // but timecrunch so will fix later
+  const queries = files.map((fileId) =>
+    crudFiles.deleteFile(fileId).then((files) => {
+      try {
+        storageUtil.deleteUpload(bucket, req.user.cairo_id, files[0].filename);
+        return files[0];
+      } catch (err) {
+        logError("STASH", `Could not delete file(s)`, files);
+        return { filesize: 0 };
+      }
+    })
+  );
+  Promise.all(queries)
+    .then((results) => {
+      var deletedData = 0;
+      results.forEach((file) => (deletedData += file.filesize));
+      crudUsers
+        .updateUsedStorage(
+          req.user.cairo_id,
+          Math.max(parseInt(req.user.used_storage) - deletedData, 0)
+        )
+        .catch((err) =>
+          logError(
+            "STASH",
+            "Could not update storage for ",
+            req.user.cairo_id,
+            err
+          )
+        );
+      res.sendStatus(200);
+    })
+    .catch((err) => res.status(500).json([]));
+  // Delete the file
 });
-router.post("/public", authMiddleware, async (req, res) => {
-  if (!req.body || !(req.body instanceof Array)) return res.sendStatus(400);
-  const failed = asUser.publicfyFiles(req.user.cairoId, req.body);
-  if (!failed) return res.sendStatus(200);
-  res.status(500).json(failed);
+router.get("/download", authOptMiddleware, async (req, res) => {
+  if (!req.query || !req.query.target) return res.sendStatus(404);
+  const files = await crudFiles.getFile(req.query.target);
+  if (files.length !== 1) return res.sendStatus(404);
+  const file = files[0];
+  file.view = file.view ?? [];
+  file.edit = file.edit ?? [];
+  var fpath = storageUtil.fetchUpload(bucket, file.owner, file.filename);
+  if (file.public) return res.download(fpath); // Send if public
+  if (!req.user) return res.sendStatus(401);
+  const userId = req.user.cairo_id;
+  if (file.owner === userId) return res.download(fpath); // Send if owner
+  if (file.edit.includes(userId) || file.view.includes(userId))
+    return res.download(fpath); // Send if view/edit
+  return res.sendStatus(401);
 });
+
+router.post("/public", upsertMiddleware, async (req, res) => {
+  if (!req.body || !Array.isArray(req.body))
+    return res.status(400).send("Files must be an array!");
+  req.body.forEach((fileId) => crudFiles.setPublic(fileId));
+  res.sendStatus(200);
+});
+
 module.exports = router;
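
Client-side, the upload contract looks roughly like this (axios is already a
dependency; form-data, the URL, and the token are illustrative). The multipart
field name must match uploadField, i.e. "user-selected-file":

    const fs = require("fs");
    const axios = require("axios");
    const FormData = require("form-data"); // not a repo dependency; client-side helper
    const token = "<cairo-bearer-token>"; // illustrative
    const form = new FormData();
    form.append("user-selected-file", fs.createReadStream("notes.txt"));
    axios.post("http://localhost:52001/api/stash/upload", form, {
      headers: { Authorization: `Bearer ${token}`, ...form.getHeaders() },
    });
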
diff --git a/src/schemas/file.js b/src/schemas/file.js
deleted file mode 100644
index 7b3dee2..0000000
--- a/src/schemas/file.js
+++ /dev/null
@@ -1,40 +0,0 @@
-const mongoose = require("mongoose");
-const Schema = mongoose.Schema;
-const ObjId = mongoose.Types.ObjectId;
-
-const file = new Schema(
-  {
-    path: {
-      type: String,
-      required: true,
-    },
-    owner: { type: String},
-    name: {
-      type: String,
-      required: true,
-    },
-    date: {
-      type: String,
-      required: true,
-    },
-    size: {
-      type: Number,
-      required: true,
-    },
-    public: {
-      type: Boolean,
-      required: true,
-    },
-    edit: {
-      type: [],
-      required: true,
-    },
-    view: {
-      type: [],
-      required: true,
-    },
-  },
-  { collection: "files" }
-);
-
-module.exports = mongoose.model("file", file);
diff --git a/src/schemas/user.js b/src/schemas/user.js
deleted file mode 100644
index a1b7238..0000000
--- a/src/schemas/user.js
+++ /dev/null
@@ -1,30 +0,0 @@
-const mongoose = require("mongoose");
-const Schema = mongoose.Schema;
-const ObjId = mongoose.Types.ObjectId;
-const user = new Schema(
-  {
-    cairoId: {
-      type: String,
-      required: true,
-    },
-    storage: {
-      type: Number,
-      required: true,
-    },
-    usedStorage: {
-      type: Number,
-      required: true,
-    },
-    owned: {
-      type: [ObjId],
-      required: true,
-    },
-    shared: {
-      type: [],
-      required: true,
-    },
-  },
-  { collection: "users" }
-);
-
-module.exports = mongoose.model("user", user);
diff --git a/src/utils/globals.js b/src/utils/globals.js
new file mode 100644
index 0000000..cc9fcdd
--- /dev/null
+++ b/src/utils/globals.js
@@ -0,0 +1,18 @@
+// Envar Overrides
+process.env.FORCE_COLOR = 1;
+// Utils
+const logging = require("./logging");
+// Middlewares
+const bodyParserMiddleware = require("../middlewares/body-parser");
+const urlEncodedMiddleware = require("../middlewares/url-encoded");
+const authMiddlewares = require("../middlewares/auth-middleware");
+// Logging Globals
+global.logInfo = logging.logInfo;
+global.logError = logging.logError;
+global.logSuccess = logging.logSuccess;
+global.logVerbose = logging.logVerbose;
+// Middlewares
+global.bodyParserMiddleware = bodyParserMiddleware;
+global.authMiddleware = authMiddlewares.authMiddleware;
+global.authOptMiddleware = authMiddlewares.authOptMiddleware;
+global.urlEncodedMiddleware = urlEncodedMiddleware;
diff --git a/src/utils/logging.js b/src/utils/logging.js
new file mode 100644
index 0000000..5456a45
--- /dev/null
+++ b/src/utils/logging.js
@@ -0,0 +1,20 @@
+// Force terminal to use color (must be set before chalk loads)
+process.env.FORCE_COLOR = "1";
+// Imports
+const { redBright, greenBright, cyanBright, magentaBright } = require("chalk");
+// Methods
+const logCustom = (color, header, ...info) =>
+  console.log(color(`[${header}]`), ...info);
+// Extensions
+const logInfo = (header, ...args) => logCustom(cyanBright, header, ...args);
+const logError = (header, ...args) => logCustom(redBright, header, ...args);
+const logSuccess = (header, ...args) => logCustom(greenBright, header, ...args);
+const logVerbose = (header, ...args) =>
+  logCustom(magentaBright, header, ...args);
+
+module.exports = {
+  logInfo,
+  logError,
+  logSuccess,
+  logVerbose,
+};
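
Usage sketch for the logging globals (the bracketed tag is colorized, the message
is not):

    logSuccess("POSTGRES", "Connected to database nubian!");
    // prints: [POSTGRES] Connected to database nubian!   (green tag)
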
diff --git a/src/utils/postgres-common.js b/src/utils/postgres-common.js
new file mode 100644
index 0000000..085165e
--- /dev/null
+++ b/src/utils/postgres-common.js
@@ -0,0 +1,127 @@
+const buildPostgresEntry = (entry) => {
+  const pgEntry = { ...entry };
+  Object.keys(pgEntry).forEach((col) => {
+    if (pgEntry[col] === undefined) delete pgEntry[col];
+  });
+  return pgEntry;
+};
+
+const buildPostgresValue = (jsVar) => {
+  if (jsVar === null) return "null";
+  if (typeof jsVar === "string") return buildPostgresString(jsVar);
+  if (Array.isArray(jsVar) && jsVar.length === 0) return "null";
+  if (Array.isArray(jsVar) && isTypeArray(jsVar, "string"))
+    return buildPostgresStringArray(jsVar);
+  return jsVar;
+};
+
+const buildPostgresStringArray = (jsonArray) => {
+  if (jsonArray.length === 0) return null;
+  var pgArray = [...jsonArray];
+  var arrayString = "ARRAY [";
+  pgArray.forEach((e, i) => (pgArray[i] = `'${e}'`));
+  arrayString += pgArray.join(",");
+  arrayString += "]";
+  return arrayString;
+};
+
+const isTypeArray = (jsonArray, type) =>
+  jsonArray.every((e) => typeof e === type);
+
+const buildPostgresString = (jsonString) =>
+  (jsonString && `'${jsonString.replaceAll("'", "''")}'`) || null;
+
+const insertQuery = (table, jsEntry) => {
+  if (typeof jsEntry !== "object") throw Error("PG Inserts must be objects!");
+  const entry = buildPostgresEntry(jsEntry);
+  const cols = Object.keys(entry);
+  cols.forEach((col, i) => {
+    entry[col] = buildPostgresValue(entry[col]);
+    cols[i] = `"${col}"`;
+  });
+  var query = `INSERT INTO ${table}(${cols.join(",")})\n`;
+  query += `VALUES(${Object.values(entry).join(",")})`;
+  return query;
+};
+
+const deleteQuery = (table, jsEntry) => {
+  if (typeof jsEntry !== "object")
+    throw Error("PG Delete conditionals must be an object!");
+  const entry = buildPostgresEntry(jsEntry);
+  const cols = Object.keys(entry);
+  const conditionals = [];
+  for (var col of cols) {
+    entry[col] = buildPostgresValue(entry[col]);
+    conditionals.push(`x.${col}=${entry[col]}`);
+  }
+  return `DELETE FROM ${table} x WHERE ${conditionals.join(" AND ")}`;
+};
+
+const onConflictUpdate = (conflicts, updates) => {
+  if (!Array.isArray(conflicts)) throw Error("PG Conflicts must be an array!");
+  if (typeof updates !== "object") throw Error("PG Updates must be objects!");
+  const entry = buildPostgresEntry(updates);
+  var query = `ON CONFLICT (${conflicts.join(",")}) DO UPDATE SET\n`;
+  const cols = Object.keys(entry);
+  for (var col of cols) {
+    entry[col] = buildPostgresValue(entry[col]);
+  }
+  query += cols.map((c) => `${c}=${entry[c]}`).join(",");
+  return query;
+};
+
+const clearTableQuery = (table) => {
+  return `TRUNCATE ${table}`;
+};
+
+const selectWhereQuery = (table, jsEntry, joinWith) => {
+  if (typeof jsEntry !== "object") throw Error("PG Where must be an object!");
+  const where = buildPostgresEntry(jsEntry);
+  const cols = Object.keys(where);
+  var query = `SELECT * FROM ${table} AS x WHERE\n`;
+  for (var col of cols) {
+    where[col] = buildPostgresValue(where[col]);
+  }
+  return (query += cols.map((c) => `x.${c}=${where[c]}`).join(joinWith));
+};
+
+const updateWhereQuery = (table, updates, wheres, joinWith) => {
+  if (typeof updates !== "object") throw Error("PG Updates must be an object!");
+  if (typeof wheres !== "object") throw Error("PG Wheres must be an object!");
+  const update = buildPostgresEntry(updates);
+  const where = buildPostgresEntry(wheres);
+  const updateCols = Object.keys(update);
+  const whereCols = Object.keys(where);
+  var query = `UPDATE ${table}\n`;
+  var updateQuery = updateCols
+    .map((c) => `${c} = ${buildPostgresValue(update[c])}`)
+    .join(",");
+  var whereQuery = whereCols
+    .map((c) => `${c} = ${buildPostgresValue(where[c])}`)
+    .join(joinWith);
+  return (query += `SET ${updateQuery} WHERE ${whereQuery}`);
+};
+
+const updateWhereAnyQuery = (table, updates, wheres) =>
+  updateWhereQuery(table, updates, wheres, " OR ");
+
+const updateWhereAllQuery = (table, updates, wheres) =>
+  updateWhereQuery(table, updates, wheres, " AND ");
+
+const selectWhereAnyQuery = (table, where) =>
+  selectWhereQuery(table, where, " OR ");
+
+const selectWhereAllQuery = (table, where) =>
+  selectWhereQuery(table, where, " AND ");
+
+module.exports = {
+  selectWhereAnyQuery,
+  selectWhereAllQuery,
+  updateWhereAnyQuery,
+  updateWhereAllQuery,
+  insertQuery,
+  deleteQuery,
+  buildPostgresValue,
+  onConflictUpdate,
+  clearTableQuery,
+};
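
A few more generated-SQL sketches for the builders above (table names and values
illustrative):

    updateWhereAnyQuery("users", { used_storage: 2048 }, { cairo_id: "cairo123" });
    // UPDATE users
    // SET used_storage = 2048 WHERE cairo_id = 'cairo123'

    deleteQuery("files", { id: 42 });
    // DELETE FROM files x WHERE x.id=42
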
Error("PG Updates must be an object!"); + if (typeof wheres !== "object") throw Error("PG Wheres must be an object!"); + const update = buildPostgresEntry(updates); + const where = buildPostgresEntry(wheres); + const updateCols = Object.keys(update); + const whereCols = Object.keys(where); + var query = `UPDATE ${table}\n`; + var updateQuery = updateCols + .map((c) => `${c} = ${buildPostgresValue(update[c])}`) + .join(","); + var whereQuery = whereCols + .map((c) => `${c} = ${buildPostgresValue(where[c])}`) + .join(joinWith); + return (query += `SET ${updateQuery} WHERE ${whereQuery}`); +}; + +const updateWhereAnyQuery = (table, updates, wheres) => + updateWhereQuery(table, updates, wheres, " OR "); + +const updateWhereAllQuery = (table, updates, wheres) => + updateWhereQuery(table, updates, wheres, " AND "); + +const selectWhereAnyQuery = (table, where) => + selectWhereQuery(table, where, " OR "); + +const selectWhereAllQuery = (table, where) => + selectWhereQuery(table, where, " AND "); + +module.exports = { + selectWhereAnyQuery, + selectWhereAllQuery, + updateWhereAnyQuery, + updateWhereAllQuery, + insertQuery, + deleteQuery, + buildPostgresValue, + onConflictUpdate, + clearTableQuery, +}; diff --git a/src/utils/postgres.js b/src/utils/postgres.js new file mode 100644 index 0000000..594ce54 --- /dev/null +++ b/src/utils/postgres.js @@ -0,0 +1,31 @@ +// Imports +const { migrate } = require("postgres-migrations"); +const pgp = require("pg-promise")(); +const moment = require("moment"); + +// Ensure dates get saved as UTC date strings +// This prevents the parser from doing strange datetime operations +pgp.pg.types.setTypeParser(1114, (str) => moment.utc(str).format()); + +// Constants +const NUBIAN_POSTGRES_DISABLED = process.env.NUBIAN_POSTGRES_DISABLED ?? false; +const database = "nubian"; +const dbConfig = { + database, + user: process.env.NUBIAN_POSTGRES_USER, + password: process.env.NUBIAN_POSTGRES_PASSWORD, + host: process.env.NUBIAN_POSTGRES_HOST, + port: 5432, + ensureDatabaseExists: true, +}; + +const configure = async () => { + if (NUBIAN_POSTGRES_DISABLED) return logInfo("POSTGRES", "Postgres Disabled"); + await migrate(dbConfig, "src/db-migrations"); + // Override the global variable DB + global.PG = pgp(dbConfig); + await PG.connect(); + logSuccess("POSTGRES", `Connected to database ${database}!`); +}; + +module.exports = { configure }; diff --git a/src/utils/storage-util.js b/src/utils/storage-util.js new file mode 100644 index 0000000..7cab29f --- /dev/null +++ b/src/utils/storage-util.js @@ -0,0 +1,55 @@ +// Imports +const fs = require("fs"); +const { resolve: rsPath } = require("path"); +const multer = require("multer"); +// Constants +const bucketsPath = process.env.NUBIAN_BUCKET_PATH ?? 
"buckets/"; +// Methods +const bucketStorage = (bucketStoragePath, containerCb) => + multer.diskStorage({ + destination: (req, file, cb) => + cb(null, upsertDestination(req, bucketStoragePath, containerCb)), + filename: (req, file, cb) => { + const n = file.originalname.replaceAll(" ", "_"); + const fileName = `${Date.now()}-${n}`; + req.on("aborted", () => cancelUpload(req, fileName)); + cb(null, fileName); + }, + }); + +const buildBucketUpload = (bucketStoragePath, fieldName, containerCb) => + multer({ + storage: bucketStorage(bucketStoragePath, containerCb), + }).single(fieldName); + +const cancelUpload = (req, fileName) => { + const path = rsPath(req.bucketStoragePath, fileName); + deletePath(path); +}; + +const deletePath = (path) => fs.existsSync(path) && fs.unlinkSync(path); + +const deleteUpload = (bucketStoragePath, container, filename) => + deletePath(fetchUpload(bucketStoragePath, container, filename)); + +const fetchUpload = (bucketStoragePath, container, filename) => + rsPath(bucketsPath, bucketStoragePath, container, filename); + +const upsertDestination = (req, bucketStoragePath, containerCb) => { + var container = ""; + if (containerCb) { + if (typeof containerCb !== "function") + throw Error("Container Callback must be a function!"); + container = containerCb(req); + } + req.bucketStoragePath = rsPath(bucketsPath, bucketStoragePath, container); + fs.mkdirSync(req.bucketStoragePath, { recursive: true }); + return req.bucketStoragePath; +}; + +module.exports = { + buildBucketUpload, + cancelUpload, + deleteUpload, + fetchUpload, +};