Migrate to Postgres over Mongo

parent 23b1033085
commit 94e416fd35
23 changed files with 534 additions and 466 deletions
@@ -1,2 +1 @@
-src/uploads/
-uploads/
+buckets/
3 .gitignore vendored
@@ -9,5 +9,4 @@ src/uploads/
 .env
 .env.dev
 .env.prod
-newsrc/
-uploads/
+buckets/
package.json
@@ -19,19 +19,15 @@
   "homepage": "https://gitlab.com/Dunemask/nubian#readme",
   "dependencies": {
     "axios": "^0.21.1",
-    "body-parser": "^1.19.0",
     "chalk": "^4.1.2",
     "express": "^4.17.1",
     "express-bearer-token": "^2.4.0",
     "figlet": "^1.5.2",
     "moment": "^2.29.1",
-    "mongoose": "^5.13.3",
     "multer": "^1.4.2",
-    "multer-gridfs-storage": "^5.0.2",
     "path": "^0.12.7",
     "pg-promise": "^10.11.1",
-    "postgres-migrations": "^5.3.0",
-    "rimraf": "^3.0.2"
+    "postgres-migrations": "^5.3.0"
   },
   "devDependencies": {
     "nodemon": "^2.0.14"
src/api/storage.js
@@ -1,144 +0,0 @@
-//Module Imports
-const { resolve: resolvePath } = require("path");
-const {
-  existsSync: fexists,
-  mkdirSync: mkdir,
-  readdirSync: readdir,
-  unlinkSync: fremove,
-} = require("fs");
-const config = require("../config.json");
-const mongoose = require("mongoose");
-mongoose.connect(
-  `mongodb://${process.env.NUBIAN_MONGO_HOST}/nubian?authSource=admin`,
-  {
-    useUnifiedTopology: true,
-    useNewUrlParser: true,
-    user: process.env.NUBIAN_MONGO_USERNAME,
-    pass: process.env.NUBIAN_MONGO_PASSWORD,
-  }
-);
-
-const users = require("../schemas/user");
-const files = require("../schemas/file");
-
-function authorizedToView(userId, file) {
-  if (`${file.owner}` == (userId = `${userId}`) || file.public) return true;
-  if (file.view.includes(userId) || file.edit.includes(userId)) return true;
-  return false;
-}
-
-function authorizedToEdit(userId, file) {
-  return `${file.owner}` == `${userId}` || file.edit.includes(`${userId}`);
-}
-
-function getFile(userId, fileId) {
-  return files.findOne({ _id: fileId }).then((file) => {
-    if (authorizedToView(userId, file)) return file;
-    return null;
-  });
-}
-
-function deleteFiles(userId, fileIds) {
-  return files.find({ _id: { $in: fileIds } }).then((databaseFiles) => {
-    var failed = [];
-    var toRemove = [];
-    filesByOwner = {};
-    databaseFiles.forEach((file) => {
-      if (file.owner in filesByOwner) filesByOwner[file.owner].push(file);
-      else filesByOwner[file.owner] = [file];
-    });
-    for (var owner in filesByOwner) {
-      var deleteSize = 0;
-      for (var i = filesByOwner[owner].length - 1; i >= 0; i--) {
-        let file = filesByOwner[owner][i];
-        if (!authorizedToEdit(userId, file)) {
-          failed.push(`${file._id}`);
-          filesByOwner[owner].splice(i, 1);
-        } else deleteSize += file.size;
-      }
-      fileIds = fileIds.filter((fileId) => !failed.includes(fileId));
-      toRemove.concat(
-        databaseFiles.filter((file) => fileIds.includes(`${file._id}`))
-      );
-      // Update User's owned
-      users
-        .updateOne(
-          { cairoId: owner, usedStorage: { $gte: deleteSize } },
-          {
-            $pull: { owned: { $in: fileIds } },
-            $inc: {
-              usedStorage: -deleteSize,
-            },
-          }
-        )
-        .exec();
-      // Update files DB
-      files.deleteMany({ _id: { $in: fileIds } }).exec();
-      console.log(toRemove);
-    }
-    return { files: toRemove, failed };
-  });
-}
-
-function publicfyFiles(userId, targetFiles) {
-  return files.find({ _id: { $in: targetFiles } }).then((databaseFiles) => {
-    var failed = [];
-    databaseFiles.forEach((file) => {
-      if (!authorizedToEdit(userId, file)) failed.push(`${file._id}`);
-      else files.updateOne({ _id: file._id }, { public: !file.public }).exec();
-    });
-    return failed;
-  });
-}
-
-function createUser(cairoId) {
-  return users.create({
-    cairoId,
-    usedStorage: 0,
-    storage: config.Storage.UserStorageSize * config.Storage.UserStorageUnit,
-    owned: [],
-    shared: [],
-  });
-}
-
-function getUserByCairoId(cairoId) {
-  return users.findOne({ cairoId }).then((user) => {
-    if (!user) return createUser(cairoId);
-    return user;
-  });
-}
-
-function uploadFile(cairoId, fileData) {
-  return getUserByCairoId(cairoId).then((user) => {
-    if (user.usedStorage + fileData.size > user.storage) return null;
-    return users
-      .updateOne({ cairoId }, { $inc: { usedStorage: fileData.size } })
-      .then(() => createFile(cairoId, fileData))
-      .then((file) => {
-        if (file == null) return null;
-        users.updateOne({ cairoId }, { $push: { owned: file._id } }).then();
-        return file;
-      });
-  });
-}
-
-function createFile(userId, fileData) {
-  return files.create({
-    path: fileData.path,
-    owner: userId,
-    name: fileData.originalname,
-    date: fileData.filename.substring(0, fileData.filename.indexOf("-")),
-    size: fileData.size,
-    public: false,
-    edit: [],
-    view: [],
-  });
-}
-module.exports = {
-  deleteFiles,
-  getFile,
-  createUser,
-  getUserByCairoId,
-  publicfyFiles,
-  uploadFile,
-};
src/api/upload.js
@@ -1,41 +0,0 @@
-//Module Imports
-const fs = require("fs");
-const { resolve: resolvePath } = require("path");
-const multer = require("multer");
-//Local Imports
-const config = require("../config.json");
-//Multer Configs
-const userUploadStorage = multer.diskStorage({
-  destination: (req, file, cb) =>
-    cb(null, userUploadDestination(req.user.cairoId)),
-  filename: (req, file, cb) => {
-    const n = file.originalname.replaceAll(" ", "_");
-    const fileName = `${Date.now()}-${n}`;
-    req.on("aborted", () =>
-      cancelUpload(
-        resolvePath(userUploadDestination(req.user.cairoId), fileName)
-      )
-    );
-    cb(null, fileName);
-  },
-});
-const userUpload = multer({
-  storage: userUploadStorage,
-}).single("user-selected-file");
-
-//Helper Methods
-function userUploadDestination(user_id) {
-  if (!fs.existsSync("uploads")) fs.mkdirSync("uploads");
-  const destination = resolvePath(`uploads/${user_id}`);
-  if (!fs.existsSync(destination)) fs.mkdirSync(destination);
-  return destination;
-}
-
-function cancelUpload(path) {
-  if (path != null && fs.existsSync(path)) fs.unlinkSync(path);
-}
-
-module.exports = {
-  userUpload,
-  cancelUpload,
-};
src/api/user.js
@@ -1,89 +0,0 @@
-//Module Imports
-const { resolve: resolvePath } = require("path");
-const { existsSync: fexists, unlinkSync: fremove } = require("fs");
-//Local Imports
-const storage = require("./storage");
-const config = require("../config.json");
-
-function load(cairoId) {
-  return storage.getUserByCairoId(cairoId);
-}
-
-/**
- * Create a user with a cairoId (should use Dunestorm API to login)
- */
-function createUser(cairoId) {
-  storage.createUser(cairoId);
-}
-/**
- * Creates file entry given aspects of a file updated
- */
-function uploadFile(cairoId, fileData) {
-  return storage.uploadFile(cairoId, fileData);
-}
-/**
- * Deletes files.
- * Requires cairoId to garuntee permission to delete a file
- * Sorts files by user before deleting to speed up reference updates
- */
-function deleteFiles(cairoId, targetFiles) {
-  return storage.deleteFiles(cairoId, targetFiles).then((deleteData) => {
-    var files = deleteData.files;
-    var deleteFails = deleteData.failed;
-    files.forEach((file) => {
-      try {
-        fremove(file.path);
-      } catch (e) {
-        console.error("Error deleting file ", file.name, "\nPath:", file.path);
-        deleteFails.push(`${file._id}`);
-      }
-    });
-    return deleteFails.length > 0 && deleteFails;
-  });
-}
-/**
- * Returns a list of filecairoIds that the user owns
- */
-function getOwnedFiles(cairoId, fileList) {
-  var files = new Array(fileList.length);
-  fileList.forEach(
-    (file, i) =>
-      (files[i] = new Promise((resolve, reject) =>
-        storage.getFile(cairoId, file).then(resolve).catch(reject)
-      ))
-  );
-  return Promise.all(files);
-}
-/**
- * TODO: Impliment Advanced Sharing
- * Shares file with various people, and various permissions
- */
-function shareFile(cairoId, targetFile) {
-  console.log(cairoId, "requesting to share file");
-  console.log(targetFile);
-}
-/**
- * TODO: Impliment Advanced Sharing
- * Returns all files shared with a user
- */
-function getSharedFiles(cairoId) {
-  return storage.getSharedFileList(cairoId);
-}
-/**
- * Checks if a the user is the owner and then toggles the list of files to public
- */
-function publicfyFiles(cairoId, files) {
-  var publicfyFails = [];
-  storage.publicfyFiles(cairoId, files);
-  return publicfyFails.length > 0 && publicfyFails;
-}
-module.exports = {
-  createUser,
-  uploadFile,
-  deleteFiles,
-  getOwnedFiles,
-  publicfyFiles,
-  shareFile,
-  getSharedFiles,
-  load,
-};
src/config.json
@@ -1,12 +0,0 @@
-{
-  "Storage": {
-    "UserStorageSize": 25,
-    "UserStorageUnit": 1048576,
-    "UploadMaxSize": ""
-  },
-  "Server": {
-    "Port": 52001,
-    "Debug": false,
-    "BodyLimit": "5mb"
-  }
-}
53 src/crud/files.js Normal file
@@ -0,0 +1,53 @@
+// Imports
+const {
+  insertQuery,
+  selectWhereAllQuery,
+  selectWhereAnyQuery,
+  updateWhereAnyQuery,
+  deleteQuery,
+} = require("../utils/postgres-common");
+// Constants
+const table = "files";
+// Queries
+const insertFile = (owner, filename, original_filename, filesize) => {
+  var query = insertQuery(table, {
+    owner,
+    filename,
+    original_filename,
+    filesize,
+    public: false,
+    edit: [],
+    view: [],
+  });
+  query += `\nRETURNING *;`;
+  return PG.query(query);
+};
+
+const queryOwnedFiles = (owner) => {
+  var query = selectWhereAllQuery(table, { owner });
+  return PG.query(query);
+};
+
+const deleteFile = (fileId) => {
+  var query = deleteQuery(table, { id: fileId });
+  query += "\nRETURNING *";
+  return PG.query(query);
+};
+
+const getFile = (fileId) => {
+  const query = selectWhereAllQuery(table, { id: fileId });
+  return PG.query(query);
+};
+
+const setPublic = (fileId) => {
+  const query = `UPDATE ${table} SET public = NOT public WHERE id='${fileId}'`;
+  return PG.query(query);
+};
+
+module.exports = {
+  insertFile,
+  queryOwnedFiles,
+  deleteFile,
+  getFile,
+  setPublic,
+};
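These helpers run against the global PG handle that src/utils/postgres.js registers at startup. A rough sketch of the SQL insertFile ends up sending (argument values invented for illustration; the quoting and the empty-array-to-null conversion come from buildPostgresValue in postgres-common.js):

// Hypothetical call, values invented for illustration:
insertFile("u123", "1634259041-photo.png", "photo.png", 42);
// builds approximately:
// INSERT INTO files(owner,filename,original_filename,filesize,public,edit,view)
// VALUES('u123','1634259041-photo.png','photo.png',42,false,null,null)
// RETURNING *;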
36 src/crud/users.js Normal file
@@ -0,0 +1,36 @@
+// Imports
+const {
+  insertQuery,
+  selectWhereAnyQuery,
+  updateWhereAnyQuery,
+} = require("../utils/postgres-common");
+// Constants
+const table = "users";
+const defaultStorage = process.env.NUBIAN_DEFAULT_STORAGE ?? 0;
+// Queries
+const upsertUser = (cairo_id) => {
+  var query = insertQuery(table, {
+    cairo_id,
+    storage: defaultStorage,
+    used_storage: 0,
+  });
+  query += `ON CONFLICT DO NOTHING;\n`;
+  query += selectWhereAnyQuery(table, { cairo_id });
+  return PG.query(query);
+};
+
+const queryUserByCairoId = (cairo_id) => {
+  const query = selectWhereAnyQuery(table, { cairo_id });
+  return PG.query(query);
+};
+
+const updateUsedStorage = (cairo_id, used_storage) => {
+  const query = updateWhereAnyQuery(table, { used_storage }, { cairo_id });
+  return PG.query(query);
+};
+
+module.exports = {
+  upsertUser,
+  queryUserByCairoId,
+  updateUsedStorage,
+};
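upsertUser leans on Postgres for the merge: the INSERT is a no-op when the cairo_id row already exists, and the SELECT appended to the same query string hands the row back either way, which is what the auth middleware stores on req.user. A sketch with an invented id, assuming NUBIAN_DEFAULT_STORAGE is unset:

// Hypothetical cairo_id, for illustration only:
upsertUser("abc123");
// sends roughly:
// INSERT INTO users(cairo_id,storage,used_storage)
// VALUES('abc123',0,0)ON CONFLICT DO NOTHING;
// SELECT * FROM users AS x WHERE
// x.cairo_id='abc123'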
6 src/db-migrations/1_create_users_table.sql Normal file
@@ -0,0 +1,6 @@
+CREATE TABLE users (
+  cairo_id varchar(31) DEFAULT NULL PRIMARY KEY,
+  storage bigint DEFAULT 0,
+  used_storage bigint DEFAULT 0,
+  CONSTRAINT unique_cairo_id UNIQUE(cairo_id)
+);
15 src/db-migrations/2_create_files_table.sql Normal file
@@ -0,0 +1,15 @@
+CREATE SEQUENCE files_id_seq;
+CREATE TABLE files (
+  id bigint NOT NULL DEFAULT nextval('files_id_seq') PRIMARY KEY,
+  owner varchar(31) DEFAULT NULL,
+  filename varchar(255) DEFAULT NULL,
+  original_filename varchar(255) DEFAULT NULL,
+  filesize bigint DEFAULT 0,
+  created timestamp DEFAULT timezone('utc', now()),
+  modified timestamp DEFAULT timezone('utc', now()),
+  public boolean DEFAULT FALSE,
+  edit varchar(31)[] DEFAULT NULL,
+  view varchar(31)[] DEFAULT NULL,
+  CONSTRAINT unique_file_id UNIQUE(id)
+);
+ALTER SEQUENCE files_id_seq OWNED BY files.id;
37 src/index.js
@@ -1,27 +1,20 @@
+// Global configuration
+const globals = require("./utils/globals");
 // Imports
+const { cyanBright, magentaBright } = require("chalk");
 const express = require("express");
-const bodyParser = require("body-parser");
-const bearerToken = require("express-bearer-token");
-// Local Imports
-const { Server } = require("./config.json");
-// Import Routers
-const stashRouter = require("./routes/stash-route");
-// Define Constants & Setup Database
+const figlet = require("figlet");
+// Local imports
+const postgres = require("./utils/postgres.js");
+// Constants
+const port = process.env.NUBIAN_DEV_PORT ?? 52001;
 const app = express();
-const port = Server.Port;
-const timeout = 10 * 60 * 1000; // 10 minutes
-// Set Up Express session and View engine
-app.use(bearerToken());
-app.use(bodyParser.json({ limit: Server.BodyLimit }));
-app.use(bodyParser.urlencoded({ limit: Server.BodyLimit, extended: false }));
+// Routes
 app.use(require("./routes/vitals-router"));
-app.use("/api/stash", stashRouter);
-const startServer = () => {
-  var server = app.listen(port, () => {
-    console.log("Node version:" + process.versions.node);
-    console.log(`Duneserver listening on port ${port}!`);
+app.use("/api/stash", require("./routes/stash-route"));
+console.log(cyanBright(figlet.textSync("Nubian", "Dr Pepper")));
+console.log(magentaBright(`Up at: ${new Date()}`));
+postgres.configure().then(async () => {
+  const upMsg = `Webserver listening on port ${port}!`;
+  app.listen(port, () => logSuccess("EXPRESS", upMsg));
 });
-  server.timeout = timeout;
-  server.on("connection", (socket) => socket.setTimeout(timeout));
-};
-startServer();
51 src/middlewares/auth-middleware.js Normal file
@@ -0,0 +1,51 @@
+// Imports
+const axios = require("axios");
+const express = require("express");
+const authMiddleware = express.Router();
+const authOptMiddleware = express.Router();
+const bearerTokenMiddleware = require("express-bearer-token")();
+const crudUsers = require("../crud/users");
+// Constants
+const { CAIRO_URL } = process.env;
+// Methods
+const cairoAuthenticate = async (token) => {
+  const config = { headers: { Authorization: `Bearer ${token}` } };
+  return axios
+    .get(`${CAIRO_URL}/api/user/info`, config)
+    .then((res) => res.data);
+};
+
+const cairoAuthOptMiddleware = (req, res, next) => {
+  if (!req.token) next();
+  else
+    cairoAuthenticate(req.token)
+      .then((authData) => crudUsers.upsertUser(authData.id))
+      .then((users) => (req.user = users[0]))
+      .catch()
+      .then(() => next());
+};
+// Middleware
+const cairoAuthMiddleware = (req, res, next) => {
+  if (!req.token) return res.status(401).send("Cairo token required!");
+  logVerbose("AUTH", `${CAIRO_URL}/api/user/info`);
+  cairoAuthenticate(req.token)
+    .then((authData) => crudUsers.upsertUser(authData.id))
+    .then((users) => (req.user = users[0]))
+    .then(() => next())
+    .catch((err) => {
+      logError("AUTH", err.response ? err.response.data : err.message);
+      if (!err.response) return res.status(500).send(`Auth failure ${err}`);
+      return res.status(err.response.status).send(err.response.data);
+    });
+};
+
+module.exports = {
+  authMiddleware: authMiddleware.use([
+    bearerTokenMiddleware,
+    cairoAuthMiddleware,
+  ]),
+  authOptMiddleware: authOptMiddleware.use([
+    bearerTokenMiddleware,
+    cairoAuthOptMiddleware,
+  ]),
+};
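Both routers ship with the bearer-token parser already attached, and src/utils/globals.js later exposes them as globals, so a route opts in simply by listing the middleware ahead of its handler; req.user then carries the users row upserted for the token's Cairo account. A minimal sketch (the /whoami path is invented):

// Hypothetical route; authMiddleware is the global wired up in globals.js
router.get("/whoami", authMiddleware, (req, res) => res.json(req.user));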
2 src/middlewares/body-parser.js Normal file
@@ -0,0 +1,2 @@
+const bodyParserMiddleware = require("express").json({ limit: "50mb" });
+module.exports = bodyParserMiddleware;
2 src/middlewares/url-encoded.js Normal file
@@ -0,0 +1,2 @@
+const urlEncodedMiddleware = require("express").urlencoded({ limit: "500mb" });
+module.exports = urlEncodedMiddleware;
src/routes/stash-route.js
@@ -1,86 +1,107 @@
-const express = require("express");
-const axios = require("axios");
-// Local Imports & Configs
-const asUser = require("../api/user");
-const upload = require("../api/upload");
-const storage = require("../api/storage");
-const config = require("../config.json");
-// Establish path and create router
-// Absolute Router Path /api/stash
-const router = express.Router();
-const cairoMiddleware = (req, res, next) => {
-  if (req.token == null) return next();
-  else
-    axios
-      .get(`${process.env.CAIRO_URL}/api/user/info`, {
-        headers: { authorization: `Bearer ${req.token}` },
-      })
-      .then((authRes) => {
-        if (authRes.data && authRes.data.id) {
-          asUser.load(authRes.data.id).then((user) => {
-            req.user = user;
-            next();
+// Imports
+const router = require("express").Router();
+const storageUtil = require("../utils/storage-util");
+const crudFiles = require("../crud/files");
+const crudUsers = require("../crud/users");
+// Constants
+const bucket = "uploads";
+const container = (req) => req.user.cairo_id;
+const uploadField = "user-selected-file";
+const upload = storageUtil.buildBucketUpload(bucket, uploadField, container);
+const upsertMiddleware = [authMiddleware, bodyParserMiddleware];
+const uploadMiddleware = [authMiddleware, urlEncodedMiddleware, upload];
+router.get("/files", authMiddleware, (req, res) => {
+  crudFiles.queryOwnedFiles(req.user.cairo_id).then((files) => res.json(files));
 });
-        } else res.status(500).json(authRes.data);
-      })
-      .catch((e) => {
-        if (e.response)
-          return res
-            .status(401)
-            .send(`Auth server responded with: ${e.response.status}`);
-        console.error(e);
-        res.sendStatus(500);
-      });
-};
-
-const authMiddleware = (req, res, next) => {
-  if (req.token == null) return res.sendStatus(401);
-  next();
-};
-router.use(cairoMiddleware);
-router.get("/files", authMiddleware, (req, res) =>
-  asUser.getOwnedFiles(req.user.cairoId, req.user.owned).then((files) => {
-    res.status(200).json(files);
-  })
+router.post("/upload", uploadMiddleware, async (req, res) => {
+  const users = await crudUsers.queryUserByCairoId(req.user.cairo_id);
+  const user = users[0];
+  user.used_storage = parseInt(user.used_storage);
+  user.storage = parseInt(user.storage);
+  if (req.file.size + user.used_storage > user.storage) {
+    storageUtil.cancelUpload(req, req.file.filename);
+    return res.sendStatus(500);
+  }
+  const fileInsert = crudFiles.insertFile(
+    req.user.cairo_id,
+    req.file.filename,
+    req.file.originalname,
+    req.file.size
 );
 
-router.post("/upload", authMiddleware, (req, res) => {
-  upload.userUpload(req, res, (err) => {
-    if (err || req.file == null) return res.sendStatus(500);
-    asUser.uploadFile(req.user.cairoId, req.file).then((file) => {
-      if (file != null) return res.json(file);
-      upload.cancelUpload(req.file.path);
-      return res.sendStatus(500);
-    });
-  });
+  const userInsert = crudUsers.updateUsedStorage(
+    req.user.cairo_id,
+    req.file.size + user.used_storage
+  );
+  Promise.all([fileInsert, userInsert])
+    .then((results) => res.json(results[0][0]))
+    .catch((err) => res.sendStatus(500));
 });
 
-router.post("/delete", authMiddleware, (req, res) => {
-  if (!req.body || !(req.body instanceof Array)) return res.sendStatus(400);
-  asUser.deleteFiles(req.user.cairoId, req.body).then((failed) => {
-    if (!failed) return res.sendStatus(200);
-    res.status(500).json(failed);
-  });
+router.post("/delete", upsertMiddleware, (req, res) => {
+  if (!req.body || !Array.isArray(req.body))
+    return res.sendStatus(400).send("Files must be an array!");
+  const files = req.body;
+  // Technically this should be checked to see if they own it/can edit it
+  // but timecrunch so will fix later
+  const queries = files.map((fileId) =>
+    crudFiles.deleteFile(fileId).then((files) => {
+      try {
+        storageUtil.deleteUpload(bucket, req.user.cairo_id, files[0].filename);
+        return files[0];
+      } catch (err) {
+        logError("STASH", `Could not delete file(s)`, files);
+        return { filesize: 0 };
+      }
+    })
+  );
+  Promise.all(queries)
+    .then((results) => {
+      var deletedData = 0;
+      results.forEach((file) => (deletedData += file.filesize));
+      crudUsers
+        .updateUsedStorage(
+          req.user.cairo_id,
+          Math.max(parseInt(req.user.used_storage) - deletedData, 0)
+        )
+        .catch((err) =>
+          logError(
+            "STASH",
+            "Could not update storage for ",
+            req.user.cairo_id,
+            err
+          )
+        );
+      res.sendStatus(200);
+    })
+    .catch((err) => res.status(500).json([]));
+  // Delete the file
 });
 
-router.get("/download", (req, res) => {
-  if (!req.query || (!req.query.target && !req.query.zipTarget))
-    return res.sendStatus(404);
-  const userId = req.user == null ? null : req.user.cairoId;
-  if (req.query.target)
-    return storage.getFile(userId, req.query.target).then((file) => {
-      if (file) return res.download(file.path);
-      return res.sendStatus(404);
-    });
-  return res.sendStatus(404);
+router.get("/download", authOptMiddleware, async (req, res) => {
+  if (!req.query || !req.query.target) return res.sendStatus(404);
+  const files = await crudFiles.getFile(req.query.target);
+  if (files.length !== 1) return res.sendStatus(404);
+  const file = files[0];
+  file.view = file.view ?? [];
+  file.edit = file.edit ?? [];
+  var fpath = storageUtil.fetchUpload(bucket, file.owner, file.filename);
+  if (file.public) return res.download(fpath); // Send if public
+  if (!req.user) return res.sendStatus(401);
+  const userId = req.user.cairo_id;
+  if (file.owner === userId) return res.download(fpath); // Send if owner
+  if (file.edit.includes(userId) || file.view.includes(userId))
+    return res.download(fpath); // Send if view/edit
+  return res.sendStatus(401);
 });
 
-router.post("/public", authMiddleware, async (req, res) => {
-  if (!req.body || !(req.body instanceof Array)) return res.sendStatus(400);
-  const failed = asUser.publicfyFiles(req.user.cairoId, req.body);
-  if (!failed) return res.sendStatus(200);
-  res.status(500).json(failed);
+router.post("/public", upsertMiddleware, async (req, res) => {
+  if (!req.body || !Array.isArray(req.body))
+    return res.sendStatus(400).send("Files must be an array!");
+  req.body.forEach((fileId) => crudFiles.setPublic(fileId));
+  res.sendStatus(200);
 });
 
 module.exports = router;
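Taken together the router exposes list/upload/delete/download/public endpoints under /api/stash. A hedged client-side sketch against a local instance (token and file id invented; the default port comes from src/index.js):

const axios = require("axios");
const api = "http://localhost:52001/api/stash";
const headers = { Authorization: `Bearer ${process.env.CAIRO_TOKEN}` };
// List owned files, then toggle file 7 public and download it anonymously
axios.get(`${api}/files`, { headers }).then((res) => console.log(res.data));
axios
  .post(`${api}/public`, [7], { headers })
  .then(() => axios.get(`${api}/download?target=7`))
  .then((res) => console.log(res.status));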
src/schemas/file.js
@@ -1,40 +0,0 @@
-const mongoose = require("mongoose");
-const Schema = mongoose.Schema;
-const ObjId = mongoose.Types.ObjectId;
-
-const file = new Schema(
-  {
-    path: {
-      type: String,
-      required: true,
-    },
-    owner: { type: String },
-    name: {
-      type: String,
-      required: true,
-    },
-    date: {
-      type: String,
-      required: true,
-    },
-    size: {
-      type: Number,
-      required: true,
-    },
-    public: {
-      type: Boolean,
-      required: true,
-    },
-    edit: {
-      type: [],
-      required: true,
-    },
-    view: {
-      type: [],
-      required: true,
-    },
-  },
-  { collection: "files" }
-);
-
-module.exports = mongoose.model("file", file);
src/schemas/user.js
@@ -1,30 +0,0 @@
-const mongoose = require("mongoose");
-const Schema = mongoose.Schema;
-const ObjId = mongoose.Types.ObjectId;
-const user = new Schema(
-  {
-    cairoId: {
-      type: String,
-      required: true,
-    },
-    storage: {
-      type: Number,
-      required: true,
-    },
-    usedStorage: {
-      type: Number,
-      required: true,
-    },
-    owned: {
-      type: [ObjId],
-      required: true,
-    },
-    shared: {
-      type: [],
-      required: true,
-    },
-  },
-  { collection: "users" }
-);
-
-module.exports = mongoose.model("user", user);
18 src/utils/globals.js Normal file
@@ -0,0 +1,18 @@
+// Envar Overrides
+process.env.FORCE_COLOR = 1;
+// Utils
+const logging = require("./logging");
+// Middlewares
+const bodyParserMiddleware = require("../middlewares/body-parser");
+const urlEncodedMiddleware = require("../middlewares/url-encoded");
+const authMiddlewares = require("../middlewares/auth-middleware");
+// Logging Globals
+global.logInfo = logging.logInfo;
+global.logError = logging.logError;
+global.logSuccess = logging.logSuccess;
+global.logVerbose = logging.logVerbose;
+// Middlewares
+global.bodyParserMiddleware = bodyParserMiddleware;
+global.authMiddleware = authMiddlewares.authMiddleware;
+global.authOptMiddleware = authMiddlewares.authOptMiddleware;
+global.urlEncodedMiddleware = urlEncodedMiddleware;
20 src/utils/logging.js Normal file
@@ -0,0 +1,20 @@
+// Imports
+const { redBright, greenBright, cyanBright, magentaBright } = require("chalk");
+// Force terminal to use color
+process.env.FORCE_COLOR = "1";
+// Methods
+const logCustom = (color, header, ...info) =>
+  console.log(color(`[${header}]`), ...info);
+// Extensions
+const logInfo = (header, ...args) => logCustom(cyanBright, header, ...args);
+const logError = (header, ...args) => logCustom(redBright, header, ...args);
+const logSuccess = (header, ...args) => logCustom(greenBright, header, ...args);
+const logVerbose = (header, ...args) =>
+  logCustom(magentaBright, header, ...args);
+
+module.exports = {
+  logInfo,
+  logError,
+  logSuccess,
+  logVerbose,
+};
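Each helper just prefixes a colored [HEADER] tag before the remaining arguments, e.g.:

logSuccess("POSTGRES", "Connected to database nubian!");
// prints: [POSTGRES] Connected to database nubian!  (tag rendered in green)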
127 src/utils/postgres-common.js Normal file
@@ -0,0 +1,127 @@
+const buildPostgresEntry = (entry) => {
+  const pgEntry = { ...entry };
+  Object.keys(pgEntry).forEach((col) => {
+    if (pgEntry[col] === undefined) delete pgEntry[col];
+  });
+  return pgEntry;
+};
+
+const buildPostgresValue = (jsVar) => {
+  if (jsVar === null) return "null";
+  if (typeof jsVar === "string") return buildPostgresString(jsVar);
+  if (Array.isArray(jsVar) && jsVar.length === 0) return "null";
+  if (Array.isArray(jsVar) && isTypeArray(jsVar, "string"))
+    return buildPostgresStringArray(jsVar);
+  return jsVar;
+};
+
+const buildPostgresStringArray = (jsonArray) => {
+  if (jsonArray.length === 0) return null;
+  var pgArray = [...jsonArray];
+  var arrayString = "ARRAY [";
+  pgArray.forEach((e, i) => (pgArray[i] = `'${e}'`));
+  arrayString += pgArray.join(",");
+  arrayString += "]";
+  return arrayString;
+};
+
+const isTypeArray = (jsonArray, type) =>
+  jsonArray.every((e) => typeof e === type);
+
+const buildPostgresString = (jsonString) =>
+  (jsonString && `'${jsonString.replaceAll("'", "''")}'`) || null;
+
+const insertQuery = (table, jsEntry) => {
+  if (typeof jsEntry !== "object") throw Error("PG Inserts must be objects!");
+  const entry = buildPostgresEntry(jsEntry);
+  const cols = Object.keys(entry);
+  cols.forEach((col, i) => {
+    entry[col] = buildPostgresValue(entry[col]);
+    col[i] = `"${col}"`;
+  });
+  var query = `INSERT INTO ${table}(${cols.join(",")})\n`;
+  query += `VALUES(${Object.values(entry).join(",")})`;
+  return query;
+};
+
+const deleteQuery = (table, jsEntry) => {
+  if (typeof jsEntry !== "object")
+    throw Error("PG Delete conditionals must be an object!");
+  const entry = buildPostgresEntry(jsEntry);
+  const cols = Object.keys(entry);
+  const conditionals = [];
+  for (var col of cols) {
+    entry[col] = buildPostgresValue(entry[col]);
+    conditionals.push(`x.${col}=${entry[col]}`);
+  }
+  return `DELETE FROM ${table} x WHERE ${conditionals.join(" AND ")}`;
+};
+
+const onConflictUpdate = (conflicts, updates) => {
+  if (!Array.isArray(conflicts)) throw Error("PG Conflicts must be an array!");
+  if (typeof updates !== "object") throw Error("PG Updates must be objects!");
+  const entry = buildPostgresEntry(updates);
+  var query = `ON CONFLICT (${conflicts.join(",")}) DO UPDATE SET\n`;
+  const cols = Object.keys(entry);
+  for (var col of cols) {
+    entry[col] = buildPostgresValue(entry[col]);
+  }
+  query += cols.map((c) => `${c}=${entry[c]}`).join(",");
+  return query;
+};
+
+const clearTableQuery = (table) => {
+  return `TRUNCATE ${table}`;
+};
+
+const selectWhereQuery = (table, jsEntry, joinWith) => {
+  if (typeof jsEntry !== "object") throw Error("PG Where must be an object!");
+  const where = buildPostgresEntry(jsEntry);
+  const cols = Object.keys(where);
+  var query = `SELECT * FROM ${table} AS x WHERE\n`;
+  for (var col of cols) {
+    where[col] = buildPostgresValue(where[col]);
+  }
+  return (query += cols.map((c) => `x.${c}=${where[c]}`).join(joinWith));
+};
+
+const updateWhereQuery = (table, updates, wheres, joinWith) => {
+  if (typeof updates !== "object") throw Error("PG Updates must be an object!");
+  if (typeof wheres !== "object") throw Error("PG Wheres must be an object!");
+  const update = buildPostgresEntry(updates);
+  const where = buildPostgresEntry(wheres);
+  const updateCols = Object.keys(update);
+  const whereCols = Object.keys(where);
+  var query = `UPDATE ${table}\n`;
+  var updateQuery = updateCols
+    .map((c) => `${c} = ${buildPostgresValue(update[c])}`)
+    .join(",");
+  var whereQuery = whereCols
+    .map((c) => `${c} = ${buildPostgresValue(where[c])}`)
+    .join(joinWith);
+  return (query += `SET ${updateQuery} WHERE ${whereQuery}`);
+};
+
+const updateWhereAnyQuery = (table, updates, wheres) =>
+  updateWhereQuery(table, updates, wheres, " OR ");
+
+const updateWhereAllQuery = (table, updates, wheres) =>
+  updateWhereQuery(table, updates, wheres, " AND ");
+
+const selectWhereAnyQuery = (table, where) =>
+  selectWhereQuery(table, where, " OR ");
+
+const selectWhereAllQuery = (table, where) =>
+  selectWhereQuery(table, where, " AND ");
+
+module.exports = {
+  selectWhereAnyQuery,
+  selectWhereAllQuery,
+  updateWhereAnyQuery,
+  updateWhereAllQuery,
+  insertQuery,
+  deleteQuery,
+  buildPostgresValue,
+  onConflictUpdate,
+  clearTableQuery,
+};
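The builders interpolate values straight into SQL strings rather than using parameterized queries, so the quote-doubling in buildPostgresString is the only guard on inputs. One quirk worth noting: inside insertQuery, `col[i] = `"${col}"`` assigns into the string col rather than the cols array, so column names are emitted unquoted. A sketch of typical output (values invented for illustration):

const { insertQuery, selectWhereAnyQuery } = require("./postgres-common");
console.log(insertQuery("users", { cairo_id: "abc", storage: 0 }));
// INSERT INTO users(cairo_id,storage)
// VALUES('abc',0)
console.log(selectWhereAnyQuery("files", { id: 7 }));
// SELECT * FROM files AS x WHERE
// x.id=7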
31 src/utils/postgres.js Normal file
@@ -0,0 +1,31 @@
+// Imports
+const { migrate } = require("postgres-migrations");
+const pgp = require("pg-promise")();
+const moment = require("moment");
+
+// Ensure dates get saved as UTC date strings
+// This prevents the parser from doing strange datetime operations
+pgp.pg.types.setTypeParser(1114, (str) => moment.utc(str).format());
+
+// Constants
+const NUBIAN_POSTGRES_DISABLED = process.env.NUBIAN_POSTGRES_DISABLED ?? false;
+const database = "nubian";
+const dbConfig = {
+  database,
+  user: process.env.NUBIAN_POSTGRES_USER,
+  password: process.env.NUBIAN_POSTGRES_PASSWORD,
+  host: process.env.NUBIAN_POSTGRES_HOST,
+  port: 5432,
+  ensureDatabaseExists: true,
+};
+
+const configure = async () => {
+  if (NUBIAN_POSTGRES_DISABLED) return logInfo("POSTGRES", "Postgres Disabled");
+  await migrate(dbConfig, "src/db-migrations");
+  // Override the global variable DB
+  global.PG = pgp(dbConfig);
+  await PG.connect();
+  logSuccess("POSTGRES", `Connected to database ${database}!`);
+};
+
+module.exports = { configure };
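configure() deliberately runs the numbered migrations in src/db-migrations before attaching the pg-promise instance to global.PG, so by the time index.js starts listening every CRUD module can query without importing a connection. A minimal sketch, assuming configure() has already resolved:

// Any module loaded after startup can use the global handle:
PG.query("SELECT COUNT(*) FROM users").then((rows) => console.log(rows));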
55 src/utils/storage-util.js Normal file
@@ -0,0 +1,55 @@
+// Imports
+const fs = require("fs");
+const { resolve: rsPath } = require("path");
+const multer = require("multer");
+// Constants
+const bucketsPath = process.env.NUBIAN_BUCKET_PATH ?? "buckets/";
+// Methods
+const bucketStorage = (bucketStoragePath, containerCb) =>
+  multer.diskStorage({
+    destination: (req, file, cb) =>
+      cb(null, upsertDestination(req, bucketStoragePath, containerCb)),
+    filename: (req, file, cb) => {
+      const n = file.originalname.replaceAll(" ", "_");
+      const fileName = `${Date.now()}-${n}`;
+      req.on("aborted", () => cancelUpload(req, fileName));
+      cb(null, fileName);
+    },
+  });
+
+const buildBucketUpload = (bucketStoragePath, fieldName, containerCb) =>
+  multer({
+    storage: bucketStorage(bucketStoragePath, containerCb),
+  }).single(fieldName);
+
+const cancelUpload = (req, fileName) => {
+  const path = rsPath(req.bucketStoragePath, fileName);
+  deletePath(path);
+};
+
+const deletePath = (path) => fs.existsSync(path) && fs.unlinkSync(path);
+
+const deleteUpload = (bucketStoragePath, container, filename) =>
+  deletePath(fetchUpload(bucketStoragePath, container, filename));
+
+const fetchUpload = (bucketStoragePath, container, filename) =>
+  rsPath(bucketsPath, bucketStoragePath, container, filename);
+
+const upsertDestination = (req, bucketStoragePath, containerCb) => {
+  var container = "";
+  if (containerCb) {
+    if (typeof containerCb !== "function")
+      throw Error("Container Callback must be a function!");
+    container = containerCb(req);
+  }
+  req.bucketStoragePath = rsPath(bucketsPath, bucketStoragePath, container);
+  fs.mkdirSync(req.bucketStoragePath, { recursive: true });
+  return req.bucketStoragePath;
+};
+
+module.exports = {
+  buildBucketUpload,
+  cancelUpload,
+  deleteUpload,
+  fetchUpload,
+};
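The stash route builds its uploader from these helpers; the same pattern in isolation (bucket and field names mirror the route, the cairo id is illustrative):

const { buildBucketUpload, fetchUpload } = require("./storage-util");
const upload = buildBucketUpload("uploads", "user-selected-file", (req) => req.user.cairo_id);
// After the multer middleware runs, the saved file resolves to:
// fetchUpload("uploads", "<cairo_id>", req.file.filename)
// i.e. buckets/uploads/<cairo_id>/<timestamp>-<original_name>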