Alexandria commit
- Commit to using Alexandria DB
- Fixed a bug where loading was attempted for a user that didn't exist
parent 3ee7a3a515
commit 1f9970929f
12 changed files with 61 additions and 664 deletions
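
The user-loading fix described above boils down to a find-or-create lookup keyed on the new cairoId field. Below is a minimal editor's sketch of that pattern, condensed from the storage and schema changes later in this diff; the inline model, connection options, and quota arithmetic are illustrative rather than the committed code (the committed schema lives in the schemas/user module shown further down).

// Sketch only: the find-or-create behaviour this commit introduces, condensed for illustration.
// Assumes mongoose ^5.13 and a MongoDB instance reachable at the Alexandria URL from config.json.
const mongoose = require("mongoose");

mongoose.connect("mongodb://alexandria.dunestorm.net/nubian?authSource=admin", {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

// Illustrative inline model standing in for the committed user schema.
const users = mongoose.model(
  "user",
  new mongoose.Schema({
    cairoId: { type: String, required: true },
    usedStorage: Number,
    storage: Number,
    owned: Array,
    shared: Array,
  })
);

function createUser(cairoId) {
  return users.create({
    cairoId,
    usedStorage: 0,
    storage: 2048 * 1048576, // UserStorageSize * UserStorageUnit from config.json
    owned: [],
    shared: [],
  });
}

// Before: a lookup for an unknown user handed callers an empty result and broke loading.
// After: the missing user is created on the spot and returned.
function getUserByCairoId(cairoId) {
  return users.findOne({ cairoId }).then((user) => user || createUser(cairoId));
}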

.dockerignore (new normal file, 1 addition)
@@ -0,0 +1 @@
src/uploads/

@@ -1,8 +1,8 @@
FROM node:latest
RUN apt-get update && apt-get upgrade -y
WORKDIR /dunestorm/nubian
COPY package.json /dunestorm/nubian/
COPY src /dunestorm/nubian/src
COPY package.json .
COPY src src
RUN npm i

CMD ["npm","start"]

package.json (20 lines changed)
@@ -1,38 +1,32 @@
{
"name": "duneserver",
"name": "nubian",
"version": "1.0.0",
"description": "Dunemask Web server",
"description": "Express backend for Khufu",
"main": "server.js",
"scripts": {
"start": "node src/server.js --no-warnings",
"test-server": "nodemon src/server.js"
"start-dev": "nodemon src/server.js"
},
"repository": {
"type": "git",
"url": "git+https://github.com/Dunemask/npmserver.git"
"url": "git+https://gitlab.com/Dunemask/nubian.git"
},
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/Dunemask/npmserver/issues"
"url": "https://gitlab.com/Dunemask/nubian"
},
"homepage": "https://github.com/Dunemask/npmserver#readme",
"homepage": "https://gitlab.com/Dunemask/nubian#readme",
"dependencies": {
"adm-zip": "^0.5.5",
"axios": "^0.21.1",
"bcrypt": "^5.0.1",
"body-parser": "^1.19.0",
"cors": "^2.8.5",
"express": "^4.17.1",
"express-bearer-token": "^2.4.0",
"express-session": "^1.17.2",
"install": "^0.13.0",
"lodash": "^4.17.21",
"mongodb": "^4.0.1",
"mongoose": "^5.13.3",
"multer": "^1.4.2",
"path": "^0.12.7",
"rimraf": "^3.0.2",
"uuid-with-v6": "^1.1.2"
"rimraf": "^3.0.2"
}
}

@@ -18,8 +18,6 @@ mongoose.connect(`${config.Storage.AlexandriaUrl}/nubian?authSource=admin`, {
const users = require("../schemas/user");
const files = require("../schemas/file");

const zipDir = resolvePath(config.Storage.ZipPath);

function authorizedToView(userId, file) {
if (`${file.owner}` == (userId = `${userId}`) || file.public) return true;
if (file.view.includes(userId) || file.edit.includes(userId)) return true;

@@ -88,38 +86,26 @@ function publicfyFiles(userId, targetFiles) {
});
}

function createUser(cairoUuid) {
return users.create(
{
cairoUuid,
function createUser(cairoId) {
return users.create({
cairoId,
usedStorage: 0,
storage: config.Storage.UserStorageSize * config.Storage.UserStorageUnit,
owned: [],
shared: [],
},
(err, result) => {
if (err) return err;
return result;
}
);
}

function getUserByCairoUuid(cairoUuid) {
return users.findOne({ cairoUuid: cairoUuid }, (err, result) => {
if (result == null) createUser(cairoUuid);
if (err) console.error(err);
});
}

function getUserById(userId) {
return users.findOne({ _id: userId }, (err, result) => {
if (result == null) createUser(cairoUuid);
if (err) console.error(err);
function getUserByCairoId(cairoId) {
return users.findOne({ cairoId }).then((user) => {
console.log("FOUND vvvv",user);
if (!user) return createUser(cairoId);
return user;
});
}

function uploadFile(userId, fileData) {
return getUserById(userId).then((user) => {
function uploadFile(cairoId, fileData) {
return getUserByCairoId(cairoId).then((user) => {
if (user.usedStorage + fileData.size > user.storage) return null;
return users
.updateOne({ _id: userId }, { $inc: { usedStorage: fileData.size } })

@@ -144,31 +130,11 @@ function createFile(userId, fileData) {
view: [],
});
}

function cleanZips() {
console.log("Would clean zips");
return;
var zipUuid;
const time = Date.now();
readdir(zipDir).forEach((file) => {
zipUuid = file.substring(0, file.indexOf(".zip"));
fileStorage.updateEntry(zipUuid, "zips", (entry, deleteEntry) => {
if (entry == null) return;
if (entry.building === true) return;
if (entry.exp <= time) {
deleteEntry();
fremove(entry.path);
}
});
});
}
module.exports = {
deleteFiles,
getFile,
createUser,
getUserById,
getUserByCairoUuid,
getUserByCairoId,
publicfyFiles,
uploadFile,
cleanZips,
};
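
For orientation, a hedged usage sketch of the reworked storage helpers above; the cairoId value and file object are placeholders, and the require path is an assumption rather than something shown in this diff.

// Assumed path; adjust to wherever the storage module shown above actually lives.
const storage = require("./src/api/storage");

// Loading by cairoId now provisions the user on first sight instead of failing.
storage.getUserByCairoId("example-cairo-id").then((user) => {
  console.log(`${user.usedStorage} of ${user.storage} bytes used`);
});

// Uploads are keyed by cairoId and resolve to null when the quota would be exceeded.
storage.uploadFile("example-cairo-id", { name: "notes.txt", size: 2048 }).then((result) => {
  if (result === null) console.log("upload rejected: storage quota exceeded");
});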

@@ -1,35 +1,33 @@
//Module Imports
const { resolve: resolvePath } = require("path");
const { existsSync: fexists, unlinkSync: fremove } = require("fs");
const uuidGen = require("uuid-with-v6").v6;
//Local Imports
const storage = require("./storage");
const config = require("../config.json");

function load(uid) {
return storage.getUserByCairoUuid(uid);
function load(cairoId) {
return storage.getUserByCairoId(cairoId);
}

/**
* Create a user with a uuid (should use Dunestorm API to login)
* Create a user with a cairoId (should use Dunestorm API to login)
*/
function createUser(uuid) {
storage.createUser(uuid);
function createUser(cairoId) {
storage.createUser(cairoId);
}
/**
* Creates file entry given aspects of a file updated
*/
function uploadFile(uuid, fileData) {
return storage.uploadFile(uuid, fileData);
function uploadFile(cairoId, fileData) {
return storage.uploadFile(cairoId, fileData);
}
/**
* Deletes files.
* Requires Uuid to garuntee permission to delete a file
* Requires cairoId to garuntee permission to delete a file
* Sorts files by user before deleting to speed up reference updates
*/
function deleteFiles(uuid, targetFiles) {
//Sort files by fileuuid to remove entries from the various users
return storage.deleteFiles(uuid, targetFiles).then((deleteData) => {
function deleteFiles(cairoId, targetFiles) {
return storage.deleteFiles(cairoId, targetFiles).then((deleteData) => {
var files = deleteData.files;
var deleteFails = deleteData.failed;
files.forEach((file) => {

@@ -44,11 +42,9 @@ function deleteFiles(uuid, targetFiles) {
});
}
/**
* Returns a list of fileUuids that the user owns
* Returns a list of filecairoIds that the user owns
*/
function getOwnedFiles(userId) {
return storage.getUserById(userId).then((user) => {
const fileList = user.owned;
function getOwnedFiles(fileList) {
var files = new Array(fileList.length);
fileList.forEach(
(file, i) =>

@@ -57,29 +53,28 @@ function getOwnedFiles(userId) {
))
);
return Promise.all(files);
});
}
/**
* TODO: Impliment Advanced Sharing
* Shares file with various people, and various permissions
*/
function shareFile(uuid, targetFile) {
console.log(uuid, "requesting to share file");
function shareFile(cairoId, targetFile) {
console.log(cairoId, "requesting to share file");
console.log(targetFile);
}
/**
* TODO: Impliment Advanced Sharing
* Returns all files shared with a user
*/
function getSharedFiles(uuid) {
return storage.getSharedFileList(uuid);
function getSharedFiles(cairoId) {
return storage.getSharedFileList(cairoId);
}
/**
* Checks if a the user is the owner and then toggles the list of files to public
*/
function publicfyFiles(uuid, files) {
function publicfyFiles(cairoId, files) {
var publicfyFails = [];
storage.publicfyFiles(uuid, files);
storage.publicfyFiles(cairoId, files);
return publicfyFails.length > 0 && publicfyFails;
}
module.exports = {

@@ -1,21 +1,15 @@
{
"Storage": {
"AlexandriaUrl": "mongodb://alexandria.dunestorm.net",
"DesertPath": "src/desert/",
"UploadPath": "src/uploads/",
"ZipPath": "zips/",
"UserStorageSize": 2048,
"UserStorageUnit": 1048576,
"UploadMaxSize": "",
"ZipClickExpire": 60000,
"ZipDownloadExpire": 720000
"UploadMaxSize": ""
},
"Server": {
"Port": 52001,
"Debug": false,
"BodyLimit": "5mb",
"ZipRemovalInterval": 1800000,
"jwtHeader": "authorization",
"authServer": "http://cairo.dunestorm.net:52000/api/user/data"
}
}

@@ -1,33 +0,0 @@
{
"desertPath": "src/desert/",
"schema": {
"files": {
"tokenList": "abcdefghijklmnopqrstuvwxyz0123456789",
"tokenSplitters": "0123456789abcdef",
"entrySplit": 500,
"attr": [
"fileUuid",
"path",
"owner",
"name",
"date",
"size",
"public",
"edit",
"view"
]
},
"zips": {
"tokenList": "abcdefghijklmnopqrstuvwxyz0123456789",
"tokenSplitters": "0123456789abcdef",
"entrySplit": 500,
"attr": ["owner", "path", "exp"]
},
"uuid": {
"tokenList": "abcdefghijklmnopqrstuvwxyz0123456789",
"tokenSplitters": "0123456789abcdef",
"entrySplit": 500,
"attr": ["owned", "shared", "storage", "usedStorage"]
}
}
}

@@ -1,151 +0,0 @@
//Module Imports
const {
existsSync: fexists,
readFileSync: fread,
writeFileSync: fwrite,
rmSync: fremove,
} = require("fs");
const { join: joinPath, basename } = require("path");
const _ = require("lodash");
//Local Imports
const Pyramid = require("./pyramid");
//Constants
//Misc Functions
function isObject(obj) {
return typeof obj === "object" && !(obj instanceof Array);
}
//Main Class
module.exports = class Pharoah {
constructor(desertPath, schema) {
this.desertPath = desertPath;
var mainStorageName = Object.keys(schema)[0];
var mainStorage = schema[mainStorageName];
this.pyramids = {};
this.pyramidSchemas = this.#buildSchema(schema);
}

addEntry(query, pyramidName, entry) {
this.pyramids[pyramidName].pyramid.addStorageEntry(query, entry);
if (!(this.pyramids[pyramidName].refs instanceof Array)) {
return;
}
for (var r of this.pyramids[pyramidName].refs) {
if (entry[r] != null)
this.pyramids[r].pyramid.addStorageEntry(entry[r], query);
}
}

deleteEntry(query, pyramidName) {
const entry = this.pyramids[pyramidName].pyramid.removeStorageEntry(query);
if (entry == null) return;
if (!(this.pyramids[pyramidName].refs instanceof Array)) return entry;
for (var r of this.pyramids[pyramidName].refs) {
if (entry[r] != null)
this.pyramids[r].pyramid.removeStorageEntry(entry[r]);
}
return entry;
}

updateEntry(query, pyramidName, cb) {
if (cb === null || typeof cb !== "function")
throw new Error("Error: Callback cannot be " + cb);
const mainPyramid = this.pyramids[pyramidName];
mainPyramid.pyramid.loadStorageEntry(
query,
(entry, update, deleteEntry) => {
const oldWasObject = isObject(entry);
const oldEntry = _.cloneDeep(entry);
var entryDeleted;
entry = cb(entry, () => {
if (oldWasObject)
this.#updateDeleteRef(deleteEntry, oldEntry, mainPyramid.refs);
else deleteEntry();
entryDeleted = true;
});
if (entry == null || entryDeleted || _.isEqual(oldEntry, entry)) return;
update(entry);
//If there are no ref objects, just return.
if (!(mainPyramid.refs instanceof Array)) return;
const newIsObject = isObject(entry);
//If both are objects, compare a difference in the refs,
if (oldWasObject && newIsObject) {
for (var r of mainPyramid.refs) {
if (!_.isEqual(oldEntry[r], entry[r]))
this.#updateRef(this.pyramids[r], oldEntry[r], entry[r], query);
}
//If only the old is an object, remove the old references
} else if (oldWasObject && !newIsObject) {
for (var r of mainPyramid.refs) {
if (entry[r] != null)
this.pyramids[r].pyramid.removeStorageEntry(oldEntry[r]);
}
//If only the new is an object, only add the new references
} else if (!oldWasObject && newIsObject) {
for (var r of mainPyramid.refs) {
if (entry[r] != null)
this.pyramids[r].pyramid.addStorageEntry(entry[r], query);
}
}
}
);
}

loadEntry(query, pyramidName) {
const data = this.pyramids[pyramidName].pyramid.loadStorageEntry(query);
return data;
}

loadEntryByReference(query, refPyramid, targetPyramid) {
const ref = this.loadEntry(query, refPyramid);
if (ref == null) return;
if (typeof ref !== "string") throw new Error("Pointer Must Be a String!");
return this.loadEntry(ref, targetPyramid);
}

updateEntryByReference(query, refPyramid, targetPyramid, cb) {
const ref = this.loadEntry(query, refPyramid);
if (ref == null) return;
if (typeof ref !== "string") throw new Error("Pointer Must Be a String!");
this.updateEntry(ref, targetPyramid, cb);
}

#buildSchema(schema) {
var pyramidPath, schem;
for (var s in schema) {
pyramidPath = joinPath(this.desertPath, s);
if (fexists(pyramidPath)) schem = { pyramid: Pyramid.load(pyramidPath) };
else
schem = {
pyramid: new Pyramid(
pyramidPath,
schema[s].tokenList,
schema[s].tokenSplitters,
schema[s].entrySplit
),
};
if (schema[s].attr != null) schem.attr = schema[s].attr;
if (schema[s].refs != null) schem.refs = schema[s].refs;
this.pyramids[s] = schem;
}
}

#updateRef(pyramidSchema, oldRef, newRef, pointer) {
if (oldRef == null && pointer != null)
pyramidSchema.pyramid.addStorageEntry(newRef, pointer);
else if (oldRef != null && newRef != null)
pyramidSchema.pyramid.moveStorageEntry(oldRef, newRef);
else if (oldRef != null && newRef == null)
pyramidSchema.pyramid.removeStorageEntry(oldRef);
}

#updateDeleteRef(pyramidDelete, entry, refs) {
//Calls Delete within the loadStorageEntry function
pyramidDelete();
if (refs != null && refs instanceof Array)
for (var r of refs) {
if (entry[r] != null)
this.pyramids[r].pyramid.removeStorageEntry(entry[r]);
}
return true;
}
};

@@ -1,365 +0,0 @@
//Module Imports
const {
mkdirSync: mkdir,
existsSync: fexists,
readFileSync: fread,
writeFileSync: fwrite,
rmSync: fremove,
} = require("fs");
const { join: joinPath, basename } = require("path");
//Constants
const pyramidEncoding = "utf8";
const pointerExtension = ".json";
const storageExtension = ".json";
module.exports = class Pyramid {
static load(filePath) {
const pointerFile = filePath + pointerExtension;
if (!fexists(pointerFile))
throw new Error(`Pyramid Not Found At ${pointerFile}`);
const pointerData = JSON.parse(fread(pointerFile, pyramidEncoding));
return new Pyramid(
filePath,
pointerData.tokenList,
pointerData.tokenSplitters,
pointerData.entrySplit,
false
);
}
/**
* Constructor for Pyramid
* Note: entrySplit is set to 250 automatically
* This was the best balance of ram usage and time
* Sample size was 50k items time: 63s average
*/
constructor(
filePath,
tokenList,
tokenSplitters,
entrySplit = 250,
create = true
) {
if (filePath == null) throw new Error(`Filepath cannot be: ${filePath}`);
if (tokenSplitters == null)
throw new Error(`tokenSplitters cannot be: ${tokenSplitters}`);
this.name = basename(filePath);
this.pointerFile = filePath + pointerExtension;
this.storagePath = filePath;
this.tokenSplitters = tokenSplitters;
this.entrySplit = entrySplit;
this.tokenList = tokenList;
//Create Physical Directories
if (create) {
this.#createPyramid();
}
}
/**
* Writes Entry to a queried storage
*/
addStorageEntry(query, entry) {
var pointer = this.#loadPointer();
var storageToken = this.#queryStorageToken(query, pointer);
var storageCount = this.#getStorageCount(storageToken, pointer);
if (storageCount + 1 >= this.entrySplit) {
pointer = this.#splitStorage(storageToken);
storageToken = this.#queryStorageToken(query, pointer);
}
var storageData = this.#loadStorage(storageToken);
if (storageData[query] == null)
this.#modifyStorageCount(storageToken, 1, pointer);
storageData[query] = entry;
this.#writeStorage(storageToken, storageData);
}
/**
* Removes Entry from a queried storage
*/
removeStorageEntry(query) {
const pointer = this.#loadPointer();
const storageToken = this.#queryStorageToken(query, pointer);
var storageData = this.#loadStorage(storageToken);
const entry = storageData[query];
delete storageData[query];
if (entry != null) this.#modifyStorageCount(storageToken, -1, pointer);
this.#writeStorage(storageToken, storageData);
return entry;
}

/**
* Load User Entry from a query
*/
loadStorageEntry(query, cb) {
const pointer = this.#loadPointer();
const storageToken = this.#queryStorageToken(query, pointer);
var storageData = this.#loadStorage(storageToken);
const entry = storageData[query];
if (cb !== null && typeof cb === "function") {
cb(
entry,
//Update Function
(modifiedEntry) => {
if (storageData[query] != null)
this.#writeEntryChanges(
query,
storageToken,
storageData,
modifiedEntry
);
else this.addStorageEntry(query, modifiedEntry);
},
//Delete Function
() => {
delete storageData[query];
if (entry != null)
this.#modifyStorageCount(storageToken, -1, pointer);
this.#writeStorage(storageToken, storageData);
}
);
} else return entry;
}
/**
* Modify entry provided by a query
*/
modifyStorageEntry(query, entry) {
const pointer = this.#loadPointer();
const storageToken = this.#queryStorageToken(query, pointer);
var storageData = this.#loadStorage(storageToken);
if (storageData[entry] == null)
throw new Error(`Query ${query} not found!`);
storageData[query] = entry;
this.#writeStorage(storageToken, storageData);
}

/**
* Moves one queries entry to another queries entry.
*/
moveStorageEntry(oldQuery, newQuery) {
const pointer = this.#loadPointer();
const oldStorageToken = this.#queryStorageToken(oldQuery);
const newStorageToken = this.#queryStorageToken(newQuery);
if (oldStorageToken === newStorageToken) return;
//Remove old data
var storageData = this.#loadStorage(oldStorageToken);
if (storageData == null) return;
const entry = storageData[oldQuery];
if (entry == null) return;
delete storageData[oldQuery];
this.#modifyStorageCount(oldStorageToken, -1, pointer);
this.#writeStorage(oldStorageToken, storageData);
//Add new data
storageData = this.#loadStorage(newStorageToken);
if (storageData[newQuery] == null)
this.#modifyStorageCount(newStorageToken, 1, pointer);
storageData[newQuery] = entry;
this.#writeStorage(newStorageToken, storageData);
}
/**
* Writes storageData to specific storage
*/
#writeEntryChanges(query, storageToken, storageData, modifiedEntry) {
storageData[query] = modifiedEntry;
this.#writeStorage(storageToken, storageData);
}
/**
* Returns Count of entries in storage (indexed by the pointer)
*/
#getStorageCount(storageToken, pointer = this.#loadPointer()) {
var parent = pointer.entries;
for (var st of storageToken) {
parent = parent[st];
}
return parent;
}
/**
* Sets Count of entries in storage (stored in pointerFile)
*/
#setStorageCount(storageToken, value, pointer = this.#loadPointer()) {
var parentStack = [];
var parent = pointer.entries;
//Create "stack" of the parent references
for (var st of storageToken) {
parentStack.push(parent);
parent = parent[st];
}
//Add Value to the stack
parentStack.push(value);
//Add the modified child to the previous parent, rinse, repeat, victory!
for (var p = parentStack.length - 1; p >= 0; p--) {
if (parentStack[p - 1] == null) break;
parentStack[p - 1][storageToken[p - 1]] = parentStack[p];
}
pointer.entries = parentStack[0];
this.#writePointer(pointer);
return pointer;
}
/**
* Modifies the storageCount by the given value
*/
#modifyStorageCount(storageToken, value, pointer = this.#loadPointer()) {
var parentStack = [];
var parent = pointer.entries;
//Create "stack" of the parent references
for (var st of storageToken) {
parentStack.push(parent);
parent = parent[st];
}
//Mdofiy original value and add it to the stack
parentStack.push(parent + value);
//Add the modified child to the previous parent, rinse, repeat, victory!
for (var p = parentStack.length - 1; p >= 0; p--) {
if (parentStack[p - 1] == null) break;
parentStack[p - 1][storageToken[p - 1]] = parentStack[p];
}
pointer.entries = parentStack[0];
this.#writePointer(pointer);
return pointer;
}
/**
* Returns storageToken that would contain query
*/
#queryStorageToken(query, pointer = this.#loadPointer()) {
var parent = pointer.entries;
var tokenLocation;
var tokenStack = "";
for (var l = 0; l < query.length; l++) {
//Get the category to 'sink' current letter into
if (this.tokenSplitters.includes(query[l])) tokenLocation = query[l];
else tokenLocation = this.#queryStorageTokenSplitter(query[l]);
tokenStack += tokenLocation;
//If tokenStack is exactly their token ex:amamam
//Immediately append the first part of the letter stack
if (tokenStack === query) {
var extraTokens = 0;
while ((parent = parent[this.tokenSplitters[0]])) extraTokens++;
return tokenStack + this.tokenSplitters[0].repeat(extraTokens);
}
//If Parent has children, keep going
if (
typeof parent[tokenLocation] === "object" &&
!(parent[tokenLocation] instanceof Array)
) {
parent = parent[tokenLocation];
} else break;
}
return tokenStack;
}
/**
* Returns token that should envelop the queried token
* EX: tokenSplitters = [a,m] Query = c returns a
*/
#queryStorageTokenSplitter(query) {
const queryIndex = this.tokenList.indexOf(query);
if (queryIndex === -1) return this.tokenSplitters[0];
for (var s in this.tokenSplitters) {
if (this.tokenList.indexOf(this.tokenSplitters[s]) >= queryIndex)
return this.tokenSplitters[Math.max(0, s - 1)];
}
return this.tokenSplitters[s];
}
/**
* Creates Pointer File and intializes all storages
*/
#createPyramid() {
if (fexists(this.pointerFile)) {
console.warn(
"Warning: DatabasePointer already exists!",
this.pointerFile,
"Skipping.."
);
return;
}
var databaseSchema = {
entrySplit: this.entrySplit,
tokenSplitters: this.tokenSplitters,
tokenList: this.tokenList,
entries: {},
};
if (!fexists(this.storagePath)) mkdir(this.storagePath);
//Add All
for (var s of this.tokenSplitters) {
databaseSchema.entries[s] = 0;
this.#writeStorage(s, {});
}
this.#writePointer(databaseSchema);
}
/**
* Writes data to Pyramid Storage based on storageToken
*/
#writeStorage(storageToken, data) {
const storageFile = this.#storageFile(storageToken);
fwrite(storageFile, JSON.stringify(data));
}
/**
* Write data pointerFile
*/
#writePointer(data) {
fwrite(this.pointerFile, JSON.stringify(data));
}
/**
* Load pointerFile Object
*/
#loadPointer() {
return JSON.parse(fread(this.pointerFile, pyramidEncoding));
}
/**
* Loads storage object given a storageToken
*/
#loadStorage(storageToken) {
return JSON.parse(fread(this.#storageFile(storageToken), pyramidEncoding));
}
/**
* Deletes a particular storage
*/
#removeStorage(storageToken) {
const storageFile = this.#storageFile(storageToken);
if (fexists(storageFile)) fremove(storageFile);
}
/**
* Joins the storageToken path and pyramidpath
*/
#storageFile(token) {
return joinPath(this.storagePath, token + storageExtension);
}
/**
* Splits the specified storage (located with storageToken)
* into the different files specified by this.tokenSplitters
*/
#splitStorage(oldToken, pointer = this.#loadPointer()) {
const oldStorage = this.#loadStorage(oldToken);
const oldKeys = Object.keys(oldStorage);
var newStorages = {};
var newStorageCount = {};
//Set Default Count and create empty objects for new storage
for (var s of this.tokenSplitters) {
newStorageCount[s] = 0;
newStorages[oldToken + s] = {};
}
/*Calculate next token to add to the storagePath for each entry
*If the token is already <= the length of the new token
*add the standard first token.
*Ex: entry: mouse into mouse -> mousea
*Else calculate next letter
*
* Then add to new storage count and assign storage
* In theory the total ram useage should never be more than double
* than the original size of the storage
*/
var migratedTokenStack, additionalToken;
for (var k of oldKeys) {
migratedTokenStack = oldToken;
if (k.length <= oldToken.length + 1)
additionalToken = this.tokenSplitters[0];
else
additionalToken = this.#queryStorageTokenSplitter(
this.tokenSplitters,
k[oldToken.length]
);
migratedTokenStack += additionalToken;
newStorages[migratedTokenStack][k] = oldStorage[k];
newStorageCount[additionalToken]++;
}
for (var st in newStorages) {
this.#writeStorage(st, newStorages[st]);
}
this.#removeStorage(oldToken);
return this.#setStorageCount(oldToken, newStorageCount);
}
};

@@ -17,9 +17,9 @@ const cairoMiddleware = (req, res, next) => {
headers: { authorization: `Bearer ${req.token}` },
})
.then((authRes) => {
if (authRes.status !== 200) return res.status(authres.status);
if (authRes.data != null && authRes.data.uuid != null) {
asUser.load(authRes.data.uuid).then((user) => {
if (authRes.status !== 200) return res.sendStatus(authres.status);
if (authRes.data != null && authRes.data.id != null) {
asUser.load(authRes.data.id).then((user) => {
req.user = user;
next();
});

@@ -30,14 +30,15 @@ const cairoMiddleware = (req, res, next) => {
else res.sendStatus(500);
});
};
router.use(cairoMiddleware);

const authMiddleware = (req, res, next) => {
if (req.token == null) return res.sendStatus(401);
next();
};
router.use(cairoMiddleware);

router.get("/files", authMiddleware, (req, res) =>
asUser.getOwnedFiles(req.user._id).then((files) => {
asUser.getOwnedFiles(req.user.owned).then((files) => {
res.status(200).json(files);
})
);
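
A hedged sketch of hitting the route above through the Cairo bearer-token middleware. Only the port (Server.Port, 52001) comes from config.json; the mount path and token are assumptions, since the router's mount point is not shown in this diff.

// Assumes the server from this commit is listening on port 52001 and that CAIRO_TOKEN
// is a bearer token the Cairo auth server configured as authServer will accept.
const axios = require("axios");

const CAIRO_TOKEN = "replace-with-a-real-token"; // placeholder
const MOUNT_PATH = "/api/stash"; // assumption: wherever stashRouter is mounted in server.js

axios
  .get(`http://localhost:52001${MOUNT_PATH}/files`, {
    headers: { authorization: `Bearer ${CAIRO_TOKEN}` },
  })
  .then((res) => console.log(res.data)) // the user's owned files
  .catch((err) => console.error(err.response ? err.response.status : err.message));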

@@ -3,7 +3,7 @@ const Schema = mongoose.Schema;
const ObjId = mongoose.Types.ObjectId;
const user = new Schema(
{
cairoUuid: {
cairoId: {
type: String,
required: true,
},

@@ -1,15 +1,12 @@
//Imports
const express = require("express");
const session = require("express-session");
const cors = require("cors");
const bodyParser = require("body-parser");
const bearerToken = require('express-bearer-token');
const secret = require("uuid-with-v6").v6;
const bearerToken = require("express-bearer-token");
//Local Imports
const { Web, StatusCode, Server } = require("./config.json");
//Import Routers
const stashRouter = require("./routes/stash");
const storage = require("./api/storage");
//Define Constants & Setup Database
const app = express();
const port = Server.Port;

@@ -21,8 +18,6 @@ const corsOptions = {
};
//Set Up Express session and View engine
app.use(cors(corsOptions));
app.use(bearerToken())
app.use(session({ secret: secret(), saveUninitialized: false, resave: false }));
app.use(bearerToken());
app.use(bodyParser.json({ limit: Server.BodyLimit })); // parse application/json
app.use(bodyParser.urlencoded({ limit: Server.BodyLimit, extended: false })); // parse application/x-www-form-urlencoded
//Test if there is a

@@ -41,6 +37,5 @@ const startServer = () => {
console.log("Recieved Shutdown Signal!");
process.exit();
});
setInterval(() => storage.cleanZips(), Server.ZipRemovalInterval);
};
startServer();