Compare commits: master...ep/Feb9-20
1 commit: 82da329536
45 changed files with 1843 additions and 1655 deletions
@@ -1,31 +0,0 @@
# name: Deploy Edge Proxy
# run-name: ${{ gitea.actor }} Deploy Edge Proxy
# on:
# push:
# branches: [ master ]

# env:
# GARDEN_DEPLOY_ACTION: minecluster-proxy

# jobs:
# deploy-edge:
# steps:
# # Setup Oasis
# - name: Oasis Setup
# uses: https://gitea.dunemask.dev/elysium/elysium-actions@oasis-setup-auto
# with:
# deploy-env: edge
# infisical-token: ${{ secrets.INFISICAL_ELYSIUM_EDGE_READ_TOKEN }}
# # Deploy to Edge Cluster
# - name: Deploy to Edge Cluster
# run: garden deploy $GARDEN_DEPLOY_ACTION --force --force-build --env usw-edge
# working-directory: ${{ env.OASIS_WORKSPACE }}
# # Alert via Discord
# - name: Discord Alert
# if: always()
# uses: https://gitea.dunemask.dev/elysium/elysium-actions@discord-status
# with:
# status: ${{ job.status }}
# channel: deployments
# header: DEPLOY EDGE
# additional-content: "Minecluster Proxy"
@@ -1,44 +0,0 @@
name: Deploy USW-MC
run-name: ${{ forgejo.actor }} Deploy USW-MC
on:
  push:
    branches: [master]

env:
  GARDEN_DEPLOY_ACTION: minecluster

jobs:
  deploy-edge:
    steps:
      # Configure proper kubeconfig (Used when cluster does not match the edge environment)
      - name: Get usw-mc deployment kubeconfig
        uses: https://forgejo.dunemask.dev/elysium/elysium-actions@infisical-env
        with:
          infisical-token: ${{ secrets.INFISICAL_ELYSIUM_EDGE_READ_TOKEN }}
          project-id: ${{ vars.INFISICAL_DEPLOYMENTS_PROJECT_ID }}
          secret-envs: edge
          secret-paths: /kubernetes/usw-mc
      # Setup Oasis
      - name: Oasis Setup
        uses: https://forgejo.dunemask.dev/elysium/elysium-actions@oasis-setup-auto
        with:
          deploy-env: edge
          infisical-token: ${{ secrets.INFISICAL_ELYSIUM_EDGE_READ_TOKEN }}
          infisical-project: ${{ vars.INFISICAL_DEPLOYMENTS_PROJECT_ID }}
          extra-secret-paths: /dashboard
          extra-secret-envs: edge
      # Deploy to Edge
      - name: Deploy to Edge env
        run: garden deploy $GARDEN_DEPLOY_ACTION --force --force-build --env usw-edge
        working-directory: ${{ env.OASIS_WORKSPACE }}
        env: # (Used when cluster does not match the edge environment)
          MCL_KUBECONFIG: ${{ env.KUBERNETES_CONFIG_USW_MC }}
      # Alert via Discord
      - name: Discord Alert
        if: always()
        uses: https://forgejo.dunemask.dev/elysium/elysium-actions@discord-status
        with:
          status: ${{ job.status }}
          channel: deployments
          header: DEPLOY MC
          additional-content: "Minecluster Server Manager Deployment"
@@ -1,42 +0,0 @@
# name: QA API Tests
# run-name: ${{ gitea.actor }} QA API Test
# on:
# pull_request:
# branches: [ master ]

# env:
# REPO_DIR: ${{ gitea.workspace }}/minecluster
# GARDEN_LINK_ACTION: build.minecluster-image

# jobs:
# qa-api-tests:
# steps:
# # Setup Oasis
# - name: Oasis Setup
# uses: https://gitea.dunemask.dev/elysium/elysium-actions@oasis-setup-auto
# with:
# deploy-env: ci
# infisical-token: ${{ secrets.INFISICAL_ELYSIUM_CI_READ_TOKEN }}
# # Test Code
# - name: Checkout repository
# uses: actions/checkout@v3
# with:
# path: ${{ env.REPO_DIR }}
# # Garden link
# - name: Link Repo code to Garden
# run: garden link action $GARDEN_LINK_ACTION $REPO_DIR --env usw-ci --var cubit-projects=cairo,minecluster
# working-directory: ${{ env.OASIS_WORKSPACE }}
# # Cubit CI Tests
# - name: Run Cubit tests in CI env
# run: garden workflow qa-api-tests --env usw-ci --var ci-ttl=25m
# working-directory: ${{ env.OASIS_WORKSPACE }}
# # Discord Alert
# - name: Discord Alert
# if: always()
# uses: https://gitea.dunemask.dev/elysium/elysium-actions@discord-status
# with:
# status: ${{ job.status }}
# channel: ci
# header: QA API Tests
# additional-content: "CI Namespace: `${{env.CI_NAMESPACE}}`"

@@ -1,17 +0,0 @@
name: S3 Repo Backup
run-name: ${{ forgejo.actor }} S3 Repo Backup
on:
  push:
    branches: [ master ]

jobs:
  s3-repo-backup:
    steps:
      - name: S3 Backup
        uses: https://forgejo.dunemask.dev/elysium/elysium-actions@s3-backup
        with:
          infisical-token: ${{ secrets.INFISICAL_ELYSIUM_EDGE_READ_TOKEN }}
          infisical-project: ${{ vars.INFISICAL_DEPLOYMENTS_PROJECT_ID }}
      - name: Status Alert
        if: always()
        run: echo "The Job ended with status ${{ job.status }}."
.gitea/workflows/deploy-edge-proxy.yml (new file, 23 lines)
@@ -0,0 +1,23 @@
name: Deploy Edge Proxy
run-name: ${{ gitea.actor }} Deploy Edge Proxy
on:
  push:
    branches: [ master ]

env:
  GITEA_TOKEN: ${{ secrets.ELYSIUM_ORG_READ_TOKEN }}
  KUBECONFIG_BASE64: ${{ secrets.KUBECONFIG_USW_EDGE }}
  GARDEN_DEPLOY_ACTION: minecluster-proxy


jobs:
  deploy-edge:
    steps:
      - name: Oasis Setup
        uses: https://gitea.dunemask.dev/elysium/oasis-action@master
        with:
          gitea-token: ${{ env.GITEA_TOKEN }}
          kubeconfig: ${{ env.KUBECONFIG_BASE64 }}
      - name: Deploy to Edge env
        run: garden deploy $GARDEN_DEPLOY_ACTION --force --force-build --env usw-edge
        working-directory: ${{ env.OASIS_WORKSPACE }}
.gitea/workflows/deploy-edge.yml (new file, 26 lines)
@@ -0,0 +1,26 @@
name: Deploy USW-MC
run-name: ${{ gitea.actor }} Deploy USW-MC
on:
  push:
    branches: [ master ]

env:
  GITEA_TOKEN: ${{ secrets.ELYSIUM_ORG_READ_TOKEN }}
  KUBECONFIG_BASE64: ${{ secrets.KUBECONFIG_USW_MC }}
  GARDEN_DEPLOY_ACTION: minecluster
  # Additional Deploy Envars
  POSTGRES_PROD_PASSWORD: ${{ secrets.POSTGRES_PROD_PASSWORD }}
  MCL_KUBECONFIG: ${{ secrets.KUBECONFIG_USW_MC }}


jobs:
  deploy-edge:
    steps:
      - name: Oasis Setup
        uses: https://gitea.dunemask.dev/elysium/oasis-action@master
        with:
          gitea-token: ${{ env.GITEA_TOKEN }}
          kubeconfig: ${{ env.KUBECONFIG_BASE64 }}
      - name: Deploy to Edge env
        run: garden deploy $GARDEN_DEPLOY_ACTION --force --force-build --env usw-mc
        working-directory: ${{ env.OASIS_WORKSPACE }}
.gitea/workflows/qa-api-tests.yml (new file, 36 lines)
@@ -0,0 +1,36 @@
name: QA API Tests
run-name: ${{ gitea.actor }} QA API Test
on:
  pull_request:
    branches: [ master ]

env:
  REPO_DIR: ${{ gitea.workspace }}/minecluster
  KUBECONFIG_BASE64: ${{ secrets.KUBECONFIG_USW_DEV }}
  GITEA_TOKEN: ${{ secrets.ELYSIUM_ORG_READ_TOKEN }}
  GARDEN_LINK_ACTION: build.minecluster-image

jobs:
  qa-api-tests:
    steps:
      - name: Oasis Setup
        uses: https://gitea.dunemask.dev/elysium/oasis-action@master
        with:
          gitea-token: ${{ env.GITEA_TOKEN }}
          kubeconfig: ${{ env.KUBECONFIG_BASE64 }}
      # Test Code
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          path: ${{ env.REPO_DIR }}
      # Garden tests
      - name: Link Repo code to Garden
        run: garden link action $GARDEN_LINK_ACTION $REPO_DIR --env usw-ci --var cubit-projects=cairo,minecluster
        working-directory: ${{ env.OASIS_WORKSPACE }}
      # Cubit CI Tests
      - name: Run Cubit tests in CI env
        run: garden workflow qa-api-tests --env usw-ci --var ci-ttl=25
        working-directory: ${{ env.OASIS_WORKSPACE }}
      - name: Status Alert
        if: always()
        run: echo "The Job ended with status ${{ job.status }}."
.gitea/workflows/s3-repo-backup.yml (new file, 31 lines)
@@ -0,0 +1,31 @@
name: S3 Repo Backup
run-name: ${{ gitea.actor }} S3 Repo Backup
on:
  push:
    branches: [ master ]

env:
  S3_BACKUP_ENDPOINT: https://s3.dunemask.dev
  S3_BACKUP_KEY_ID: gitea-repo-backup
  S3_BACKUP_KEY: ${{ secrets.S3_REPO_BACKUP_KEY }}
  REPO_DIR: ${{ gitea.workspace }}/${{ gitea.respository }}
jobs:
  s3-repo-backup:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          path: ${{ env.REPO_DIR }}
      - name: S3 Backup
        uses: peter-evans/s3-backup@v1
        env:
          ACCESS_KEY_ID: ${{ env.S3_BACKUP_KEY_ID }}
          SECRET_ACCESS_KEY: ${{ env.S3_BACKUP_KEY }}
          MIRROR_SOURCE: ${{ env.REPO_DIR }}
          MIRROR_TARGET: backups/gitea-repositories/${{ gitea.repository }}
          STORAGE_SERVICE_URL: ${{env.S3_BACKUP_ENDPOINT}}
        with:
          args: --overwrite --remove
      - name: Status Alert
        if: always()
        run: echo "The Job ended with status ${{ job.status }}."
.gitignore vendored (1 change)
@@ -1,3 +1,2 @@
node_modules/
.env

dist/app.js vendored (2 changes)
@@ -8,4 +8,4 @@ const kc = new k8s.KubeConfig();
kc.loadFromDefault();
}

main().catch((e)=>{console.error(e)});
main().catch((e)=>{console.log(e)});
@@ -4,7 +4,6 @@ import {
listServerFiles,
removeServerItem,
uploadServerItem,
moveServerItems,
} from "../k8s/server-files.js";
import { sendError } from "../util/ExpressClientError.js";
import { checkAuthorization } from "../database/queries/server-queries.js";

@@ -80,18 +79,3 @@ export async function getItem(req, res) {
})
.catch(sendError(res));
}

export async function moveItems(req, res) {
const serverSpec = req.body;
if (!serverSpec.id) return res.status(400).send("Server id missing!");
if (!serverSpec.destination)
return res.status(400).send("Destination required!");
if (!serverSpec.origin) return res.status(400).send("Origin required!");
if (!serverSpec.files || !Array.isArray(serverSpec.files))
return res.status(400).send("Files required!");
const authorized = await checkAuthorization(serverSpec.id, req.cairoId);
if (!authorized) return res.sendStatus(403);
moveServerItems(serverSpec)
.then(() => res.sendStatus(200))
.catch(sendError(res));
}
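For context, a minimal sketch (not part of the diff) of how the moveItems endpoint above is reached from the browser; the /api/files/move path comes from the route files later in this comparison, while the bearer-token header shape is an assumption based on the auth middleware.

// Illustrative client call for the file-move endpoint shown above.
async function moveServerItems(serverId, files, destination, origin, token) {
  const res = await fetch("/api/files/move", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`, // assumed Cairo bearer token
    },
    body: JSON.stringify({ id: serverId, files, destination, origin }),
  });
  if (!res.ok) throw new Error(`Move failed: ${res.status}`);
  // the controller replies with a bare 200 status on success
}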
@@ -9,8 +9,6 @@ import {
import ExpressClientError, { sendError } from "../util/ExpressClientError.js";
import { toggleServer } from "../k8s/k8s-server-control.js";
import { checkAuthorization } from "../database/queries/server-queries.js";
import { WARN } from "../util/logging.js";
import modifyServerResources from "../k8s/server-modify.js";

const dnsRegex = new RegExp(
`^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$`,

@@ -71,9 +69,6 @@ function payloadFilter(req, res) {
return res
.status(400)
.send("Extra ports must be a list of strings with length of 5!");
if (host !== host.toLowerCase())
WARN("CREATE", "Host automatically being lowercasified...");
req.body.host = host.toLowerCase();
return "filtered";
}

@@ -163,15 +158,10 @@ export async function getServer(req, res) {
export async function modifyServer(req, res) {
if (payloadFilter(req, res) !== "filtered") return;
const serverSpec = req.body;
if (!!serverSpec.host)
WARN(
"MODIFY",
"Warning, hostname changing is not implimented yet! Please ask the developer if you'd like to see this added!",
);
try {
await checkServerId(req.cairoId, serverSpec);
const serverEntry = await modifyServerEntry(serverSpec);
await modifyServerResources(serverEntry);
// await createServerResources(serverEntry);
res.sendStatus(200);
} catch (e) {
sendError(res)(e);
@@ -1,84 +0,0 @@
import { S3, GetObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { basename } from "node:path";
import { getServerEntry } from "../database/queries/server-queries.js";
import { ERR } from "../util/logging.js";
import { checkAuthorization } from "../database/queries/server-queries.js";
const s3Region = "us-east-1";

async function getS3BackupData(serverId) {
const serverEntry = await getServerEntry(serverId);
if (!serverEntry?.backupHost) return undefined;
const s3Config = {
credentials: {
accessKeyId: serverEntry.backupId,
secretAccessKey: serverEntry.backupKey,
},
endpoint: `https://${serverEntry.backupHost}`,
forcePathStyle: true,
region: s3Region,
};
const pathParts = serverEntry.backupPath.split("/");
if (pathParts[0] === "") pathParts.shift();
const bucket = pathParts.shift();
const backupPrefix = pathParts.join("/");
return { s3Config, bucket, backupPrefix };
}

export async function listS3Backups(req, res) {
const serverSpec = req.body;
if (!serverSpec.id) return res.status(400).send("Server id missing!");
const authorized = await checkAuthorization(serverSpec.id, req.cairoId);
if (!authorized)
return res
.status(403)
.send("You do not have permission to access that server!");
const s3Data = await getS3BackupData(serverSpec.id);
if (!s3Data) return res.status(409).send("Backup not configured!");
const { s3Config, bucket, backupPrefix } = s3Data;
const s3Client = new S3(s3Config);
try {
const listResponse = await s3Client.listObjectsV2({
Bucket: bucket,
Prefix: backupPrefix,
});
const files =
listResponse.Contents?.map((f) => ({
name: basename(f.Key),
lastModified: f.LastModified,
path: f.Key,
size: f.Size,
})) ?? [];
res.json(files);
} catch (e) {
ERR("S3", e);
res.sendStatus(500);
}
}

export async function getS3BackupUrl(req, res) {
const serverSpec = req.body;
if (!serverSpec.id) return res.status(400).send("Server id missing!");
if (!serverSpec.backupPath)
return res.status(400).send("Backup path missing!");
const authorized = await checkAuthorization(serverSpec.id, req.cairoId);
if (!authorized)
return res
.status(403)
.send("You do not have permission to access that server!");
const s3Data = await getS3BackupData(serverSpec.id);
if (!s3Data) return res.status(409).send("Backup not configured!");
const { s3Config, bucket } = s3Data;
const s3Client = new S3(s3Config);
try {
const command = new GetObjectCommand({
Bucket: bucket,
Key: serverSpec.backupPath,
});
const url = await getSignedUrl(s3Client, command, { expiresIn: 3600 });
res.json({ url });
} catch (e) {
ERR("S3", e);
res.sendStatus(500);
}
}
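For context, a minimal sketch (not part of the diff) of how the getS3BackupUrl endpoint above might be called from the browser, mirroring what BackupsDialog.jsx and the /s3/backup-url query helper elsewhere in this comparison do; the Authorization header shape is an assumption.

// Illustrative only: request a presigned backup URL and open it in a new tab.
async function openBackup(serverId, backupPath, token) {
  const res = await fetch("/api/s3/backup-url", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`, // assumed Cairo bearer token
    },
    body: JSON.stringify({ id: serverId, backupPath }),
  });
  if (!res.ok) throw new Error(`Backup URL request failed: ${res.status}`);
  const { url } = await res.json(); // presigned S3 GET URL, valid for roughly an hour
  window.open(url, "_blank");
}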
@@ -26,13 +26,9 @@ export async function webConsoleLogs(socket) {

const log = new k8s.Log(kc);
const logStream = new stream.PassThrough();
var logstreamBuffer = "";
logStream.on("data", (chunk) => {
const bufferString = Buffer.from(chunk).toString();
if (!bufferString.includes("\n")) return (logstreamBuffer += bufferString);
const clientChunks = `${logstreamBuffer}${bufferString}`.split("\n");
for (var c of clientChunks) socket.emit("push", c);
});
logStream.on("data", (chunk) =>
socket.emit("push", Buffer.from(chunk).toString()),
);
log
.log(namespace, mcsPods[0], containerName, logStream, {
follow: true,
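A minimal standalone sketch of the line-buffering pattern used in the hunk above (accumulate chunks until a newline arrives, then emit whole lines); the function name is illustrative, and unlike the snippet above this sketch also keeps the trailing partial line for the next chunk.

import stream from "node:stream";

// Buffer incoming chunks and invoke `emit` once per complete log line.
function lineBufferedStream(emit) {
  const logStream = new stream.PassThrough();
  let buffer = "";
  logStream.on("data", (chunk) => {
    const text = Buffer.from(chunk).toString();
    if (!text.includes("\n")) return (buffer += text);
    const lines = `${buffer}${text}`.split("\n");
    buffer = lines.pop() ?? ""; // hold the incomplete tail until more data arrives
    for (const line of lines) emit(line);
  });
  return logStream;
}

// Usage: const logStream = lineBufferedStream((line) => socket.emit("push", line));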
@@ -1,7 +1,7 @@
CREATE SEQUENCE servers_id_seq;
CREATE TABLE servers (
id bigint NOT NULL DEFAULT nextval('servers_id_seq') PRIMARY KEY,
owner_cairo_id varchar(63),
owner_cairo_id bigint,
host varchar(255) DEFAULT NULL,
name varchar(255) DEFAULT NULL,
version varchar(63) DEFAULT 'latest',
@@ -16,10 +16,6 @@ const getMclName = (host, id) =>
`${host.toLowerCase().replaceAll(".", "-")}-${id}`;

export async function checkAuthorization(serverId, cairoId) {
console.log(
`Checking Authorization for user ${cairoId} for serverId ${serverId}`,
);
if (!cairoId) return false;
const q = selectWhereAllQuery(table, {
id: serverId,
owner_cairo_id: cairoId,

@@ -169,7 +165,7 @@ export async function modifyServerEntry(serverSpec) {
id,
// ownerCairoId: owner_cairo_id, // DIsabled! If these becomes a reqest, please create a new function!
name,
// host, // TODO: Can only be updated if service name is generic and non descriptive
host,
version,
serverType: server_type,
cpu, // TODO: Ignored for now by the K8S manifests

@@ -184,66 +180,28 @@ export async function modifyServerEntry(serverSpec) {
backupInterval: backup_interval,
} = serverSpec;

const q =
updateWhereAllQuery(
table,
{
name,
// host, // TODO: Can only be updated if service name is generic and non descriptive
version,
server_type,
cpu, // TODO: Ignored for now by the K8S manifests
memory,
// storage, // DO NOT INCLUDE THIS KEY, Not all storage providers in kubernetes allow for dynamically resizable PVCs
extra_ports,
backup_enabled,
backup_host,
backup_bucket_path,
backup_id,
backup_key,
backup_interval,
},
{ id },
) + ` RETURNING *;`;
try {
const entries = await pg.query(q);
const {
const q = updateWhereAllQuery(
table,
{
name,
host, // Should always read the database value
server_type: serverType,
storage,
extra_ports: extraPorts,
backup_enabled: backupEnabled,
backup_host: backupHost,
backup_bucket_path: backupPath,
backup_id: backupId,
backup_key: backupKey,
backup_interval: backupInterval,
} = entries[0];

const mclName = getMclName(host, id);

return {
name, // Could change
mclName, // Shouldn't change
id, // Won't change
host, // TODO: Can only be updated if service name is generic and non descriptive, this returns the host from the database
host,
version,
serverType,
server_type,
cpu, // TODO: Ignored for now by the K8S manifests
memory,
storage,
extraPorts,
backupEnabled,
backupHost,
backupPath,
backupId,
backupKey,
backupInterval,
};
} catch (e) {
asExpressClientError(e);
}
// storage, // DO NOT INCLUDE THIS KEY, Not all storage providers in kubernetes allow for dynamically resizable PVCs
extra_ports,
backup_enabled,
backup_host,
backup_bucket_path,
backup_id,
backup_key,
backup_interval,
},
{ id },
);

return pg.query(q);
}

export async function getServerEntries() {
@@ -6,7 +6,7 @@ env:
image: garethflowers/ftp-server
imagePullPolicy: IfNotPresent
livenessProbe:
exec: { command: ["/bin/sh", "-c", "netstat -a | grep -q ftp"] }
exec: { command: ["echo"] }
failureThreshold: 20
initialDelaySeconds: 0
periodSeconds: 5

@@ -15,7 +15,7 @@ livenessProbe:
name: changeme-name-ftp
ports: [] # Programatically add all the ports for easier readability, Ports include: 20,21,40000-400009
readinessProbe:
exec: { command: ["/bin/sh", "-c", "netstat -a | grep -q ftp"] }
exec: { command: ["echo"] }
failureThreshold: 20
initialDelaySeconds: 0
periodSeconds: 5
@@ -20,4 +20,4 @@ spec:
selector:
app: changeme-app
sessionAffinity: None
type: LoadBalancer
type: ClusterIP
@@ -11,6 +11,8 @@ metadata:
namespace: changeme-namespace
spec:
internalTrafficPolicy: Cluster
ipFamilies:
- IPv4
ipFamilyPolicy: SingleStack
ports: # Programatically add all FTP ports. Port range includes 20, 21, 40000-40001
- name: minecraft
@@ -9,6 +9,4 @@ try {
} catch (e) {
kc.loadFromDefault();
}
if(kc.contexts.length === 1) kc.setCurrentContext(kc.contexts[0].name);
if(!kc.currentContext) throw new Error("Could not infer current context! Please set it manually in the Kubeconfig!");
export default kc;
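A sketch of the full loading pattern implied by the hunk above; the try block is not shown in the diff, so the loadFromCluster() call here is an assumption about what it might contain. The context-inference lines are taken from the hunk itself.

import k8s from "@kubernetes/client-node";

const kc = new k8s.KubeConfig();
try {
  kc.loadFromCluster(); // assumed: prefer the in-cluster service account
} catch (e) {
  kc.loadFromDefault(); // fall back to the local kubeconfig
}
// Infer the current context when exactly one is available.
if (kc.contexts.length === 1) kc.setCurrentContext(kc.contexts[0].name);
if (!kc.currentContext)
  throw new Error("Could not infer current context! Please set it manually in the Kubeconfig!");
export default kc;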
@@ -18,7 +18,7 @@ const namespace = process.env.MCL_SERVER_NAMESPACE;

const loadYaml = (f) => yaml.load(fs.readFileSync(path.resolve(f), "utf8"));

export function createExtraService(serverSpec) {
function createExtraService(serverSpec) {
const { mclName, id, extraPorts } = serverSpec;
if (!extraPorts) return;
const serviceYaml = loadYaml("lib/k8s/configs/extra-svc.yml");

@@ -49,7 +49,7 @@ export function createExtraService(serverSpec) {
return serviceYaml;
}

export function createBackupSecret(serverSpec) {
function createBackupSecret(serverSpec) {
if (!serverSpec.backupEnabled) return; // If backup not defined, don't create RCLONE secret
const { mclName, id, backupId, backupKey, backupHost } = serverSpec;
const backupYaml = loadYaml("lib/k8s/configs/backup-secret.yml");

@@ -67,7 +67,7 @@ export function createBackupSecret(serverSpec) {
`endpoint = ${backupHost}`,
`acl = private`,
`no_check_bucket = true`,
`no_check_container = true`,
`no_check_container = true`
].join("\n");
backupYaml.data["rclone.conf"] = Buffer.from(rcloneConfig).toString("base64");
return backupYaml;

@@ -153,7 +153,7 @@ function createServerDeploy(serverSpec) {
return deployYaml;
}

export function createServerService(serverSpec) {
function createServerService(serverSpec) {
const { mclName, host, id } = serverSpec;
const serviceYaml = loadYaml("lib/k8s/configs/server-svc.yml");
serviceYaml.metadata.annotations["ingress.qumine.io/hostname"] = host;
@@ -2,8 +2,7 @@ import ftp from "basic-ftp";
import { ERR } from "../util/logging.js";
import { getServerAssets } from "./k8s-server-control.js";
import ExpressClientError from "../util/ExpressClientError.js";
import { Readable, Transform } from "node:stream";
import { dirname, basename } from "node:path";
import { Readable, Writable, Transform } from "node:stream";

const namespace = process.env.MCL_SERVER_NAMESPACE;


@@ -83,27 +82,16 @@ export async function uploadServerItem(serverSpec, file) {
const { path } = serverSpec;
pathSecurityCheck(path);
await useServerFtp(serverSpec, async (c) => {
await c.ensureDir(dirname(path));
await c.uploadFrom(fileStream, basename(path));
await c.uploadFrom(fileStream, path);
}).catch(handleError);
}

export async function getServerItem(serverSpec) {
export async function getServerItem(serverSpec, writableStream) {
const { path } = serverSpec;
const ds = new Transform({ transform: (c, _e, cb) => cb(null, c) });
const ds = new Transform({ transform: (c, e, cb) => cb(null, c) });
pathSecurityCheck(path);
const ftpTransfer = useServerFtp(serverSpec, async (c) => {
await c.downloadTo(ds, path);
}).catch(handleError);
return { ds, ftpTransfer };
}

export async function moveServerItems(serverSpec) {
const { destination, origin, files } = serverSpec;
useServerFtp(serverSpec, async (c) =>
Promise.all(
files.map((f) => c.rename(`${origin}/${f}`, `${destination}/${f}`)),
),
).catch(handleError);
return files;
}
@@ -1,59 +0,0 @@
import k8s from "@kubernetes/client-node";
import {
createExtraService,
createBackupSecret,
createServerService,
} from "./server-create.js";
import kc from "./k8s-config.js";
import { getServerAssets } from "./k8s-server-control.js";
const k8sCore = kc.makeApiClient(k8s.CoreV1Api);
const namespace = process.env.MCL_SERVER_NAMESPACE;

export default async function modifyServerResources(modifySpec) {
const { id: serverId } = modifySpec;
const serverAssets = await getServerAssets(serverId);
const serverService = createServerService(modifySpec);
const extraService = createExtraService(modifySpec);
const backupSecret = createBackupSecret(modifySpec);
const serverResources = [];

if (!!serverService)
// Will Always Exist
serverResources.push(
k8sCore.replaceNamespacedService(
serverAssets.service.metadata.name,
namespace,
serverService,
),
);

if (!!extraService && !!serverAssets.extraService)
// Might not exist
serverResources.push(
k8sCore.replaceNamespacedService(
serverAssets.extraService.metadata.name,
namespace,
extraService,
),
);
else if (!!extraService)
serverResources.push(
k8sCore.createNamespacedService(namespace, extraService),
);

if (!!backupSecret && !!serverAssets.backupSecret)
// Might not exist
serverResources.push(
k8sCore.replaceNamespacedSecret(
serverAssets.backupSecret.metadata.name,
namespace,
backupSecret,
),
);
else if (!!backupSecret)
serverResources.push(
k8sCore.createNamespacedSecret(namespace, backupSecret),
);

return await Promise.all(serverResources);
}
@@ -2,14 +2,11 @@ import { Router } from "express";
import cairoAuthMiddleware from "./middlewares/auth-middleware.js";
const router = Router();

const cairoProjectId = process.env.MCL_CAIRO_PROJECT;
if(!cairoProjectId) throw Error("Cairo Project Required!");

const ok = (_r, res) => res.sendStatus(200);

function cairoRedirect(req, res) {
res.redirect(
`${process.env.MCL_CAIRO_URL}/cairo/authenticate?redirectUri=${req.query.redirectUri}&projectId=${cairoProjectId}`,
`${process.env.MCL_CAIRO_URL}/cairo/auth?redirectUri=${req.query.redirectUri}`,
);
}

@@ -6,7 +6,6 @@ import {
listFiles,
uploadItem,
getItem,
moveItems,
} from "../controllers/file-controller.js";

import cairoAuthMiddleware from "./middlewares/auth-middleware.js";

@@ -19,7 +18,6 @@ router.post("/list", listFiles);
router.post("/folder", createFolder);
router.delete("/item", deleteItem);
router.post("/item", getItem);
router.post("/move", moveItems);
router.post("/upload", multerMiddleware.single("file"), uploadItem);

export default router;
@@ -4,36 +4,22 @@ import bearerTokenMiddleware from "express-bearer-token";
import { ERR, VERB } from "../../util/logging.js";

// Constants
const { MCL_CAIRO_URL, MCL_CAIRO_PROJECT } = process.env;
const { MCL_CAIRO_URL } = process.env;
const cairoAuthMiddleware = Router();

const cairoAuthenticate = async (token) => {
const config = { headers: { Authorization: `Bearer ${token}` } };
return fetch(`${MCL_CAIRO_URL}/api/${MCL_CAIRO_PROJECT}/auth/credentials`, config).then(async (res) => {
if (res.status >= 300) {
const errorMessage = await res
.json()
.then((data) => JSON.stringify(data))
.catch(() => res.statusText);
throw Error(
`Could not authenticate with user, receieved message: ${errorMessage}`,
);
}

return res.json();
});
return fetch(`${MCL_CAIRO_URL}/api/user/info`, config).then((res) =>
res.json(),
);
};

// Middleware
const cairoAuthHandler = (req, res, next) => {
if (!req.token) return res.status(401).send("Cairo auth required!");
VERB("AUTH", `${MCL_CAIRO_URL}/api/user/info`);
cairoAuthenticate(req.token)
.then((authData) => {
console.log(authData);
if (!authData?.user?.id)
throw Error(`Cairo didn't return the expected data! ${authData?.user?.id}`);
req.cairoId = authData?.user?.id;
})
.then((authData) => (req.cairoId = authData.id))
.then(() => next())
.catch((err) => {
ERR("AUTH", err.response ? err.response.data : err.message);
|
|||
import { Router, json as jsonMiddleware } from "express";
|
||||
import { getS3BackupUrl, listS3Backups } from "../controllers/s3-controller.js";
|
||||
import cairoAuthMiddleware from "./middlewares/auth-middleware.js";
|
||||
|
||||
const router = Router();
|
||||
router.use([cairoAuthMiddleware, jsonMiddleware()]);
|
||||
|
||||
router.post("/backups", listS3Backups);
|
||||
router.post("/backup-url", getS3BackupUrl);
|
||||
|
||||
export default router;
|
|
@@ -8,7 +8,6 @@ import systemRoute from "../routes/system-route.js";
import serverRoute from "../routes/server-route.js";
import filesRoute from "../routes/files-route.js";
import reactRoute from "../routes/react-route.js";
import s3Route from "../routes/s3-route.js";
import {
logErrors,
clientErrorHandler,

@@ -28,7 +27,6 @@ export default function buildRoutes(pg, skio) {
router.use("/api/system", systemRoute);
router.use("/api/server", serverRoute);
router.use("/api/files", filesRoute);
router.use("/api/s3", s3Route);
router.use(["/mcl", "/mcl/*"], reactRoute); // Static Build Route
/*router.use(logErrors);
router.use(clientErrorHandler);
lib/storage/s3-integration.js (new file, 34 lines)
@@ -0,0 +1,34 @@
import multer from "multer";
import multerS3 from "multer-s3";
import AWS from "aws-sdk";

// Environment Variables
const {
MCL_S3_ENDPOINT: s3Endpoint,
MCL_S3_ACCESS_KEY_ID: s3KeyId,
MCL_S3_ACCESS_KEY: s3Key,
} = process.env;

export const mcl = "mcl";

export const s3 = new AWS.S3({
endpoint: s3Endpoint,
accessKeyId: s3KeyId,
secretAccessKey: s3Key,
sslEnabled: true,
s3ForcePathStyle: true,
});

const storage = multerS3({
s3,
bucket,
contentType: multerS3.AUTO_CONTENT_TYPE,
metadata: (req, file, cb) => {
cb(null, { fieldName: file.fieldname });
},
key: (req, file, cb) => {
cb(null, Date.now().toString());
},
});

export const upload = multer({ storage });
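For reference, a minimal sketch (not part of the diff) of how the exported upload middleware might be mounted on an Express route. Note that the committed file above references a `bucket` variable that is never defined, so a working setup would need to supply one (the module also exports the string `mcl`, which may be the intended bucket name); the route path here is an assumption based on the frontend upload call elsewhere in this comparison.

import express from "express";
import { upload } from "./s3-integration.js";

const app = express();

// Hypothetical route: accept a single "file" field and reply with the stored object key.
app.post("/api/files/upload", upload.single("file"), (req, res) => {
  res.json({ key: req.file?.key }); // multer-s3 attaches the S3 object key to req.file
});

app.listen(3000);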
package-lock.json generated (2474 changes)
File diff suppressed because it is too large.
package.json (16 changes)
@@ -1,6 +1,6 @@
{
"name": "minecluster",
"version": "0.0.1-alpha.1",
"version": "0.0.1-alpha.0",
"description": "Minecraft Server management using Kubernetes",
"type": "module",
"scripts": {

@@ -8,7 +8,7 @@
"start": "node dist/app.js",
"dev:server": "nodemon dist/app.js",
"dev:react": "vite",
"lint": "npx prettier -w src lib vite.config.js",
"kub": "nodemon lib/k8s.js",
"start:dev": "concurrently -k \"MCL_DEV_PORT=52025 npm run dev:server\" \" MCL_VITE_DEV_PORT=52000 MCL_VITE_BACKEND_URL=http://localhost:52025 npm run dev:react\" -n s,v -p -c green,yellow",
"start:dev:garden": "concurrently -k \"npm run dev:server\" \"npm run dev:react\" -n s,v -p -c green,yellow"
},

@@ -24,9 +24,9 @@
"devDependencies": {
"@emotion/react": "^11.11.3",
"@emotion/styled": "^11.11.0",
"@mui/icons-material": "^5.15.9",
"@mui/material": "^5.15.9",
"@tanstack/react-query": "^5.20.1",
"@mui/icons-material": "^5.15.7",
"@mui/material": "^5.15.7",
"@tanstack/react-query": "^5.18.1",
"@vitejs/plugin-react": "^4.2.1",
"chonky": "^2.3.2",
"chonky-icon-fontawesome": "^2.3.2",

@@ -39,12 +39,11 @@
"react-router-dom": "^6.22.0",
"react-toastify": "^10.0.4",
"socket.io-client": "^4.7.4",
"vite": "^5.1.1"
"vite": "^5.0.12"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.529.1",
"@aws-sdk/s3-request-presigner": "^3.529.1",
"@kubernetes/client-node": "^0.20.0",
"aws-sdk": "^2.1550.0",
"basic-ftp": "^5.0.4",
"bcrypt": "^5.1.1",
"chalk": "^5.3.0",

@@ -58,7 +57,6 @@
"pg-promise": "^11.5.4",
"postgres-migrations": "^5.3.0",
"rcon-client": "^4.2.4",
"react-dropzone": "^14.2.3",
"socket.io": "^4.7.4",
"uuid": "^9.0.1"
}
src/components/files/ChonkyStyledFileBrowser.jsx (new file, 42 lines)
@@ -0,0 +1,42 @@
// ChonkyFullFileBrowser.tsx
import { forwardRef, memo } from "react";
import {
StylesProvider,
createGenerateClassName,
} from "@material-ui/core/styles";

import {
FileBrowser,
FileList,
FileContextMenu,
FileNavbar,
FileToolbar,
setChonkyDefaults,
FileBrowserHandle,
FileBrowserProps,
} from "chonky";

import { ChonkyIconFA } from "chonky-icon-fontawesome";

setChonkyDefaults({ iconComponent: ChonkyIconFA });

const muiJSSClassNameGenerator = createGenerateClassName({
// Seed property is used to add a prefix classes generated by material ui.
seed: "chonky",
});

export default memo(
forwardRef((props, ref) => {
const { onScroll } = props;
return (
<StylesProvider generateClassName={muiJSSClassNameGenerator}>
<FileBrowser ref={ref} {...props}>
<FileNavbar />
<FileToolbar />
<FileList onScroll={onScroll} />
<FileContextMenu />
</FileBrowser>
</StylesProvider>
);
}),
);
@@ -1,4 +1,4 @@
import { useState, useEffect } from "react";
import { useState, useEffect, memo } from "react";
import useMediaQuery from "@mui/material/useMediaQuery";
import { useTheme } from "@mui/material/styles";
import Button from "@mui/material/Button";

@@ -10,17 +10,7 @@ import Toolbar from "@mui/material/Toolbar";
import TextEditor from "./TextEditor.jsx";
import { cairoAuthHeader } from "@mcl/util/auth.js";

const textFileTypes = [
"properties",
"txt",
"yaml",
"yml",
"json",
"env",
"toml",
"tml",
"text",
];
const textFileTypes = ["properties", "txt", "yaml", "yml", "json", "env"];
const imageFileTypes = ["png", "jpeg", "jpg"];

export const supportedFileTypes = [...textFileTypes, ...imageFileTypes];

@@ -54,7 +44,6 @@ export default function FilePreview(props) {
}

async function onSave() {
if (!isTextFile) return;
const formData = new FormData();
const blob = new Blob([modifiedText], { type: "plain/text" });
formData.append("file", blob, name);

@@ -88,7 +77,7 @@ export default function FilePreview(props) {
<Toolbar sx={{ display: { sm: "none" } }} />
<DialogTitle>{name}</DialogTitle>
<DialogContent>
{isTextFile && <TextEditor text={fileText} onChange={editorChange} />}
<TextEditor text={fileText} onChange={editorChange} />
</DialogContent>
<DialogActions>
<Button autoFocus onClick={dialogToggle}>
|
@ -1,7 +1,5 @@
|
|||
import { useState, useEffect, useMemo, useRef } from "react";
|
||||
import Box from "@mui/material/Box";
|
||||
import Dropzone from "react-dropzone";
|
||||
|
||||
import {
|
||||
FileBrowser,
|
||||
FileContextMenu,
|
||||
|
@ -18,9 +16,8 @@ import {
|
|||
createServerFolder,
|
||||
deleteServerItem,
|
||||
getServerItem,
|
||||
moveServerItems,
|
||||
previewServerItem,
|
||||
} from "@mcl/queries";
|
||||
import { previewServerItem } from "../../util/queries";
|
||||
import { cairoAuthHeader } from "@mcl/util/auth.js";
|
||||
|
||||
import { supportedFileTypes } from "./FilePreview.jsx";
|
||||
|
@ -35,7 +32,6 @@ export default function MineclusterFiles(props) {
|
|||
ChonkyActions.DownloadFiles,
|
||||
ChonkyActions.CopyFiles,
|
||||
ChonkyActions.DeleteFiles,
|
||||
ChonkyActions.MoveFiles,
|
||||
],
|
||||
[],
|
||||
);
|
||||
|
@ -101,22 +97,16 @@ export default function MineclusterFiles(props) {
|
|||
function uploadFileSelection(e) {
|
||||
if (!e.target.files || e.target.files.length === 0) return;
|
||||
const { files } = e.target;
|
||||
uploadMultipleFiles(files);
|
||||
}
|
||||
|
||||
function uploadMultipleFiles(files) {
|
||||
Promise.all([...files].map((f) => uploadFile(f)))
|
||||
.catch((e) => console.log("Error uploading a file", e))
|
||||
.then(updateFiles);
|
||||
}
|
||||
|
||||
async function uploadFile(file) {
|
||||
const filePath = file.path.startsWith("/") ? file.path : `/${file.path}`;
|
||||
const formData = new FormData();
|
||||
formData.append("file", file);
|
||||
formData.append("id", serverId);
|
||||
const path = `${[...dirStack].join("/")}${filePath}`;
|
||||
formData.append("path", path);
|
||||
formData.append("path", [...dirStack, file.name].join("/"));
|
||||
await fetch("/api/files/upload", {
|
||||
method: "POST",
|
||||
body: formData,
|
||||
|
@ -142,15 +132,6 @@ export default function MineclusterFiles(props) {
|
|||
);
|
||||
}
|
||||
|
||||
function moveFile(movePayload) {
|
||||
const { files: filePayload, destination: destinationPayload } = movePayload;
|
||||
if (!destinationPayload.isDir || filePayload.length === 0) return;
|
||||
const files = filePayload.map((f) => f.name);
|
||||
const dest = destinationPayload.id;
|
||||
const origin = dirStack.join("/");
|
||||
moveServerItems(serverId, files, dest, origin).then(updateFiles);
|
||||
}
|
||||
|
||||
function fileClick(chonkyEvent) {
|
||||
const { id: clickEvent, payload } = chonkyEvent;
|
||||
if (clickEvent === "open_parent_folder") return openParentFolder();
|
||||
|
@ -160,41 +141,32 @@ export default function MineclusterFiles(props) {
|
|||
return downloadFiles(chonkyEvent.state.selectedFilesForAction);
|
||||
if (clickEvent === "delete_files")
|
||||
return deleteItems(chonkyEvent.state.selectedFilesForAction);
|
||||
if (clickEvent === "move_files") return moveFile(payload);
|
||||
if (clickEvent !== "open_files") return; // console.log(clickEvent);
|
||||
openItem(payload);
|
||||
}
|
||||
|
||||
return (
|
||||
<Dropzone onDrop={uploadMultipleFiles}>
|
||||
{({ getRootProps }) => (
|
||||
<Box
|
||||
className="minecluster-files"
|
||||
sx={{ height: "calc(100vh - 6rem)" }}
|
||||
onDrop={getRootProps().onDrop}
|
||||
>
|
||||
<input
|
||||
type="file"
|
||||
id="file"
|
||||
ref={inputRef}
|
||||
style={{ display: "none" }}
|
||||
onChange={uploadFileSelection}
|
||||
multiple
|
||||
/>
|
||||
<FileBrowser
|
||||
files={files}
|
||||
folderChain={getFolderChain()}
|
||||
onFileAction={fileClick}
|
||||
fileActions={fileActions}
|
||||
darkMode={true}
|
||||
>
|
||||
<FileNavbar />
|
||||
<FileToolbar />
|
||||
<FileList />
|
||||
<FileContextMenu />
|
||||
</FileBrowser>
|
||||
</Box>
|
||||
)}
|
||||
</Dropzone>
|
||||
<Box className="minecluster-files" sx={{ height: "calc(100vh - 6rem)" }}>
|
||||
<input
|
||||
type="file"
|
||||
id="file"
|
||||
ref={inputRef}
|
||||
style={{ display: "none" }}
|
||||
onChange={uploadFileSelection}
|
||||
multiple
|
||||
/>
|
||||
<FileBrowser
|
||||
files={files}
|
||||
folderChain={getFolderChain()}
|
||||
onFileAction={fileClick}
|
||||
fileActions={fileActions}
|
||||
darkMode={true}
|
||||
>
|
||||
<FileNavbar />
|
||||
<FileToolbar />
|
||||
|
||||
<FileList />
|
||||
<FileContextMenu />
|
||||
</FileBrowser>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
|
|
@ -3,8 +3,7 @@ import TextField from "@mui/material/TextField";
|
|||
import Autocomplete from "@mui/material/Autocomplete";
|
||||
import Chip from "@mui/material/Chip";
|
||||
|
||||
const validatePort = (p) =>
|
||||
p !== "25565" && p !== "25575" && p.length < 6 && parseInt(p) < 60_000;
|
||||
const validatePort = (p) => p !== "25565" && p !== "25575" && p.length < 6;
|
||||
|
||||
export default function ExtraPortsOption(props) {
|
||||
const { extraPorts: initExtraPorts } = props;
|
||||
|
@ -31,14 +30,7 @@ export default function ExtraPortsOption(props) {
|
|||
value={extraPorts}
|
||||
onChange={portChange}
|
||||
freeSolo
|
||||
renderInput={(p) => (
|
||||
<TextField
|
||||
{...p}
|
||||
label="Extra Ports"
|
||||
helperText="Remember to press enter to add the port!"
|
||||
FormHelperTextProps={{ sx: { ml: 0 } }}
|
||||
/>
|
||||
)}
|
||||
renderInput={(p) => <TextField {...p} label="Extra Ports" />}
|
||||
renderTags={(value, getTagProps) =>
|
||||
value.map((option, index) => {
|
||||
const defaultChipProps = getTagProps({ index });
|
||||
|
|
|
@@ -1,21 +1,15 @@
import TextField from "@mui/material/TextField";
export default function HostOption(props) {
const { value, onChange, disabled } = props;

function onTextChange(e) {
e.target.value = e.target.value.toLowerCase();
onChange(e);
}
const { value, onChange } = props;

return (
<TextField
label="Host"
value={value ?? ""}
onChange={onTextChange}
onChange={onChange}
helperText="Example: host.mydomain.com"
FormHelperTextProps={{ sx: { ml: 0 } }}
required
disabled={disabled}
/>
);
}
@@ -1,88 +0,0 @@
import { useEffect, useState } from "react";
import useMediaQuery from "@mui/material/useMediaQuery";
import { useTheme } from "@mui/material/styles";
import Button from "@mui/material/Button";
import DialogTitle from "@mui/material/DialogTitle";
import DialogContent from "@mui/material/DialogContent";
import DialogActions from "@mui/material/DialogActions";
import Dialog from "@mui/material/Dialog";
import IconButton from "@mui/material/IconButton";
import Toolbar from "@mui/material/Toolbar";
import Typography from "@mui/material/Typography";
import Stack from "@mui/material/Stack";
import DownloadIcon from "@mui/icons-material/Download";
import { getBackupUrl, getServerBackups } from "../../util/queries";

export function useBackupDialog(isOpen = false) {
const [open, setOpen] = useState(isOpen);
const dialogToggle = () => setOpen(!open);
return [open, dialogToggle];
}

export default function BackupDialog(props) {
const { serverId, open, dialogToggle } = props;
const theme = useTheme();
const fullScreen = useMediaQuery(theme.breakpoints.down("md"));
const [backups, setBackups] = useState([]);

function refreshUpdateList() {
getServerBackups(serverId).then(setBackups);
}
useEffect(() => {
if (!serverId) return;
refreshUpdateList();
}, [serverId, open]);

function normalizeLastModified(lastModified) {
const d = new Date(Date.parse(lastModified));
return `${d.getFullYear()}-${d.getMonth()}-${d.getDate()} ${d.getHours()}:${d.getMinutes()}`;
}

const downloadBackup = (backup) =>
async function openBackupLink() {
const { url } = await getBackupUrl(serverId, backup.path);
window.open(url, "_blank").focus();
};

const normalizedSize = (size) => `${(size / Math.pow(1024, 3)).toFixed(2)}GB`;

return (
<Dialog
fullWidth
maxWidth="lg"
open={open}
fullScreen={fullScreen}
PaperProps={!fullScreen ? { sx: { height: "60%" } } : undefined}
>
<Toolbar sx={{ display: { md: "none" } }} />
<DialogTitle>Backups</DialogTitle>
<DialogContent sx={{ height: "100%" }}>
{backups.map((backup, i) => (
<Stack key={i} sx={{ width: "100%" }} direction="row">
<Typography variant="subtitle2" sx={{ m: "auto 0", width: "40%" }}>
{backup.name}
</Typography>
<Typography variant="subtitle2" sx={{ m: "auto 0", width: "20%" }}>
{normalizeLastModified(backup.lastModified)}
</Typography>
<Typography variant="subtitle2" sx={{ m: "auto 0", width: "40%" }}>
{normalizedSize(backup.size)}
</Typography>

<IconButton
sx={{ marginLeft: "auto" }}
onClick={downloadBackup(backup)}
>
<DownloadIcon />
</IconButton>
</Stack>
))}
</DialogContent>
<DialogActions>
<Button autoFocus onClick={dialogToggle}>
Close
</Button>
</DialogActions>
</Dialog>
);
}
@@ -1,4 +1,4 @@
import { useState } from "react";
import { useState, useEffect } from "react";
import useMediaQuery from "@mui/material/useMediaQuery";
import { useTheme } from "@mui/material/styles";
import Button from "@mui/material/Button";

@@ -19,19 +19,22 @@ export default function RconDialog(props) {
const { server, open, dialogToggle } = props;
const { name: serverName, id: serverId } = server ?? {};
const theme = useTheme();
const fullScreen = useMediaQuery(theme.breakpoints.down("md"));
const fullScreen = useMediaQuery(theme.breakpoints.down("sm"));

return (
<Dialog
fullWidth
maxWidth="lg"
sx={
fullScreen
? {}
: { "& .mcl-MuiDialog-paper": { width: "80%", maxHeight: 555 } }
}
maxWidth="xs"
open={open}
fullScreen={fullScreen}
PaperProps={!fullScreen ? { sx: { height: "60%" } } : undefined}
>
<Toolbar sx={{ display: { md: "none" } }} />
<Toolbar sx={{ display: { sm: "none" } }} />
<DialogTitle>RCON - {serverName}</DialogTitle>
<DialogContent sx={{ height: "100%" }}>
<DialogContent>
<RconView serverId={serverId} />
</DialogContent>
<DialogActions>
|
@ -2,21 +2,9 @@ import { useState, useEffect, useRef } from "react";
|
|||
import Box from "@mui/material/Box";
|
||||
import Button from "@mui/material/Button";
|
||||
import TextField from "@mui/material/TextField";
|
||||
import Skeleton from "@mui/material/Skeleton";
|
||||
import Typography from "@mui/material/Typography";
|
||||
import RconSocket from "./RconSocket.js";
|
||||
import "@mcl/css/rcon.css";
|
||||
|
||||
function RconLogSkeleton() {
|
||||
return (
|
||||
<Skeleton
|
||||
variant="text"
|
||||
width="100%"
|
||||
sx={{ backgroundColor: "rgba(255,255,255,.25)" }}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export default function RconView(props) {
|
||||
const { serverId } = props;
|
||||
const logsRef = useRef(0);
|
||||
|
@ -51,32 +39,16 @@ export default function RconView(props) {
|
|||
}
|
||||
|
||||
return (
|
||||
<Box sx={{ height: "100%", display: "flex", flexWrap: "wrap" }}>
|
||||
<Box
|
||||
className="rconLogsWrapper"
|
||||
ref={logsRef}
|
||||
style={{
|
||||
padding: "1rem",
|
||||
backgroundColor: "rgba(0,0,0,.815)",
|
||||
color: "white",
|
||||
borderRadius: "4px",
|
||||
width: "100%",
|
||||
height: "100%",
|
||||
}}
|
||||
>
|
||||
{logs.length === 0 &&
|
||||
[...Array(20).keys()].map((_v, i) => <RconLogSkeleton key={i} />)}
|
||||
{logs.length > 0 &&
|
||||
logs.map((v, k) => (
|
||||
<Box key={k}>
|
||||
<Typography variant="subtitle2">{v}</Typography>
|
||||
</Box>
|
||||
))}
|
||||
</Box>
|
||||
<Box
|
||||
className="rconActions"
|
||||
sx={{ marginTop: "auto", paddingTop: "1rem", width: "100%" }}
|
||||
>
|
||||
<Box>
|
||||
<div className="rconLogsWrapper" ref={logsRef}>
|
||||
{logs.map((v, k) => (
|
||||
<Box key={k}>
|
||||
{v}
|
||||
<br />
|
||||
</Box>
|
||||
))}
|
||||
</div>
|
||||
<Box className="rconActions">
|
||||
<TextField
|
||||
id="outlined-basic"
|
||||
label="Command"
|
||||
|
@ -84,12 +56,9 @@ export default function RconView(props) {
|
|||
value={cmd}
|
||||
onChange={updateCmd}
|
||||
disabled={!(rcon && rcon.rconLive && !rcon.rconError)}
|
||||
sx={{ width: "100%" }}
|
||||
/>
|
||||
{rcon && rcon.rconLive && !rcon.rconError && (
|
||||
<Button onClick={sendCommand} sx={{ padding: "0 2rem" }}>
|
||||
Send
|
||||
</Button>
|
||||
<Button onClick={sendCommand}>Send</Button>
|
||||
)}
|
||||
{!(rcon && rcon.rconLive && !rcon.rconError) && (
|
||||
<Button color="secondary">Not Connected</Button>
|
||||
|
|
|
@ -14,11 +14,10 @@ import PlayArrowIcon from "@mui/icons-material/PlayArrow";
|
|||
import DeleteForeverIcon from "@mui/icons-material/DeleteForever";
|
||||
import EditIcon from "@mui/icons-material/Edit";
|
||||
import FolderIcon from "@mui/icons-material/Folder";
|
||||
import BackupIcon from "@mui/icons-material/Backup";
|
||||
import { Link } from "react-router-dom";
|
||||
|
||||
export default function ServerCard(props) {
|
||||
const { server, openRcon, openBackups } = props;
|
||||
const { server, openRcon } = props;
|
||||
const { name, id, metrics, ftpAvailable, serverAvailable, services } = server;
|
||||
const startServer = useStartServer(id);
|
||||
const stopServer = useStopServer(id);
|
||||
|
@ -118,14 +117,6 @@ export default function ServerCard(props) {
|
|||
>
|
||||
<EditIcon />
|
||||
</IconButton>
|
||||
<IconButton
|
||||
color="info"
|
||||
aria-label="Backups"
|
||||
size="large"
|
||||
onClick={openBackups}
|
||||
>
|
||||
<BackupIcon />
|
||||
</IconButton>
|
||||
<IconButton
|
||||
color="info"
|
||||
aria-label="Files"
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
.rconLogsWrapper {
|
||||
overflow-y: scroll;
|
||||
max-height: calc(100% - 6rem);
|
||||
max-height: 20rem;
|
||||
word-wrap: break-word;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
.rconActions {
|
||||
display: inline-flex;
|
||||
|
|
|
@ -73,11 +73,7 @@ export default function EditCoreOptions(props) {
|
|||
>
|
||||
<FormControl fullWidth sx={{ mt: "2rem", display: "flex", gap: ".5rem" }}>
|
||||
<NameOption value={spec.name} onChange={coreUpdate("name")} />
|
||||
<HostOption
|
||||
value={spec.host}
|
||||
onChange={coreUpdate("host")}
|
||||
disabled={true}
|
||||
/>
|
||||
<HostOption value={spec.host} onChange={coreUpdate("host")} />
|
||||
<VersionOption value={spec.version} onChange={coreUpdate("version")} />
|
||||
<ServerTypeOption
|
||||
value={spec.serverType}
|
||||
|
|
|
@ -12,16 +12,12 @@ import SpeedDialIcon from "@mui/material/SpeedDialIcon";
|
|||
import "@mcl/css/server-card.css";
|
||||
import "@mcl/css/overview.css";
|
||||
import { useServerInstances } from "@mcl/queries";
|
||||
import BackupDialog, {
|
||||
useBackupDialog,
|
||||
} from "../components/servers/BackupsDialog";
|
||||
|
||||
export default function Home() {
|
||||
const clusterMetrics = { cpu: 0, memory: 0 };
|
||||
const [server, setServer] = useState();
|
||||
const [servers, setServers] = useState([]);
|
||||
const [rdOpen, rconToggle] = useRconDialog();
|
||||
const [bkOpen, backupsToggle] = useBackupDialog();
|
||||
const { isLoading, data: serversData } = useServerInstances();
|
||||
const serverInstances = serversData ?? [];
|
||||
useEffect(() => {
|
||||
|
@ -35,11 +31,6 @@ export default function Home() {
|
|||
rconToggle();
|
||||
};
|
||||
|
||||
const openBackups = (s) => () => {
|
||||
setServer(s);
|
||||
backupsToggle();
|
||||
};
|
||||
|
||||
return (
|
||||
<Box className="home">
|
||||
<Overview clusterMetrics={clusterMetrics} />
|
||||
|
@ -60,20 +51,10 @@ export default function Home() {
|
|||
<Box className="servers">
|
||||
{!isLoading &&
|
||||
servers.map((s, k) => (
|
||||
<ServerCard
|
||||
key={k}
|
||||
server={s}
|
||||
openRcon={openRcon(s)}
|
||||
openBackups={openBackups(s)}
|
||||
/>
|
||||
<ServerCard key={k} server={s} openRcon={openRcon(s)} />
|
||||
))}
|
||||
</Box>
|
||||
<RconDialog open={rdOpen} dialogToggle={rconToggle} server={server} />
|
||||
<BackupDialog
|
||||
open={bkOpen}
|
||||
dialogToggle={backupsToggle}
|
||||
serverId={server?.id}
|
||||
/>
|
||||
<Button
|
||||
component={Link}
|
||||
to="/mcl/create"
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import { useState, useEffect } from "react";
|
||||
import { useSearchParams } from "react-router-dom";
|
||||
|
||||
const tokenStorageName = "cairoUserToken";
|
||||
const tokenQuery = "cairoUserToken";
|
||||
const tokenStorageName = "cairoAuthToken";
|
||||
const tokenQuery = "cairoAuthToken";
|
||||
|
||||
const verifyAuth = (authToken) =>
|
||||
fetch("/api/auth/verify", {
|
||||
|
|
|
@ -54,11 +54,6 @@ export const useGetServer = (serverId) =>
|
|||
queryFn: fetchApiPost("/server/blueprint", { id: serverId }),
|
||||
});
|
||||
|
||||
export const getServerBackups = (serverId) =>
|
||||
fetchApiCore("/s3/backups", { id: serverId }, "POST", true);
|
||||
export const getBackupUrl = (serverId, backupPath) =>
|
||||
fetchApiCore("/s3/backup-url", { id: serverId, backupPath }, "POST", true);
|
||||
|
||||
export const getServerFiles = async (serverId, path) =>
|
||||
fetchApiCore("/files/list", { id: serverId, path }, "POST", true);
|
||||
export const createServerFolder = async (serverId, path) =>
|
||||
|
@ -69,13 +64,6 @@ export const createServerFolder = async (serverId, path) =>
|
|||
export const deleteServerItem = async (serverId, path, isDir) =>
|
||||
fetchApiCore("/files/item", { id: serverId, path, isDir }, "DELETE");
|
||||
|
||||
export const moveServerItems = async (serverId, files, destination, origin) =>
|
||||
fetchApiCore(
|
||||
"/files/move",
|
||||
{ id: serverId, files, destination, origin },
|
||||
"POST",
|
||||
);
|
||||
|
||||
export async function previewServerItem(serverId, path) {
|
||||
const resp = await fetchApiCore("/files/item", { id: serverId, path });
|
||||
if (resp.status !== 200) return console.log("AHHHH");
|
||||
|
|