Compare commits

...

15 commits

Author SHA1 Message Date
c4882cb22f [CHORE] Adjust Forgejo Actions
All checks were successful
S3 Repo Backup / s3-repo-backup (push) Successful in 19s
Deploy USW-MC / deploy-edge (push) Successful in 2m21s
2024-08-25 16:46:21 -06:00
27b11fcd50 [FIX] Infer Current Context
All checks were successful
Deploy USW-MC / deploy-edge (push) Successful in 2m7s
2024-08-25 11:52:25 -06:00
968aa1fc74 [CHORE] Adjust Action Inputs
Some checks failed
Deploy USW-MC / deploy-edge (push) Failing after 6m40s
2024-08-25 11:34:55 -06:00
b2f093111f [CHORE] Adjust Action Inputs
Some checks failed
Deploy USW-MC / deploy-edge (push) Has been cancelled
2024-08-25 11:28:29 -06:00
f22b9a3262 [FIX] Fix Minecluster Deployment Again
Some checks failed
Deploy USW-MC / deploy-edge (push) Failing after 10s
2024-08-25 11:18:08 -06:00
6e9c71568d [CHORE] Adjust Actions For Deployment
All checks were successful
Deploy USW-MC / deploy-edge (push) Successful in 2m3s
2024-08-25 10:56:04 -06:00
8c7e41b21b [FIX] Use New Cairo System
Some checks failed
Deploy USW-MC / deploy-edge (push) Failing after 2s
Deploy Edge Proxy / deploy-edge (push) Failing after 2s
S3 Repo Backup / s3-repo-backup (push) Failing after 5s
2024-08-24 16:41:52 -06:00
6eed3fd694 [HOTFIX] Auth Credential Failures
Some checks failed
Deploy Edge Proxy / deploy-edge (push) Failing after 3s
Deploy USW-MC / deploy-edge (push) Failing after 2s
S3 Repo Backup / s3-repo-backup (push) Failing after 4s
2024-03-29 13:40:27 -06:00
626ebf9d1d [HOTFIX] Auth Credential Failure check 2024-03-29 12:39:44 -06:00
40f020d27b [HOTFIX] Auth Errors 2024-03-29 12:05:23 -06:00
2ba97fcb70 [DEBUG] Minecluster Auth Errors 2024-03-29 12:01:15 -06:00
87e87f89d3 [FIX] Fixed Kubeconfig access with envar change 2024-03-21 22:13:17 -06:00
7eaa13113e [PATCH] Import FTP Readiness & Liveness probe correctly configured 2024-03-12 22:58:27 -06:00
dunemask
6efa50e86b [FEATURE] Backups View & Style fix (#22)
Co-authored-by: Dunemask <dunemask@gmail.com>
Reviewed-on: https://gitea.dunemask.dev/elysium/minecluster/pulls/22
2024-03-12 01:58:25 +00:00
dunemask
332f84972c [FEATURE] Allow folder uploads (#21)
Co-authored-by: Dunemask <dunemask@gmail.com>
Reviewed-on: https://gitea.dunemask.dev/elysium/minecluster/pulls/21
2024-03-11 19:32:11 +00:00
27 changed files with 1117 additions and 1063 deletions

View file

@ -0,0 +1,31 @@
# name: Deploy Edge Proxy
# run-name: ${{ gitea.actor }} Deploy Edge Proxy
# on:
# push:
# branches: [ master ]
# env:
# GARDEN_DEPLOY_ACTION: minecluster-proxy
# jobs:
# deploy-edge:
# steps:
# # Setup Oasis
# - name: Oasis Setup
# uses: https://gitea.dunemask.dev/elysium/elysium-actions@oasis-setup-auto
# with:
# deploy-env: edge
# infisical-token: ${{ secrets.INFISICAL_ELYSIUM_EDGE_READ_TOKEN }}
# # Deploy to Edge Cluster
# - name: Deploy to Edge Cluster
# run: garden deploy $GARDEN_DEPLOY_ACTION --force --force-build --env usw-edge
# working-directory: ${{ env.OASIS_WORKSPACE }}
# # Alert via Discord
# - name: Discord Alert
# if: always()
# uses: https://gitea.dunemask.dev/elysium/elysium-actions@discord-status
# with:
# status: ${{ job.status }}
# channel: deployments
# header: DEPLOY EDGE
# additional-content: "Minecluster Proxy"

View file

@ -1,8 +1,8 @@
name: Deploy USW-MC
run-name: ${{ gitea.actor }} Deploy USW-MC
run-name: ${{ forgejo.actor }} Deploy USW-MC
on:
push:
branches: [ master ]
branches: [master]
env:
GARDEN_DEPLOY_ACTION: minecluster
@ -10,32 +10,33 @@ env:
jobs:
deploy-edge:
steps:
# Configure proper kubeconfig
# Configure proper kubeconfig (Used when cluster does not match the edge environment)
- name: Get usw-mc deployment kubeconfig
uses: https://gitea.dunemask.dev/elysium/elysium-actions@infisical-env
uses: https://forgejo.dunemask.dev/elysium/elysium-actions@infisical-env
with:
infisical-token: ${{ secrets.INFISICAL_ELYSIUM_EDGE_READ_TOKEN }}
project-id: ${{ vars.INFISICAL_DEPLOYMENTS_PROJECT_ID }}
secret-envs: edge
secret-paths: /kubernetes
secret-paths: /kubernetes/usw-mc
# Setup Oasis
- name: Oasis Setup
uses: https://gitea.dunemask.dev/elysium/elysium-actions@oasis-setup-auto
uses: https://forgejo.dunemask.dev/elysium/elysium-actions@oasis-setup-auto
with:
deploy-env: edge
infisical-token: ${{ secrets.INFISICAL_ELYSIUM_EDGE_READ_TOKEN }}
extra-secret-paths: /alexandria
infisical-project: ${{ vars.INFISICAL_DEPLOYMENTS_PROJECT_ID }}
extra-secret-paths: /dashboard
extra-secret-envs: edge
kubeconfig: ${{ env.KUBERNETES_CONFIG_USW_MC }}
# Deploy to Edge
- name: Deploy to Edge env
run: garden deploy $GARDEN_DEPLOY_ACTION --force --force-build --env usw-mc
run: garden deploy $GARDEN_DEPLOY_ACTION --force --force-build --env usw-edge
working-directory: ${{ env.OASIS_WORKSPACE }}
env:
MCL_KUBECONFIG: ${{ secrets.KUBECONFIG_USW_MC }}
env: # (Used when cluster does not match the edge environment)
MCL_KUBECONFIG: ${{ env.KUBERNETES_CONFIG_USW_MC }}
# Alert via Discord
- name: Discord Alert
if: always()
uses: https://gitea.dunemask.dev/elysium/elysium-actions@discord-status
uses: https://forgejo.dunemask.dev/elysium/elysium-actions@discord-status
with:
status: ${{ job.status }}
channel: deployments

View file

@ -0,0 +1,42 @@
# name: QA API Tests
# run-name: ${{ gitea.actor }} QA API Test
# on:
# pull_request:
# branches: [ master ]
# env:
# REPO_DIR: ${{ gitea.workspace }}/minecluster
# GARDEN_LINK_ACTION: build.minecluster-image
# jobs:
# qa-api-tests:
# steps:
# # Setup Oasis
# - name: Oasis Setup
# uses: https://gitea.dunemask.dev/elysium/elysium-actions@oasis-setup-auto
# with:
# deploy-env: ci
# infisical-token: ${{ secrets.INFISICAL_ELYSIUM_CI_READ_TOKEN }}
# # Test Code
# - name: Checkout repository
# uses: actions/checkout@v3
# with:
# path: ${{ env.REPO_DIR }}
# # Garden link
# - name: Link Repo code to Garden
# run: garden link action $GARDEN_LINK_ACTION $REPO_DIR --env usw-ci --var cubit-projects=cairo,minecluster
# working-directory: ${{ env.OASIS_WORKSPACE }}
# # Cubit CI Tests
# - name: Run Cubit tests in CI env
# run: garden workflow qa-api-tests --env usw-ci --var ci-ttl=25m
# working-directory: ${{ env.OASIS_WORKSPACE }}
# # Discord Alert
# - name: Discord Alert
# if: always()
# uses: https://gitea.dunemask.dev/elysium/elysium-actions@discord-status
# with:
# status: ${{ job.status }}
# channel: ci
# header: QA API Tests
# additional-content: "CI Namespace: `${{env.CI_NAMESPACE}}`"

View file

@ -0,0 +1,17 @@
name: S3 Repo Backup
run-name: ${{ forgejo.actor }} S3 Repo Backup
on:
push:
branches: [ master ]
jobs:
s3-repo-backup:
steps:
- name: S3 Backup
uses: https://forgejo.dunemask.dev/elysium/elysium-actions@s3-backup
with:
infisical-token: ${{ secrets.INFISICAL_ELYSIUM_EDGE_READ_TOKEN }}
infisical-project: ${{ vars.INFISICAL_DEPLOYMENTS_PROJECT_ID }}
- name: Status Alert
if: always()
run: echo "The Job ended with status ${{ job.status }}."

View file

@ -1,31 +0,0 @@
name: Deploy Edge Proxy
run-name: ${{ gitea.actor }} Deploy Edge Proxy
on:
push:
branches: [ master ]
env:
GARDEN_DEPLOY_ACTION: minecluster-proxy
jobs:
deploy-edge:
steps:
# Setup Oasis
- name: Oasis Setup
uses: https://gitea.dunemask.dev/elysium/elysium-actions@oasis-setup-auto
with:
deploy-env: edge
infisical-token: ${{ secrets.INFISICAL_ELYSIUM_EDGE_READ_TOKEN }}
# Deploy to Edge Cluster
- name: Deploy to Edge Cluster
run: garden deploy $GARDEN_DEPLOY_ACTION --force --force-build --env usw-edge
working-directory: ${{ env.OASIS_WORKSPACE }}
# Alert via Discord
- name: Discord Alert
if: always()
uses: https://gitea.dunemask.dev/elysium/elysium-actions@discord-status
with:
status: ${{ job.status }}
channel: deployments
header: DEPLOY EDGE
additional-content: "Minecluster Proxy"

View file

@ -1,42 +0,0 @@
name: QA API Tests
run-name: ${{ gitea.actor }} QA API Test
on:
pull_request:
branches: [ master ]
env:
REPO_DIR: ${{ gitea.workspace }}/minecluster
GARDEN_LINK_ACTION: build.minecluster-image
jobs:
qa-api-tests:
steps:
# Setup Oasis
- name: Oasis Setup
uses: https://gitea.dunemask.dev/elysium/elysium-actions@oasis-setup-auto
with:
deploy-env: ci
infisical-token: ${{ secrets.INFISICAL_ELYSIUM_CI_READ_TOKEN }}
# Test Code
- name: Checkout repository
uses: actions/checkout@v3
with:
path: ${{ env.REPO_DIR }}
# Garden link
- name: Link Repo code to Garden
run: garden link action $GARDEN_LINK_ACTION $REPO_DIR --env usw-ci --var cubit-projects=cairo,minecluster
working-directory: ${{ env.OASIS_WORKSPACE }}
# Cubit CI Tests
- name: Run Cubit tests in CI env
run: garden workflow qa-api-tests --env usw-ci --var ci-ttl=25m
working-directory: ${{ env.OASIS_WORKSPACE }}
# Discord Alert
- name: Discord Alert
if: always()
uses: https://gitea.dunemask.dev/elysium/elysium-actions@discord-status
with:
status: ${{ job.status }}
channel: ci
header: QA API Tests
additional-content: "CI Namespace: `${{env.CI_NAMESPACE}}`"

View file

@ -1,31 +0,0 @@
name: S3 Repo Backup
run-name: ${{ gitea.actor }} S3 Repo Backup
on:
push:
branches: [ master ]
env:
S3_BACKUP_ENDPOINT: https://s3.dunemask.dev
S3_BACKUP_KEY_ID: gitea-repo-backup
S3_BACKUP_KEY: ${{ secrets.S3_REPO_BACKUP_KEY }}
REPO_DIR: ${{ gitea.workspace }}/${{ gitea.respository }}
jobs:
s3-repo-backup:
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
path: ${{ env.REPO_DIR }}
- name: S3 Backup
uses: peter-evans/s3-backup@v1
env:
ACCESS_KEY_ID: ${{ env.S3_BACKUP_KEY_ID }}
SECRET_ACCESS_KEY: ${{ env.S3_BACKUP_KEY }}
MIRROR_SOURCE: ${{ env.REPO_DIR }}
MIRROR_TARGET: backups/gitea-repositories/${{ gitea.repository }}
STORAGE_SERVICE_URL: ${{env.S3_BACKUP_ENDPOINT}}
with:
args: --overwrite --remove
- name: Status Alert
if: always()
run: echo "The Job ended with status ${{ job.status }}."

1
.gitignore vendored
View file

@ -1,2 +1,3 @@
node_modules/
.env

View file

@ -0,0 +1,84 @@
import { S3, GetObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { basename } from "node:path";
import { getServerEntry } from "../database/queries/server-queries.js";
import { ERR } from "../util/logging.js";
import { checkAuthorization } from "../database/queries/server-queries.js";
const s3Region = "us-east-1";
async function getS3BackupData(serverId) {
const serverEntry = await getServerEntry(serverId);
if (!serverEntry?.backupHost) return undefined;
const s3Config = {
credentials: {
accessKeyId: serverEntry.backupId,
secretAccessKey: serverEntry.backupKey,
},
endpoint: `https://${serverEntry.backupHost}`,
forcePathStyle: true,
region: s3Region,
};
const pathParts = serverEntry.backupPath.split("/");
if (pathParts[0] === "") pathParts.shift();
const bucket = pathParts.shift();
const backupPrefix = pathParts.join("/");
return { s3Config, bucket, backupPrefix };
}
export async function listS3Backups(req, res) {
const serverSpec = req.body;
if (!serverSpec.id) return res.status(400).send("Server id missing!");
const authorized = await checkAuthorization(serverSpec.id, req.cairoId);
if (!authorized)
return res
.status(403)
.send("You do not have permission to access that server!");
const s3Data = await getS3BackupData(serverSpec.id);
if (!s3Data) return res.status(409).send("Backup not configured!");
const { s3Config, bucket, backupPrefix } = s3Data;
const s3Client = new S3(s3Config);
try {
const listResponse = await s3Client.listObjectsV2({
Bucket: bucket,
Prefix: backupPrefix,
});
const files =
listResponse.Contents?.map((f) => ({
name: basename(f.Key),
lastModified: f.LastModified,
path: f.Key,
size: f.Size,
})) ?? [];
res.json(files);
} catch (e) {
ERR("S3", e);
res.sendStatus(500);
}
}
export async function getS3BackupUrl(req, res) {
const serverSpec = req.body;
if (!serverSpec.id) return res.status(400).send("Server id missing!");
if (!serverSpec.backupPath)
return res.status(400).send("Backup path missing!");
const authorized = await checkAuthorization(serverSpec.id, req.cairoId);
if (!authorized)
return res
.status(403)
.send("You do not have permission to access that server!");
const s3Data = await getS3BackupData(serverSpec.id);
if (!s3Data) return res.status(409).send("Backup not configured!");
const { s3Config, bucket } = s3Data;
const s3Client = new S3(s3Config);
try {
const command = new GetObjectCommand({
Bucket: bucket,
Key: serverSpec.backupPath,
});
const url = await getSignedUrl(s3Client, command, { expiresIn: 3600 });
res.json({ url });
} catch (e) {
ERR("S3", e);
res.sendStatus(500);
}
}
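As a worked example of the backupPath handling in getS3BackupData above (the path value below is hypothetical, not taken from this change):

// Hypothetical backupPath, illustrating the bucket/prefix split performed above.
const pathParts = "/mcl-backups/servers/world-1".split("/"); // ["", "mcl-backups", "servers", "world-1"]
if (pathParts[0] === "") pathParts.shift();                  // drop the empty leading segment
const bucket = pathParts.shift();                            // "mcl-backups"
const backupPrefix = pathParts.join("/");                    // "servers/world-1"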

View file

@ -1,7 +1,7 @@
CREATE SEQUENCE servers_id_seq;
CREATE TABLE servers (
id bigint NOT NULL DEFAULT nextval('servers_id_seq') PRIMARY KEY,
owner_cairo_id bigint,
owner_cairo_id varchar(63),
host varchar(255) DEFAULT NULL,
name varchar(255) DEFAULT NULL,
version varchar(63) DEFAULT 'latest',

View file

@ -16,6 +16,10 @@ const getMclName = (host, id) =>
`${host.toLowerCase().replaceAll(".", "-")}-${id}`;
export async function checkAuthorization(serverId, cairoId) {
console.log(
`Checking Authorization for user ${cairoId} for serverId ${serverId}`,
);
if (!cairoId) return false;
const q = selectWhereAllQuery(table, {
id: serverId,
owner_cairo_id: cairoId,

View file

@ -6,7 +6,7 @@ env:
image: garethflowers/ftp-server
imagePullPolicy: IfNotPresent
livenessProbe:
exec: { command: ["echo"] }
exec: { command: ["/bin/sh", "-c", "netstat -a | grep -q ftp"] }
failureThreshold: 20
initialDelaySeconds: 0
periodSeconds: 5
@ -15,7 +15,7 @@ livenessProbe:
name: changeme-name-ftp
ports: [] # Programmatically add all the ports for easier readability, Ports include: 20,21,40000-400009
readinessProbe:
exec: { command: ["echo"] }
exec: { command: ["/bin/sh", "-c", "netstat -a | grep -q ftp"] }
failureThreshold: 20
initialDelaySeconds: 0
periodSeconds: 5

View file

@ -9,4 +9,6 @@ try {
} catch (e) {
kc.loadFromDefault();
}
if(kc.contexts.length === 1) kc.setCurrentContext(kc.contexts[0].name);
if(!kc.currentContext) throw new Error("Could not infer current context! Please set it manually in the Kubeconfig!");
export default kc;
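For context, a minimal sketch of how the whole module is assumed to read after this change; the try/catch skeleton and the final three lines come from the diff above, while the loading call inside the try block is an assumption (those lines are elided in the hunk):

import { KubeConfig } from "@kubernetes/client-node";

const kc = new KubeConfig();
try {
  // Assumption: the elided lines attempt a cluster-specific load, e.g. from an env-provided kubeconfig.
  kc.loadFromString(process.env.MCL_KUBECONFIG);
} catch (e) {
  kc.loadFromDefault(); // fall back to the default kubeconfig resolution
}
// New in this commit: infer the context when exactly one exists, otherwise fail loudly.
if (kc.contexts.length === 1) kc.setCurrentContext(kc.contexts[0].name);
if (!kc.currentContext) throw new Error("Could not infer current context! Please set it manually in the Kubeconfig!");
export default kc;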

View file

@ -2,7 +2,8 @@ import ftp from "basic-ftp";
import { ERR } from "../util/logging.js";
import { getServerAssets } from "./k8s-server-control.js";
import ExpressClientError from "../util/ExpressClientError.js";
import { Readable, Writable, Transform } from "node:stream";
import { Readable, Transform } from "node:stream";
import { dirname, basename } from "node:path";
const namespace = process.env.MCL_SERVER_NAMESPACE;
@ -82,7 +83,8 @@ export async function uploadServerItem(serverSpec, file) {
const { path } = serverSpec;
pathSecurityCheck(path);
await useServerFtp(serverSpec, async (c) => {
await c.uploadFrom(fileStream, path);
await c.ensureDir(dirname(path));
await c.uploadFrom(fileStream, basename(path));
}).catch(handleError);
}
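The fix works because basic-ftp's ensureDir creates any missing directories and changes the working directory to them, so the upload itself only needs the basename. A minimal usage sketch under assumed connection details (host, credentials, path, and stream below are all hypothetical):

import { Client } from "basic-ftp";
import { Readable } from "node:stream";
import { dirname, basename } from "node:path";

const c = new Client();
// Hypothetical connection details; real credentials come from the server's FTP service.
await c.access({ host: "ftp.example.internal", user: "mcl", password: "changeme" });
const path = "/world/region/r.0.0.mca";            // hypothetical upload target
const fileStream = Readable.from(["placeholder"]); // stands in for the incoming file stream
await c.ensureDir(dirname(path));                  // creates /world/region if needed and cd's into it
await c.uploadFrom(fileStream, basename(path));    // upload lands relative to that directory
c.close();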

View file

@ -2,11 +2,14 @@ import { Router } from "express";
import cairoAuthMiddleware from "./middlewares/auth-middleware.js";
const router = Router();
const cairoProjectId = process.env.MCL_CAIRO_PROJECT;
if(!cairoProjectId) throw Error("Cairo Project Required!");
const ok = (_r, res) => res.sendStatus(200);
function cairoRedirect(req, res) {
res.redirect(
`${process.env.MCL_CAIRO_URL}/cairo/auth?redirectUri=${req.query.redirectUri}`,
`${process.env.MCL_CAIRO_URL}/cairo/authenticate?redirectUri=${req.query.redirectUri}&projectId=${cairoProjectId}`,
);
}

View file

@ -4,21 +4,36 @@ import bearerTokenMiddleware from "express-bearer-token";
import { ERR, VERB } from "../../util/logging.js";
// Constants
const { MCL_CAIRO_URL } = process.env;
const { MCL_CAIRO_URL, MCL_CAIRO_PROJECT } = process.env;
const cairoAuthMiddleware = Router();
const cairoAuthenticate = async (token) => {
const config = { headers: { Authorization: `Bearer ${token}` } };
return fetch(`${MCL_CAIRO_URL}/api/user/info`, config).then((res) =>
res.json(),
return fetch(`${MCL_CAIRO_URL}/api/${MCL_CAIRO_PROJECT}/auth/credentials`, config).then(async (res) => {
if (res.status >= 300) {
const errorMessage = await res
.json()
.then((data) => JSON.stringify(data))
.catch(() => res.statusText);
throw Error(
`Could not authenticate with user, received message: ${errorMessage}`,
);
}
return res.json();
});
};
// Middleware
const cairoAuthHandler = (req, res, next) => {
if (!req.token) return res.status(401).send("Cairo auth required!");
cairoAuthenticate(req.token)
.then((authData) => (req.cairoId = authData.id))
.then((authData) => {
console.log(authData);
if (!authData?.user?.id)
throw Error(`Cairo didn't return the expected data! ${authData?.user?.id}`);
req.cairoId = authData?.user?.id;
})
.then(() => next())
.catch((err) => {
ERR("AUTH", err.response ? err.response.data : err.message);

11
lib/routes/s3-route.js Normal file
View file

@ -0,0 +1,11 @@
import { Router, json as jsonMiddleware } from "express";
import { getS3BackupUrl, listS3Backups } from "../controllers/s3-controller.js";
import cairoAuthMiddleware from "./middlewares/auth-middleware.js";
const router = Router();
router.use([cairoAuthMiddleware, jsonMiddleware()]);
router.post("/backups", listS3Backups);
router.post("/backup-url", getS3BackupUrl);
export default router;
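A minimal sketch of exercising these routes once they are mounted at /api/s3 (see the route-builder change below); the bearer token and server id are hypothetical placeholders:

// Hypothetical client-side usage; the token and id are placeholders, not values from this change.
const headers = {
  Authorization: "Bearer <cairo-user-token>",
  "Content-Type": "application/json",
};
const backups = await fetch("/api/s3/backups", {
  method: "POST",
  headers,
  body: JSON.stringify({ id: 42 }),
}).then((r) => r.json());
// Each entry is { name, lastModified, path, size }; request a presigned URL for the first one.
const { url } = await fetch("/api/s3/backup-url", {
  method: "POST",
  headers,
  body: JSON.stringify({ id: 42, backupPath: backups[0]?.path }),
}).then((r) => r.json());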

View file

@ -8,6 +8,7 @@ import systemRoute from "../routes/system-route.js";
import serverRoute from "../routes/server-route.js";
import filesRoute from "../routes/files-route.js";
import reactRoute from "../routes/react-route.js";
import s3Route from "../routes/s3-route.js";
import {
logErrors,
clientErrorHandler,
@ -27,6 +28,7 @@ export default function buildRoutes(pg, skio) {
router.use("/api/system", systemRoute);
router.use("/api/server", serverRoute);
router.use("/api/files", filesRoute);
router.use("/api/s3", s3Route);
router.use(["/mcl", "/mcl/*"], reactRoute); // Static Build Route
/*router.use(logErrors);
router.use(clientErrorHandler);

1674
package-lock.json generated

File diff suppressed because it is too large

View file

@ -42,8 +42,9 @@
"vite": "^5.1.1"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.529.1",
"@aws-sdk/s3-request-presigner": "^3.529.1",
"@kubernetes/client-node": "^0.20.0",
"aws-sdk": "^2.1555.0",
"basic-ftp": "^5.0.4",
"bcrypt": "^5.1.1",
"chalk": "^5.3.0",

View file

@ -111,10 +111,12 @@ export default function MineclusterFiles(props) {
}
async function uploadFile(file) {
const filePath = file.path.startsWith("/") ? file.path : `/${file.path}`;
const formData = new FormData();
formData.append("file", file);
formData.append("id", serverId);
formData.append("path", [...dirStack, file.name].join("/"));
const path = `${[...dirStack].join("/")}${filePath}`;
formData.append("path", path);
await fetch("/api/files/upload", {
method: "POST",
body: formData,

View file

@ -0,0 +1,88 @@
import { useEffect, useState } from "react";
import useMediaQuery from "@mui/material/useMediaQuery";
import { useTheme } from "@mui/material/styles";
import Button from "@mui/material/Button";
import DialogTitle from "@mui/material/DialogTitle";
import DialogContent from "@mui/material/DialogContent";
import DialogActions from "@mui/material/DialogActions";
import Dialog from "@mui/material/Dialog";
import IconButton from "@mui/material/IconButton";
import Toolbar from "@mui/material/Toolbar";
import Typography from "@mui/material/Typography";
import Stack from "@mui/material/Stack";
import DownloadIcon from "@mui/icons-material/Download";
import { getBackupUrl, getServerBackups } from "../../util/queries";
export function useBackupDialog(isOpen = false) {
const [open, setOpen] = useState(isOpen);
const dialogToggle = () => setOpen(!open);
return [open, dialogToggle];
}
export default function BackupDialog(props) {
const { serverId, open, dialogToggle } = props;
const theme = useTheme();
const fullScreen = useMediaQuery(theme.breakpoints.down("md"));
const [backups, setBackups] = useState([]);
function refreshUpdateList() {
getServerBackups(serverId).then(setBackups);
}
useEffect(() => {
if (!serverId) return;
refreshUpdateList();
}, [serverId, open]);
function normalizeLastModified(lastModified) {
const d = new Date(Date.parse(lastModified));
return `${d.getFullYear()}-${d.getMonth() + 1}-${d.getDate()} ${d.getHours()}:${d.getMinutes()}`;
}
const downloadBackup = (backup) =>
async function openBackupLink() {
const { url } = await getBackupUrl(serverId, backup.path);
window.open(url, "_blank").focus();
};
const normalizedSize = (size) => `${(size / Math.pow(1024, 3)).toFixed(2)}GB`;
return (
<Dialog
fullWidth
maxWidth="lg"
open={open}
fullScreen={fullScreen}
PaperProps={!fullScreen ? { sx: { height: "60%" } } : undefined}
>
<Toolbar sx={{ display: { md: "none" } }} />
<DialogTitle>Backups</DialogTitle>
<DialogContent sx={{ height: "100%" }}>
{backups.map((backup, i) => (
<Stack key={i} sx={{ width: "100%" }} direction="row">
<Typography variant="subtitle2" sx={{ m: "auto 0", width: "40%" }}>
{backup.name}
</Typography>
<Typography variant="subtitle2" sx={{ m: "auto 0", width: "20%" }}>
{normalizeLastModified(backup.lastModified)}
</Typography>
<Typography variant="subtitle2" sx={{ m: "auto 0", width: "40%" }}>
{normalizedSize(backup.size)}
</Typography>
<IconButton
sx={{ marginLeft: "auto" }}
onClick={downloadBackup(backup)}
>
<DownloadIcon />
</IconButton>
</Stack>
))}
</DialogContent>
<DialogActions>
<Button autoFocus onClick={dialogToggle}>
Close
</Button>
</DialogActions>
</Dialog>
);
}

View file

@ -61,6 +61,7 @@ export default function RconView(props) {
color: "white",
borderRadius: "4px",
width: "100%",
height: "100%",
}}
>
{logs.length === 0 &&

View file

@ -14,10 +14,11 @@ import PlayArrowIcon from "@mui/icons-material/PlayArrow";
import DeleteForeverIcon from "@mui/icons-material/DeleteForever";
import EditIcon from "@mui/icons-material/Edit";
import FolderIcon from "@mui/icons-material/Folder";
import BackupIcon from "@mui/icons-material/Backup";
import { Link } from "react-router-dom";
export default function ServerCard(props) {
const { server, openRcon } = props;
const { server, openRcon, openBackups } = props;
const { name, id, metrics, ftpAvailable, serverAvailable, services } = server;
const startServer = useStartServer(id);
const stopServer = useStopServer(id);
@ -117,6 +118,14 @@ export default function ServerCard(props) {
>
<EditIcon />
</IconButton>
<IconButton
color="info"
aria-label="Backups"
size="large"
onClick={openBackups}
>
<BackupIcon />
</IconButton>
<IconButton
color="info"
aria-label="Files"

View file

@ -12,12 +12,16 @@ import SpeedDialIcon from "@mui/material/SpeedDialIcon";
import "@mcl/css/server-card.css";
import "@mcl/css/overview.css";
import { useServerInstances } from "@mcl/queries";
import BackupDialog, {
useBackupDialog,
} from "../components/servers/BackupsDialog";
export default function Home() {
const clusterMetrics = { cpu: 0, memory: 0 };
const [server, setServer] = useState();
const [servers, setServers] = useState([]);
const [rdOpen, rconToggle] = useRconDialog();
const [bkOpen, backupsToggle] = useBackupDialog();
const { isLoading, data: serversData } = useServerInstances();
const serverInstances = serversData ?? [];
useEffect(() => {
@ -31,6 +35,11 @@ export default function Home() {
rconToggle();
};
const openBackups = (s) => () => {
setServer(s);
backupsToggle();
};
return (
<Box className="home">
<Overview clusterMetrics={clusterMetrics} />
@ -51,10 +60,20 @@ export default function Home() {
<Box className="servers">
{!isLoading &&
servers.map((s, k) => (
<ServerCard key={k} server={s} openRcon={openRcon(s)} />
<ServerCard
key={k}
server={s}
openRcon={openRcon(s)}
openBackups={openBackups(s)}
/>
))}
</Box>
<RconDialog open={rdOpen} dialogToggle={rconToggle} server={server} />
<BackupDialog
open={bkOpen}
dialogToggle={backupsToggle}
serverId={server?.id}
/>
<Button
component={Link}
to="/mcl/create"

View file

@ -1,8 +1,8 @@
import { useState, useEffect } from "react";
import { useSearchParams } from "react-router-dom";
const tokenStorageName = "cairoAuthToken";
const tokenQuery = "cairoAuthToken";
const tokenStorageName = "cairoUserToken";
const tokenQuery = "cairoUserToken";
const verifyAuth = (authToken) =>
fetch("/api/auth/verify", {

View file

@ -54,6 +54,11 @@ export const useGetServer = (serverId) =>
queryFn: fetchApiPost("/server/blueprint", { id: serverId }),
});
export const getServerBackups = (serverId) =>
fetchApiCore("/s3/backups", { id: serverId }, "POST", true);
export const getBackupUrl = (serverId, backupPath) =>
fetchApiCore("/s3/backup-url", { id: serverId, backupPath }, "POST", true);
export const getServerFiles = async (serverId, path) =>
fetchApiCore("/files/list", { id: serverId, path }, "POST", true);
export const createServerFolder = async (serverId, path) =>