From 2a3ad5e78f895df22da817f2cfb048c5714d16df Mon Sep 17 00:00:00 2001 From: hibna Date: Thu, 26 Feb 2026 21:01:00 +0000 Subject: [PATCH] feat: overhaul server automation, files editor, and CS2 setup workflows --- README.md | 2 +- apps/api/package.json | 4 + apps/api/src/index.ts | 2 + apps/api/src/lib/daemon.ts | 252 ++++- apps/api/src/lib/server-automation.ts | 793 ++++++++++++++++ apps/api/src/routes/admin/index.ts | 2 + apps/api/src/routes/admin/schemas.ts | 2 + apps/api/src/routes/games/index.ts | 16 + apps/api/src/routes/internal/index.ts | 81 +- apps/api/src/routes/nodes/index.ts | 58 +- apps/api/src/routes/servers/backups.ts | 120 ++- apps/api/src/routes/servers/config.ts | 29 +- apps/api/src/routes/servers/files.ts | 40 +- apps/api/src/routes/servers/index.ts | 163 +++- apps/api/src/routes/servers/plugins.ts | 665 +++++++++++-- apps/api/src/routes/servers/schedules.ts | 51 +- apps/daemon/Cargo.lock | 1 + apps/daemon/Cargo.toml | 1 + apps/daemon/src/docker/container.rs | 54 +- apps/daemon/src/game/cs2.rs | 113 ++- apps/daemon/src/grpc/service.rs | 361 +++++-- apps/daemon/src/main.rs | 10 +- apps/daemon/src/server/manager.rs | 49 +- apps/web/src/lib/api.ts | 19 +- apps/web/src/pages/admin/games.tsx | 165 +++- apps/web/src/pages/nodes/detail.tsx | 4 +- apps/web/src/pages/server/config.tsx | 6 +- apps/web/src/pages/server/files.tsx | 898 ++++++++++++++---- apps/web/src/pages/server/plugins.tsx | 478 +++++++++- apps/web/src/pages/server/settings.tsx | 200 +++- apps/web/src/pages/servers/create.tsx | 4 +- apps/web/src/pages/settings/members.tsx | 70 +- .../drizzle/0001_game_automation_rules.sql | 36 + .../drizzle/0002_cs2_add_metamod_workflow.sql | 35 + packages/database/drizzle/meta/_journal.json | 27 + packages/database/src/schema/games.ts | 1 + packages/database/src/seed.ts | 68 +- packages/proto/src/index.ts | 8 +- packages/shared/src/types.ts | 64 ++ pnpm-lock.yaml | 191 ++++ 40 files changed, 4675 insertions(+), 468 deletions(-) create mode 100644 
apps/api/src/lib/server-automation.ts create mode 100644 apps/api/src/routes/games/index.ts create mode 100644 packages/database/drizzle/0001_game_automation_rules.sql create mode 100644 packages/database/drizzle/0002_cs2_add_metamod_workflow.sql create mode 100644 packages/database/drizzle/meta/_journal.json diff --git a/README.md b/README.md index 0e6f8f6..9136ec6 100644 --- a/README.md +++ b/README.md @@ -153,7 +153,7 @@ source-gamepanel/ | Game | Docker Image | Default Port | Config Format | Plugin Support | |------|-------------|-------------|---------------|---------------| | Minecraft: Java Edition | `itzg/minecraft-server` | 25565 | `.properties`, `.yml`, `.json` | Spiget API + manual | -| Counter-Strike 2 | `cm2network/csgo` | 27015 | Source `.cfg` (keyvalue) | Manual | +| Counter-Strike 2 | `cm2network/cs2` | 27015 | Source `.cfg` (keyvalue) | Manual | | Minecraft: Bedrock Edition | `itzg/minecraft-bedrock-server` | 19132 | `.properties` | — | | Terraria | `ryshe/terraria` | 7777 | keyvalue | — | | Rust | `didstopia/rust-server` | 28015 | — | — | diff --git a/apps/api/package.json b/apps/api/package.json index 80c6796..cdfaa60 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -26,10 +26,14 @@ "drizzle-orm": "^0.38.0", "fastify": "^5.2.0", "fastify-plugin": "^5.0.0", + "tar-stream": "^3.1.7", + "unzipper": "^0.12.3", "pino-pretty": "^13.0.0", "socket.io": "^4.8.0" }, "devDependencies": { + "@types/tar-stream": "^3.1.4", + "@types/unzipper": "^0.10.11", "dotenv-cli": "^8.0.0", "tsx": "^4.19.0" } diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index 6091f71..7a7ac25 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -13,6 +13,7 @@ import daemonNodeRoutes from './routes/nodes/daemon.js'; import nodeRoutes from './routes/nodes/index.js'; import serverRoutes from './routes/servers/index.js'; import adminRoutes from './routes/admin/index.js'; +import gameRoutes from './routes/games/index.js'; import { AppError } from 
'./lib/errors.js'; const app = Fastify({ @@ -87,6 +88,7 @@ app.get('/api/health', async () => { await app.register(authRoutes, { prefix: '/api/auth' }); await app.register(organizationRoutes, { prefix: '/api/organizations' }); await app.register(adminRoutes, { prefix: '/api/admin' }); +await app.register(gameRoutes, { prefix: '/api/games' }); await app.register(daemonNodeRoutes, { prefix: '/api/nodes' }); await app.register(internalRoutes, { prefix: '/api/internal' }); diff --git a/apps/api/src/lib/daemon.ts b/apps/api/src/lib/daemon.ts index d73ebca..3350b06 100644 --- a/apps/api/src/lib/daemon.ts +++ b/apps/api/src/lib/daemon.ts @@ -32,6 +32,21 @@ interface DaemonServerResponse { status: string; } +interface DaemonNodeStatusRaw { + version: string; + is_healthy: boolean; + uptime_seconds: number; + active_servers: number; +} + +interface DaemonNodeStatsRaw { + cpu_percent: number; + memory_used: number; + memory_total: number; + disk_used: number; + disk_total: number; +} + interface DaemonStatusResponse { uuid: string; state: string; @@ -66,6 +81,13 @@ interface DaemonPlayerListRaw { max_players: number; } +interface DaemonBackupResponseRaw { + backup_id: string; + size_bytes: number; + checksum: string; + success: boolean; +} + export interface DaemonConsoleOutput { uuid: string; line: string; @@ -95,9 +117,40 @@ export interface DaemonPlayersResponse { maxPlayers: number; } +export interface DaemonBackupResponse { + backupId: string; + sizeBytes: number; + checksum: string; + success: boolean; +} + +export interface DaemonNodeStatus { + version: string; + isHealthy: boolean; + uptimeSeconds: number; + activeServers: number; +} + +export interface DaemonNodeStats { + cpuPercent: number; + memoryUsed: number; + memoryTotal: number; + diskUsed: number; + diskTotal: number; +} + type UnaryCallback = (error: grpc.ServiceError | null, response: TResponse) => void; interface DaemonServiceClient extends grpc.Client { + getNodeStatus( + request: EmptyResponse, + 
metadata: grpc.Metadata, + callback: UnaryCallback, + ): void; + streamNodeStats( + request: EmptyResponse, + metadata: grpc.Metadata, + ): grpc.ClientReadableStream; createServer( request: DaemonCreateServerRequest, metadata: grpc.Metadata, @@ -147,6 +200,21 @@ interface DaemonServiceClient extends grpc.Client { metadata: grpc.Metadata, callback: UnaryCallback, ): void; + createBackup( + request: { server_uuid: string; backup_id: string; cdn_upload_url?: string }, + metadata: grpc.Metadata, + callback: UnaryCallback, + ): void; + restoreBackup( + request: { server_uuid: string; backup_id: string; cdn_download_url?: string }, + metadata: grpc.Metadata, + callback: UnaryCallback, + ): void; + deleteBackup( + request: { server_uuid: string; backup_id: string }, + metadata: grpc.Metadata, + callback: UnaryCallback, + ): void; getActivePlayers( request: { uuid: string }, metadata: grpc.Metadata, @@ -183,27 +251,34 @@ const POWER_ACTIONS: Record = { kill: 3, }; +const MAX_GRPC_MESSAGE_BYTES = 32 * 1024 * 1024; + function buildGrpcTarget(fqdn: string, grpcPort: number): string { const trimmed = fqdn.trim(); if (!trimmed) throw new Error('Node FQDN is empty'); + let host = trimmed; if (trimmed.includes('://')) { try { const parsed = new URL(trimmed); - const host = parsed.hostname || parsed.host; + host = parsed.hostname || parsed.host; if (!host) throw new Error('Node FQDN has no hostname'); - if (parsed.port) return `${host}:${parsed.port}`; - return `${host}:${grpcPort}`; } catch { // Fall through to raw handling below. } } - const withoutPath = trimmed.replace(/\/.*$/, ''); + const withoutPath = host.replace(/\/.*$/, ''); if (/^\[.+\](?::\d+)?$/.test(withoutPath)) { - return /\]:\d+$/.test(withoutPath) ? 
withoutPath : `${withoutPath}:${grpcPort}`; + const innerHost = withoutPath + .replace(/^\[/, '') + .replace(/\](?::\d+)?$/, ''); + return `[${innerHost}]:${grpcPort}`; + } + if (/^[^:]+:\d+$/.test(withoutPath)) { + const hostOnly = withoutPath.replace(/:\d+$/, ''); + return `${hostOnly}:${grpcPort}`; } - if (/^[^:]+:\d+$/.test(withoutPath)) return withoutPath; if (withoutPath.includes(':')) return `[${withoutPath}]:${grpcPort}`; return `${withoutPath}:${grpcPort}`; } @@ -219,6 +294,10 @@ function createClient(node: DaemonNodeConnection): DaemonServiceClient { return new DaemonService( target, grpc.credentials.createInsecure(), + { + 'grpc.max_send_message_length': MAX_GRPC_MESSAGE_BYTES, + 'grpc.max_receive_message_length': MAX_GRPC_MESSAGE_BYTES, + }, ) as unknown as DaemonServiceClient; } @@ -262,6 +341,46 @@ function callUnary( }); } +function readFirstStreamMessage( + stream: grpc.ClientReadableStream, + timeoutMs: number, +): Promise { + return new Promise((resolve, reject) => { + let completed = false; + + const timeout = setTimeout(() => { + if (completed) return; + completed = true; + reject(new Error(`gRPC stream timed out after ${timeoutMs}ms`)); + }, timeoutMs); + + const onData = (message: TMessage) => { + if (completed) return; + completed = true; + clearTimeout(timeout); + resolve(message); + }; + + const onError = (error: Error) => { + if (completed) return; + completed = true; + clearTimeout(timeout); + reject(error); + }; + + const onEnd = () => { + if (completed) return; + completed = true; + clearTimeout(timeout); + reject(new Error('gRPC stream ended before first message')); + }; + + stream.on('data', onData); + stream.on('error', onError); + stream.on('end', onEnd); + }); +} + function toBuffer(data: Uint8Array | Buffer): Buffer { if (Buffer.isBuffer(data)) return data; return Buffer.from(data); @@ -270,6 +389,49 @@ function toBuffer(data: Uint8Array | Buffer): Buffer { const DEFAULT_CONNECT_TIMEOUT_MS = 8_000; const DEFAULT_RPC_TIMEOUT_MS = 
20_000; +export async function daemonGetNodeStatus( + node: DaemonNodeConnection, +): Promise { + const client = createClient(node); + try { + await waitForReady(client, DEFAULT_CONNECT_TIMEOUT_MS); + const response = await callUnary( + (callback) => client.getNodeStatus({}, getMetadata(node.daemonToken), callback), + DEFAULT_RPC_TIMEOUT_MS, + ); + + return { + version: response.version, + isHealthy: response.is_healthy, + uptimeSeconds: Number(response.uptime_seconds), + activeServers: Number(response.active_servers), + }; + } finally { + client.close(); + } +} + +export async function daemonGetNodeStats( + node: DaemonNodeConnection, +): Promise { + const client = createClient(node); + try { + await waitForReady(client, DEFAULT_CONNECT_TIMEOUT_MS); + const stream = client.streamNodeStats({}, getMetadata(node.daemonToken)); + const response = await readFirstStreamMessage(stream, DEFAULT_RPC_TIMEOUT_MS); + + return { + cpuPercent: Number(response.cpu_percent), + memoryUsed: Number(response.memory_used), + memoryTotal: Number(response.memory_total), + diskUsed: Number(response.disk_used), + diskTotal: Number(response.disk_total), + }; + } finally { + client.close(); + } +} + export async function daemonCreateServer( node: DaemonNodeConnection, request: DaemonCreateServerRequest, @@ -468,6 +630,84 @@ export async function daemonDeleteFiles( } } +export async function daemonCreateBackup( + node: DaemonNodeConnection, + serverUuid: string, + backupId: string, +): Promise { + const client = createClient(node); + try { + await waitForReady(client, DEFAULT_CONNECT_TIMEOUT_MS); + const response = await callUnary( + (callback) => + client.createBackup( + { server_uuid: serverUuid, backup_id: backupId }, + getMetadata(node.daemonToken), + callback, + ), + DEFAULT_RPC_TIMEOUT_MS, + ); + + return { + backupId: response.backup_id, + sizeBytes: Number(response.size_bytes), + checksum: response.checksum, + success: response.success, + }; + } finally { + client.close(); + } +} + 
+export async function daemonRestoreBackup( + node: DaemonNodeConnection, + serverUuid: string, + backupId: string, + cdnPath?: string | null, +): Promise { + const client = createClient(node); + try { + await waitForReady(client, DEFAULT_CONNECT_TIMEOUT_MS); + await callUnary( + (callback) => + client.restoreBackup( + { + server_uuid: serverUuid, + backup_id: backupId, + cdn_download_url: cdnPath ?? '', + }, + getMetadata(node.daemonToken), + callback, + ), + DEFAULT_RPC_TIMEOUT_MS, + ); + } finally { + client.close(); + } +} + +export async function daemonDeleteBackup( + node: DaemonNodeConnection, + serverUuid: string, + backupId: string, +): Promise { + const client = createClient(node); + try { + await waitForReady(client, DEFAULT_CONNECT_TIMEOUT_MS); + await callUnary( + (callback) => + client.deleteBackup( + { server_uuid: serverUuid, backup_id: backupId }, + getMetadata(node.daemonToken), + callback, + ), + DEFAULT_RPC_TIMEOUT_MS, + ); + } finally { + client.close(); + } +} + export async function daemonGetActivePlayers( node: DaemonNodeConnection, serverUuid: string, diff --git a/apps/api/src/lib/server-automation.ts b/apps/api/src/lib/server-automation.ts new file mode 100644 index 0000000..d70b6e9 --- /dev/null +++ b/apps/api/src/lib/server-automation.ts @@ -0,0 +1,793 @@ +import { gunzipSync } from 'node:zlib'; +import type { FastifyInstance } from 'fastify'; +import * as tar from 'tar-stream'; +import type { Headers } from 'tar-stream'; +import * as unzipper from 'unzipper'; +import type { + GameAutomationRule, + ServerAutomationEvent, + ServerAutomationAction, + ServerAutomationGitHubReleaseExtractAction, + ServerAutomationHttpDirectoryExtractAction, +} from '@source/shared'; +import { + daemonReadFile, + daemonSendCommand, + daemonWriteFile, + type DaemonNodeConnection, +} from './daemon.js'; + +const DEFAULT_RELEASE_MAX_BYTES = 256 * 1024 * 1024; +const DEFAULT_DOWNLOAD_TIMEOUT_MS = 120_000; +const AUTOMATION_MARKER_ROOT = '/.gamepanel/automation'; 
+ +const DEFAULT_GAME_AUTOMATION_RULES: Record = { + cs2: [ + { + id: 'cs2-install-latest-metamod', + event: 'server.install.completed', + enabled: true, + runOncePerServer: true, + continueOnError: false, + actions: [ + { + id: 'install-cs2-metamod', + type: 'http_directory_extract', + indexUrl: 'https://mms.alliedmods.net/mmsdrop/2.0/', + assetNamePattern: '^mmsource-2\\.0\\.0-git\\d+-linux\\.tar\\.gz$', + destination: '/game/csgo', + stripComponents: 0, + maxBytes: DEFAULT_RELEASE_MAX_BYTES, + }, + ], + }, + { + id: 'cs2-install-latest-counterstrikesharp-runtime', + event: 'server.install.completed', + enabled: true, + runOncePerServer: true, + continueOnError: false, + actions: [ + { + id: 'install-cs2-runtime', + type: 'github_release_extract', + owner: 'roflmuffin', + repo: 'CounterStrikeSharp', + assetNamePatterns: [ + '^counterstrikesharp-with-runtime-.*linux.*\\.zip$', + '^counterstrikesharp-with-runtime.*\\.zip$', + ], + destination: '/game/csgo', + stripComponents: 0, + maxBytes: DEFAULT_RELEASE_MAX_BYTES, + }, + ], + }, + ], +}; + +interface ServerAutomationContext { + serverId: string; + serverUuid: string; + gameSlug: string; + event: ServerAutomationEvent; + node: DaemonNodeConnection; + automationRulesRaw: unknown; + force?: boolean; +} + +export interface ServerAutomationRunResult { + workflowsMatched: number; + workflowsExecuted: number; + workflowsSkipped: number; + workflowsFailed: number; + actionFailures: number; + failures: ServerAutomationFailure[]; +} + +interface ExtractedFile { + path: string; + data: Buffer; +} + +export interface ServerAutomationFailure { + level: 'action' | 'workflow'; + workflowId: string; + actionId?: string; + message: string; +} + +interface GitHubReleaseAsset { + name: string; + browser_download_url: string; + size: number; +} + +interface GitHubReleaseResponse { + tag_name: string; + assets: GitHubReleaseAsset[]; +} + +function isObject(value: unknown): value is Record { + return typeof value === 'object' && 
value !== null; +} + +function errorMessage(error: unknown): string { + if (error instanceof Error) return error.message; + return String(error); +} + +function readWorkflowId(value: unknown): string | null { + if (!isObject(value)) return null; + const id = value.id; + if (typeof id !== 'string' || id.trim() === '') return null; + return id; +} + +function normalizeWorkflow( + gameSlug: string, + workflow: GameAutomationRule, +): GameAutomationRule { + if (gameSlug.toLowerCase() !== 'cs2') return workflow; + if (workflow.id !== 'cs2-install-latest-counterstrikesharp-runtime') return workflow; + + const normalizedActions = workflow.actions.map((action) => { + if (action.type !== 'github_release_extract') return action; + if (action.id !== 'install-cs2-runtime') return action; + + const destination = (action.destination ?? '').trim(); + if (destination !== '' && destination !== '/') return action; + + return { + ...action, + destination: '/game/csgo', + }; + }); + + return { + ...workflow, + actions: normalizedActions, + }; +} + +function asAutomationRules(raw: unknown, gameSlug: string): GameAutomationRule[] { + const defaults = DEFAULT_GAME_AUTOMATION_RULES[gameSlug.toLowerCase()] ?? 
[]; + if (!Array.isArray(raw)) { + return defaults.map((workflow) => normalizeWorkflow(gameSlug, workflow)); + } + + const configured = raw as GameAutomationRule[]; + if (defaults.length === 0) { + return configured.map((workflow) => normalizeWorkflow(gameSlug, workflow)); + } + + const existingIds = new Set( + raw + .map(readWorkflowId) + .filter((workflowId): workflowId is string => workflowId !== null), + ); + + const missingDefaults = defaults.filter((workflow) => !existingIds.has(workflow.id)); + if (missingDefaults.length === 0) { + return configured.map((workflow) => normalizeWorkflow(gameSlug, workflow)); + } + + return [...configured, ...missingDefaults].map((workflow) => normalizeWorkflow(gameSlug, workflow)); +} + +function markerPath(event: ServerAutomationEvent, workflowId: string): string { + const cleanId = workflowId.trim().replace(/[^a-zA-Z0-9._-]+/g, '-'); + return `${AUTOMATION_MARKER_ROOT}/${event}/${cleanId}.json`; +} + +function isMissingFileError(error: unknown): boolean { + const message = error instanceof Error ? error.message : String(error); + return ( + message.includes('No such file or directory') || + message.includes('NOT_FOUND') || + message.includes('status code 404') + ); +} + +function normalizePathSegments(path: string): string[] { + return path + .replace(/\\/g, '/') + .split('/') + .filter((segment) => segment && segment !== '.' 
&& segment !== '..'); +} + +function joinServerPath(base: string, relative: string): string { + const baseSegments = normalizePathSegments(base); + const relativeSegments = normalizePathSegments(relative); + return `/${[...baseSegments, ...relativeSegments].join('/')}`.replace(/\/{2,}/g, '/'); +} + +function normalizeArchivePath(path: string, stripComponents = 0): string | null { + const segments = normalizePathSegments(path); + const stripped = segments.slice(Math.max(0, stripComponents)); + if (stripped.length === 0) return null; + return stripped.join('/'); +} + +async function hasMarker( + node: DaemonNodeConnection, + serverUuid: string, + event: ServerAutomationEvent, + workflowId: string, +): Promise { + try { + await daemonReadFile(node, serverUuid, markerPath(event, workflowId)); + return true; + } catch (error) { + if (isMissingFileError(error)) return false; + throw error; + } +} + +async function writeMarker( + node: DaemonNodeConnection, + serverUuid: string, + event: ServerAutomationEvent, + workflowId: string, + payload: Record, +): Promise { + await daemonWriteFile( + node, + serverUuid, + markerPath(event, workflowId), + JSON.stringify(payload, null, 2), + ); +} + +function githubHeaders(): Record { + const headers: Record = { + Accept: 'application/vnd.github+json', + 'User-Agent': 'SourceGamePanel/1.0', + }; + + const token = process.env.GITHUB_TOKEN?.trim(); + if (token) { + headers.Authorization = `Bearer ${token}`; + } + + return headers; +} + +function compileAssetPatterns(patterns: string[]): RegExp[] { + const compiled: RegExp[] = []; + const seen = new Set(); + + const tryCompile = (pattern: string) => { + const key = pattern.trim(); + if (!key || seen.has(key)) return; + try { + compiled.push(new RegExp(key, 'i')); + seen.add(key); + } catch { + // Ignore invalid regex patterns in configuration. + } + }; + + for (const pattern of patterns) { + tryCompile(pattern); + + // Some JSON-stored patterns may be over-escaped (e.g. "\\\\." 
instead of "\\."). + // Collapse double backslashes once and compile a fallback variant. + if (pattern.includes('\\\\')) { + tryCompile(pattern.replace(/\\\\/g, '\\')); + } + } + + return compiled; +} + +async function fetchLatestRelease( + action: ServerAutomationGitHubReleaseExtractAction, +): Promise { + const releaseUrl = `https://api.github.com/repos/${action.owner}/${action.repo}/releases/latest`; + const response = await fetch(releaseUrl, { + headers: githubHeaders(), + }); + + if (!response.ok) { + throw new Error( + `GitHub latest release request failed (${action.owner}/${action.repo}): HTTP ${response.status}`, + ); + } + + const release = (await response.json()) as GitHubReleaseResponse; + if (!Array.isArray(release.assets)) { + throw new Error(`GitHub release payload has no assets (${action.owner}/${action.repo})`); + } + + return release; +} + +interface DirectoryAssetCandidate { + name: string; + downloadUrl: string; +} + +function extractNumberParts(value: string): number[] { + const matches = value.match(/\d+/g); + if (!matches) return []; + return matches + .map((part) => Number.parseInt(part, 10)) + .filter((num) => Number.isFinite(num)); +} + +function compareNumberPartsDesc(a: number[], b: number[]): number { + const maxLength = Math.max(a.length, b.length); + for (let i = 0; i < maxLength; i += 1) { + const left = a[i] ?? -1; + const right = b[i] ?? -1; + if (left !== right) { + return right - left; + } + } + return 0; +} + +function pickLatestDirectoryAsset(candidates: DirectoryAssetCandidate[]): DirectoryAssetCandidate { + const sorted = [...candidates].sort((left, right) => { + const numberDiff = compareNumberPartsDesc( + extractNumberParts(left.name), + extractNumberParts(right.name), + ); + if (numberDiff !== 0) return numberDiff; + return right.name.localeCompare(left.name); + }); + + return sorted[0] ?? 
candidates[0]!; +} + +function extractDirectoryCandidates( + html: string, + indexUrl: string, + assetPattern: RegExp, +): DirectoryAssetCandidate[] { + const hrefRegex = /href\s*=\s*(['"])(.*?)\1/gi; + const candidates: DirectoryAssetCandidate[] = []; + + let match: RegExpExecArray | null = null; + while ((match = hrefRegex.exec(html)) !== null) { + const href = (match[2] ?? '').trim(); + if (!href || href.endsWith('/')) continue; + + try { + const resolvedUrl = new URL(href, indexUrl); + const filename = decodeURIComponent(resolvedUrl.pathname.split('/').filter(Boolean).pop() ?? ''); + if (!filename || !assetPattern.test(filename)) continue; + + candidates.push({ + name: filename, + downloadUrl: resolvedUrl.toString(), + }); + } catch { + // Ignore malformed links. + } + } + + return candidates; +} + +async function resolveLatestDirectoryAsset( + action: ServerAutomationHttpDirectoryExtractAction, +): Promise { + let assetPattern: RegExp; + try { + assetPattern = new RegExp(action.assetNamePattern, 'i'); + } catch { + throw new Error(`Invalid assetNamePattern regex for action ${action.id}`); + } + + const response = await fetch(action.indexUrl, { + headers: { 'User-Agent': 'SourceGamePanel/1.0' }, + }); + + if (!response.ok) { + throw new Error( + `Directory listing request failed (${action.indexUrl}): HTTP ${response.status}`, + ); + } + + const html = await response.text(); + const candidates = extractDirectoryCandidates(html, action.indexUrl, assetPattern); + if (candidates.length === 0) { + throw new Error( + `No matching directory asset for ${action.indexUrl} with pattern: ${action.assetNamePattern}`, + ); + } + + return pickLatestDirectoryAsset(candidates); +} + +async function downloadBinary(url: string, maxBytes: number): Promise { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), DEFAULT_DOWNLOAD_TIMEOUT_MS); + + try { + const response = await fetch(url, { + headers: { + 'User-Agent': 
'SourceGamePanel/1.0', + }, + redirect: 'follow', + signal: controller.signal, + }); + + if (!response.ok) { + throw new Error(`Download failed with HTTP ${response.status}: ${url}`); + } + + const contentLength = Number(response.headers.get('content-length') ?? '0'); + if (contentLength > maxBytes) { + throw new Error(`Artifact exceeds max size (${contentLength} > ${maxBytes} bytes)`); + } + + const buffer = Buffer.from(await response.arrayBuffer()); + if (buffer.length === 0) { + throw new Error('Downloaded artifact is empty'); + } + if (buffer.length > maxBytes) { + throw new Error(`Artifact exceeds max size (${buffer.length} > ${maxBytes} bytes)`); + } + + return buffer; + } finally { + clearTimeout(timeout); + } +} + +async function extractZipFiles(buffer: Buffer, stripComponents = 0): Promise { + const archive = await unzipper.Open.buffer(buffer); + const files: ExtractedFile[] = []; + + for (const entry of archive.files) { + if (entry.type !== 'File') continue; + + const normalized = normalizeArchivePath(entry.path, stripComponents); + if (!normalized) continue; + + files.push({ + path: normalized, + data: await entry.buffer(), + }); + } + + return files; +} + +function extractTarFiles(buffer: Buffer, stripComponents = 0): Promise { + return new Promise((resolve, reject) => { + const extract = tar.extract(); + const files: ExtractedFile[] = []; + + extract.on('entry', (header: Headers, stream, next) => { + const type = header.type ?? 'file'; + const normalized = normalizeArchivePath(header.name, stripComponents); + const isFileType = type === 'file' || type === 'contiguous-file'; + + if (!isFileType || !normalized) { + stream.resume(); + stream.on('end', next); + stream.on('error', reject); + return; + } + + const chunks: Buffer[] = []; + stream.on('data', (chunk: Buffer) => { + chunks.push(Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(chunk)); + }); + stream.on('end', () => { + files.push({ path: normalized, data: Buffer.concat(chunks) }); + next(); + }); + stream.on('error', reject); + }); + + extract.on('finish', () => resolve(files)); + extract.on('error', reject); + extract.end(buffer); + }); +} + +async function extractArtifactFiles( + artifact: Buffer, + assetName: string, + stripComponents = 0, +): Promise { + const name = assetName.toLowerCase(); + + if (name.endsWith('.zip')) { + return extractZipFiles(artifact, stripComponents); + } + + if (name.endsWith('.tar.gz') || name.endsWith('.tgz')) { + return extractTarFiles(gunzipSync(artifact), stripComponents); + } + + if (name.endsWith('.tar')) { + return extractTarFiles(artifact, stripComponents); + } + + const normalized = normalizeArchivePath(assetName, stripComponents) ?? assetName; + return [{ path: normalized, data: artifact }]; +} + +async function executeGitHubReleaseExtract( + app: FastifyInstance, + context: ServerAutomationContext, + action: ServerAutomationGitHubReleaseExtractAction, +): Promise { + const release = await fetchLatestRelease(action); + const patterns = compileAssetPatterns(action.assetNamePatterns); + + if (patterns.length === 0) { + throw new Error(`No valid asset regex pattern for action ${action.id}`); + } + + const asset = release.assets.find((candidate) => + patterns.some((pattern) => pattern.test(candidate.name)), + ); + + if (!asset) { + throw new Error( + `No matching release asset for ${action.owner}/${action.repo} with patterns: ${action.assetNamePatterns.join(', ')}`, + ); + } + + const maxBytes = Number(action.maxBytes) > 0 ? 
Number(action.maxBytes) : DEFAULT_RELEASE_MAX_BYTES; + const artifact = await downloadBinary(asset.browser_download_url, maxBytes); + const files = await extractArtifactFiles( + artifact, + asset.name, + Number(action.stripComponents) || 0, + ); + + if (files.length === 0) { + throw new Error(`Extracted artifact has no files: ${asset.name}`); + } + + const destination = action.destination ?? '/'; + + for (const file of files) { + const targetPath = joinServerPath(destination, file.path); + await daemonWriteFile(context.node, context.serverUuid, targetPath, file.data); + } + + app.log.info( + { + serverId: context.serverId, + serverUuid: context.serverUuid, + gameSlug: context.gameSlug, + event: context.event, + actionId: action.id, + release: release.tag_name, + asset: asset.name, + filesWritten: files.length, + }, + 'Automation action completed: github_release_extract', + ); +} + +async function executeHttpDirectoryExtract( + app: FastifyInstance, + context: ServerAutomationContext, + action: ServerAutomationHttpDirectoryExtractAction, +): Promise { + const selectedAsset = await resolveLatestDirectoryAsset(action); + const maxBytes = Number(action.maxBytes) > 0 ? Number(action.maxBytes) : DEFAULT_RELEASE_MAX_BYTES; + const artifact = await downloadBinary(selectedAsset.downloadUrl, maxBytes); + const files = await extractArtifactFiles( + artifact, + selectedAsset.name, + Number(action.stripComponents) || 0, + ); + + if (files.length === 0) { + throw new Error(`Extracted artifact has no files: ${selectedAsset.name}`); + } + + const destination = action.destination ?? 
'/'; + for (const file of files) { + const targetPath = joinServerPath(destination, file.path); + await daemonWriteFile(context.node, context.serverUuid, targetPath, file.data); + } + + app.log.info( + { + serverId: context.serverId, + serverUuid: context.serverUuid, + gameSlug: context.gameSlug, + event: context.event, + actionId: action.id, + source: action.indexUrl, + asset: selectedAsset.name, + filesWritten: files.length, + }, + 'Automation action completed: http_directory_extract', + ); +} + +async function executeAction( + app: FastifyInstance, + context: ServerAutomationContext, + action: ServerAutomationAction, +): Promise { + switch (action.type) { + case 'github_release_extract': { + await executeGitHubReleaseExtract(app, context, action); + return; + } + + case 'http_directory_extract': { + await executeHttpDirectoryExtract(app, context, action); + return; + } + + case 'write_file': { + const payload = + action.encoding === 'base64' + ? Buffer.from(action.data, 'base64') + : action.data; + + await daemonWriteFile(context.node, context.serverUuid, action.path, payload); + app.log.info( + { + serverId: context.serverId, + serverUuid: context.serverUuid, + event: context.event, + actionId: action.id, + path: action.path, + }, + 'Automation action completed: write_file', + ); + return; + } + + case 'send_command': { + await daemonSendCommand(context.node, context.serverUuid, action.command); + app.log.info( + { + serverId: context.serverId, + serverUuid: context.serverUuid, + event: context.event, + actionId: action.id, + command: action.command, + }, + 'Automation action completed: send_command', + ); + return; + } + + default: { + const unknownAction = action as { type?: unknown }; + throw new Error(`Unsupported automation action type: ${String(unknownAction.type)}`); + } + } +} + +export async function runServerAutomationEvent( + app: FastifyInstance, + context: ServerAutomationContext, +): Promise { + const workflows = 
asAutomationRules(context.automationRulesRaw, context.gameSlug) + .filter((rule) => isObject(rule)) + .filter((rule) => rule.event === context.event) + .filter((rule) => rule.enabled !== false) + .filter((rule) => Array.isArray(rule.actions) && rule.actions.length > 0); + + const result: ServerAutomationRunResult = { + workflowsMatched: workflows.length, + workflowsExecuted: 0, + workflowsSkipped: 0, + workflowsFailed: 0, + actionFailures: 0, + failures: [], + }; + + if (workflows.length === 0) { + return result; + } + + for (const workflow of workflows) { + const runOnce = workflow.runOncePerServer !== false; + + try { + if ( + runOnce && + !context.force && + await hasMarker(context.node, context.serverUuid, context.event, workflow.id) + ) { + result.workflowsSkipped += 1; + app.log.info( + { + serverId: context.serverId, + serverUuid: context.serverUuid, + gameSlug: context.gameSlug, + event: context.event, + workflowId: workflow.id, + }, + 'Skipping automation workflow (already completed)', + ); + continue; + } + + for (const action of workflow.actions) { + try { + await executeAction(app, context, action); + } catch (error) { + const message = errorMessage(error); + result.actionFailures += 1; + result.failures.push({ + level: 'action', + workflowId: workflow.id, + actionId: action.id, + message, + }); + app.log.error( + { + err: error, + errorMessage: message, + serverId: context.serverId, + serverUuid: context.serverUuid, + gameSlug: context.gameSlug, + event: context.event, + workflowId: workflow.id, + actionId: action.id, + }, + 'Automation action failed', + ); + + if (workflow.continueOnError) { + continue; + } + + throw error; + } + } + + if (runOnce) { + await writeMarker(context.node, context.serverUuid, context.event, workflow.id, { + workflowId: workflow.id, + event: context.event, + completedAt: new Date().toISOString(), + }); + } + + app.log.info( + { + serverId: context.serverId, + serverUuid: context.serverUuid, + gameSlug: context.gameSlug, + 
event: context.event, + workflowId: workflow.id, + }, + 'Automation workflow completed', + ); + result.workflowsExecuted += 1; + } catch (error) { + const message = errorMessage(error); + result.workflowsFailed += 1; + result.failures.push({ + level: 'workflow', + workflowId: workflow.id, + message, + }); + app.log.error( + { + err: error, + errorMessage: message, + serverId: context.serverId, + serverUuid: context.serverUuid, + gameSlug: context.gameSlug, + event: context.event, + workflowId: workflow.id, + }, + 'Automation workflow failed', + ); + } + } + + return result; +} diff --git a/apps/api/src/routes/admin/index.ts b/apps/api/src/routes/admin/index.ts index 94f3b1c..591961a 100644 --- a/apps/api/src/routes/admin/index.ts +++ b/apps/api/src/routes/admin/index.ts @@ -61,6 +61,7 @@ export default async function adminRoutes(app: FastifyInstance) { stopCommand?: string; configFiles?: unknown[]; environmentVars?: unknown[]; + automationRules?: unknown[]; }; const existing = await app.db.query.games.findFirst({ @@ -74,6 +75,7 @@ export default async function adminRoutes(app: FastifyInstance) { ...body, configFiles: body.configFiles ?? [], environmentVars: body.environmentVars ?? [], + automationRules: body.automationRules ?? 
[], }) .returning(); diff --git a/apps/api/src/routes/admin/schemas.ts b/apps/api/src/routes/admin/schemas.ts index a5c185d..37f067c 100644 --- a/apps/api/src/routes/admin/schemas.ts +++ b/apps/api/src/routes/admin/schemas.ts @@ -10,6 +10,7 @@ export const CreateGameSchema = { stopCommand: Type.Optional(Type.String()), configFiles: Type.Optional(Type.Array(Type.Any())), environmentVars: Type.Optional(Type.Array(Type.Any())), + automationRules: Type.Optional(Type.Array(Type.Any())), }), }; @@ -22,6 +23,7 @@ export const UpdateGameSchema = { stopCommand: Type.Optional(Type.String()), configFiles: Type.Optional(Type.Array(Type.Any())), environmentVars: Type.Optional(Type.Array(Type.Any())), + automationRules: Type.Optional(Type.Array(Type.Any())), }), }; diff --git a/apps/api/src/routes/games/index.ts b/apps/api/src/routes/games/index.ts new file mode 100644 index 0000000..618b86f --- /dev/null +++ b/apps/api/src/routes/games/index.ts @@ -0,0 +1,16 @@ +import type { FastifyInstance } from 'fastify'; +import { games } from '@source/database'; + +export default async function gameRoutes(app: FastifyInstance) { + app.addHook('onRequest', app.authenticate); + + // GET /api/games + app.get('/', async () => { + const gameList = await app.db + .select() + .from(games) + .orderBy(games.name); + + return { data: gameList }; + }); +} diff --git a/apps/api/src/routes/internal/index.ts b/apps/api/src/routes/internal/index.ts index b0e52c7..07a4cee 100644 --- a/apps/api/src/routes/internal/index.ts +++ b/apps/api/src/routes/internal/index.ts @@ -1,8 +1,9 @@ import { Type } from '@sinclair/typebox'; import type { FastifyInstance, FastifyRequest } from 'fastify'; -import { eq } from 'drizzle-orm'; -import { nodes } from '@source/database'; +import { and, eq, lte } from 'drizzle-orm'; +import { nodes, scheduledTasks, servers } from '@source/database'; import { AppError } from '../../lib/errors.js'; +import { computeNextRun } from '../../lib/schedule-utils.js'; function 
extractBearerToken(authHeader?: string): string | null { if (!authHeader) return null; @@ -11,7 +12,10 @@ function extractBearerToken(authHeader?: string): string | null { return token; } -async function requireDaemonToken(app: FastifyInstance, request: FastifyRequest): Promise { +async function requireDaemonToken( + app: FastifyInstance, + request: FastifyRequest, +): Promise<{ id: string }> { const token = extractBearerToken( typeof request.headers.authorization === 'string' ? request.headers.authorization @@ -30,12 +34,44 @@ async function requireDaemonToken(app: FastifyInstance, request: FastifyRequest) if (!node) { throw AppError.unauthorized('Invalid daemon token', 'DAEMON_AUTH_INVALID'); } + + return node; } export default async function internalRoutes(app: FastifyInstance) { app.get('/schedules/due', async (request) => { - await requireDaemonToken(app, request); - return { tasks: [] }; + const node = await requireDaemonToken(app, request); + const now = new Date(); + + const dueTasks = await app.db + .select({ + id: scheduledTasks.id, + serverUuid: servers.uuid, + action: scheduledTasks.action, + payload: scheduledTasks.payload, + scheduleType: scheduledTasks.scheduleType, + isActive: scheduledTasks.isActive, + nextRunAt: scheduledTasks.nextRunAt, + }) + .from(scheduledTasks) + .innerJoin(servers, eq(scheduledTasks.serverId, servers.id)) + .where(and( + eq(servers.nodeId, node.id), + eq(scheduledTasks.isActive, true), + lte(scheduledTasks.nextRunAt, now), + )); + + return { + tasks: dueTasks.map((task) => ({ + id: task.id, + server_uuid: task.serverUuid, + action: task.action, + payload: task.payload, + schedule_type: task.scheduleType, + is_active: task.isActive, + next_run_at: task.nextRunAt?.toISOString() ?? 
null, + })), + }; }); app.post( @@ -48,8 +84,41 @@ export default async function internalRoutes(app: FastifyInstance) { }, }, async (request) => { - await requireDaemonToken(app, request); + const node = await requireDaemonToken(app, request); const { taskId } = request.params as { taskId: string }; + + const [task] = await app.db + .select({ + id: scheduledTasks.id, + isActive: scheduledTasks.isActive, + scheduleType: scheduledTasks.scheduleType, + scheduleData: scheduledTasks.scheduleData, + }) + .from(scheduledTasks) + .innerJoin(servers, eq(scheduledTasks.serverId, servers.id)) + .where(and( + eq(scheduledTasks.id, taskId), + eq(servers.nodeId, node.id), + )); + + if (!task) { + throw AppError.notFound('Scheduled task not found'); + } + + const now = new Date(); + const nextRunAt = task.isActive + ? computeNextRun(task.scheduleType, task.scheduleData as Record) + : null; + + await app.db + .update(scheduledTasks) + .set({ + lastRunAt: now, + nextRunAt, + updatedAt: now, + }) + .where(eq(scheduledTasks.id, taskId)); + return { success: true, taskId }; }, ); diff --git a/apps/api/src/routes/nodes/index.ts b/apps/api/src/routes/nodes/index.ts index ef5d09f..201c689 100644 --- a/apps/api/src/routes/nodes/index.ts +++ b/apps/api/src/routes/nodes/index.ts @@ -5,6 +5,11 @@ import { nodes, allocations, servers, games } from '@source/database'; import { AppError } from '../../lib/errors.js'; import { requirePermission } from '../../lib/permissions.js'; import { createAuditLog } from '../../lib/audit.js'; +import { + daemonGetNodeStats, + daemonGetNodeStatus, + type DaemonNodeConnection, +} from '../../lib/daemon.js'; import { NodeParamSchema, CreateNodeSchema, @@ -155,7 +160,7 @@ export default async function nodeRoutes(app: FastifyInstance) { }); // GET /api/organizations/:orgId/nodes/:nodeId/stats - // Returns basic stats from DB; real-time stats come from daemon via gRPC + // Returns real-time stats from daemon when available, with DB fallback. 
app.get('/:nodeId/stats', { schema: NodeParamSchema }, async (request) => { const { orgId, nodeId } = request.params as { orgId: string; nodeId: string }; await requirePermission(request, orgId, 'node.read'); @@ -171,17 +176,54 @@ export default async function nodeRoutes(app: FastifyInstance) { .where(eq(servers.nodeId, nodeId)); const totalServers = serverList.length; - const activeServers = serverList.filter((s) => s.status === 'running').length; + let activeServers = serverList.filter((s) => s.status === 'running').length; + let cpuPercent = 0; + let memoryUsed = 0; + let memoryTotal = node.memoryTotal; + let diskUsed = 0; + let diskTotal = node.diskTotal; + let uptime = 0; + + const daemonNode: DaemonNodeConnection = { + fqdn: node.fqdn, + grpcPort: node.grpcPort, + daemonToken: node.daemonToken, + }; + + try { + const [liveStats, liveStatus] = await Promise.all([ + daemonGetNodeStats(daemonNode), + daemonGetNodeStatus(daemonNode), + ]); + + cpuPercent = Number.isFinite(liveStats.cpuPercent) + ? Math.max(0, Math.min(100, liveStats.cpuPercent)) + : 0; + memoryUsed = Math.max(0, liveStats.memoryUsed); + memoryTotal = liveStats.memoryTotal > 0 ? liveStats.memoryTotal : node.memoryTotal; + diskUsed = Math.max(0, liveStats.diskUsed); + diskTotal = liveStats.diskTotal > 0 ? 
liveStats.diskTotal : node.diskTotal; + uptime = Math.max(0, liveStatus.uptimeSeconds); + + if (Number.isFinite(liveStatus.activeServers)) { + activeServers = Math.max(0, Math.min(totalServers, liveStatus.activeServers)); + } + } catch (error) { + request.log.warn( + { error, nodeId, orgId }, + 'Failed to fetch live node stats from daemon, returning fallback values', + ); + } return { - cpuPercent: 0, - memoryUsed: 0, - memoryTotal: node.memoryTotal, - diskUsed: 0, - diskTotal: node.diskTotal, + cpuPercent, + memoryUsed, + memoryTotal, + diskUsed, + diskTotal, activeServers, totalServers, - uptime: 0, + uptime, }; }); diff --git a/apps/api/src/routes/servers/backups.ts b/apps/api/src/routes/servers/backups.ts index fa28b22..65f91a7 100644 --- a/apps/api/src/routes/servers/backups.ts +++ b/apps/api/src/routes/servers/backups.ts @@ -1,10 +1,16 @@ import type { FastifyInstance } from 'fastify'; import { eq, and } from 'drizzle-orm'; import { Type } from '@sinclair/typebox'; -import { servers, backups } from '@source/database'; +import { servers, backups, nodes } from '@source/database'; import { AppError } from '../../lib/errors.js'; import { requirePermission } from '../../lib/permissions.js'; import { createAuditLog } from '../../lib/audit.js'; +import { + daemonCreateBackup, + daemonDeleteBackup, + daemonRestoreBackup, + type DaemonNodeConnection, +} from '../../lib/daemon.js'; const ParamSchema = { params: Type.Object({ @@ -54,10 +60,7 @@ export default async function backupRoutes(app: FastifyInstance) { const body = request.body as { name: string; isLocked?: boolean }; - const server = await app.db.query.servers.findFirst({ - where: and(eq(servers.id, serverId), eq(servers.organizationId, orgId)), - }); - if (!server) throw AppError.notFound('Server not found'); + const serverContext = await getServerBackupContext(app, orgId, serverId); // Create backup record (pending — daemon will update when complete) const [backup] = await app.db @@ -69,12 +72,38 @@ export 
default async function backupRoutes(app: FastifyInstance) { }) .returning(); - // TODO: Send gRPC CreateBackup to daemon - // Daemon will: - // 1. tar+gz the server directory - // 2. Upload to @source/cdn - // 3. Callback to API with cdnPath, sizeBytes, checksum - // 4. API updates backup record with completedAt + if (!backup) { + throw new AppError(500, 'Failed to create backup record', 'BACKUP_CREATE_FAILED'); + } + + let completedBackup = backup; + try { + const daemonResult = await daemonCreateBackup( + serverContext.node, + serverContext.serverUuid, + backup.id, + ); + + if (!daemonResult.success) { + throw new Error('Daemon returned unsuccessful backup response'); + } + + const [updated] = await app.db + .update(backups) + .set({ + sizeBytes: daemonResult.sizeBytes, + checksum: daemonResult.checksum || null, + completedAt: new Date(), + }) + .where(eq(backups.id, backup.id)) + .returning(); + + completedBackup = updated ?? completedBackup; + } catch (error) { + request.log.error({ error, serverId, backupId: backup.id }, 'Failed to create backup on daemon'); + await app.db.delete(backups).where(eq(backups.id, backup.id)); + throw new AppError(502, 'Failed to create backup on daemon', 'DAEMON_BACKUP_CREATE_FAILED'); + } await createAuditLog(app.db, request, { organizationId: orgId, @@ -83,7 +112,7 @@ export default async function backupRoutes(app: FastifyInstance) { metadata: { name: body.name }, }); - return reply.code(201).send(backup); + return reply.code(201).send(completedBackup); }); // POST /backups/:backupId/restore — restore a backup @@ -95,10 +124,7 @@ export default async function backupRoutes(app: FastifyInstance) { }; await requirePermission(request, orgId, 'backup.restore'); - const server = await app.db.query.servers.findFirst({ - where: and(eq(servers.id, serverId), eq(servers.organizationId, orgId)), - }); - if (!server) throw AppError.notFound('Server not found'); + const serverContext = await getServerBackupContext(app, orgId, serverId); 
const backup = await app.db.query.backups.findFirst({ where: and(eq(backups.id, backupId), eq(backups.serverId, serverId)), @@ -106,12 +132,20 @@ export default async function backupRoutes(app: FastifyInstance) { if (!backup) throw AppError.notFound('Backup not found'); if (!backup.completedAt) throw AppError.badRequest('Backup is not yet completed'); - // TODO: Send gRPC RestoreBackup to daemon - // Daemon will: - // 1. Stop the server - // 2. Download backup from @source/cdn - // 3. Extract tar.gz over server directory - // 4. Start the server + try { + await daemonRestoreBackup( + serverContext.node, + serverContext.serverUuid, + backup.id, + backup.cdnPath, + ); + } catch (error) { + request.log.error( + { error, serverId, backupId }, + 'Failed to restore backup on daemon', + ); + throw new AppError(502, 'Failed to restore backup on daemon', 'DAEMON_BACKUP_RESTORE_FAILED'); + } await createAuditLog(app.db, request, { organizationId: orgId, @@ -161,7 +195,17 @@ export default async function backupRoutes(app: FastifyInstance) { if (!backup) throw AppError.notFound('Backup not found'); if (backup.isLocked) throw AppError.badRequest('Cannot delete a locked backup'); - // TODO: Send gRPC DeleteBackup to daemon to remove from CDN + const serverContext = await getServerBackupContext(app, orgId, serverId); + + try { + await daemonDeleteBackup(serverContext.node, serverContext.serverUuid, backup.id); + } catch (error) { + request.log.error( + { error, serverId, backupId }, + 'Failed to delete backup on daemon', + ); + throw new AppError(502, 'Failed to delete backup on daemon', 'DAEMON_BACKUP_DELETE_FAILED'); + } await app.db.delete(backups).where(eq(backups.id, backupId)); @@ -175,3 +219,33 @@ export default async function backupRoutes(app: FastifyInstance) { return reply.code(204).send(); }); } + +async function getServerBackupContext( + app: FastifyInstance, + orgId: string, + serverId: string, +): Promise<{ serverUuid: string; node: DaemonNodeConnection }> { + const 
[server] = await app.db + .select({ + serverUuid: servers.uuid, + nodeFqdn: nodes.fqdn, + nodeGrpcPort: nodes.grpcPort, + nodeDaemonToken: nodes.daemonToken, + }) + .from(servers) + .innerJoin(nodes, eq(servers.nodeId, nodes.id)) + .where(and(eq(servers.id, serverId), eq(servers.organizationId, orgId))); + + if (!server) { + throw AppError.notFound('Server not found'); + } + + return { + serverUuid: server.serverUuid, + node: { + fqdn: server.nodeFqdn, + grpcPort: server.nodeGrpcPort, + daemonToken: server.nodeDaemonToken, + }, + }; +} diff --git a/apps/api/src/routes/servers/config.ts b/apps/api/src/routes/servers/config.ts index f75c151..84e3f23 100644 --- a/apps/api/src/routes/servers/config.ts +++ b/apps/api/src/routes/servers/config.ts @@ -111,21 +111,12 @@ export default async function configRoutes(app: FastifyInstance) { const { server, node, configFile } = await getServerConfig(app, orgId, serverId, configIndex); - // If editableKeys is set, only allow those keys - if (configFile.editableKeys && configFile.editableKeys.length > 0) { - const allowedKeys = new Set(configFile.editableKeys); - const invalidKeys = entries.filter((e) => !allowedKeys.has(e.key)); - if (invalidKeys.length > 0) { - throw AppError.badRequest( - `Keys not allowed: ${invalidKeys.map((k) => k.key).join(', ')}`, - ); - } - } - let originalContent: string | undefined; + let originalEntries: { key: string; value: string }[] = []; try { const current = await daemonReadFile(node, server.uuid, configFile.path); originalContent = current.data.toString('utf8'); + originalEntries = parseConfig(originalContent, configFile.parser as ConfigParser); } catch (error) { if (!isMissingConfigFileError(error)) { app.log.error({ error, serverId, path: configFile.path }, 'Failed to read existing config before write'); @@ -133,6 +124,22 @@ export default async function configRoutes(app: FastifyInstance) { } } + // If editableKeys is set, allow: + // 1) explicitly editable keys + // 2) keys that already exist 
in the current file + if (configFile.editableKeys && configFile.editableKeys.length > 0) { + const allowedKeys = new Set(configFile.editableKeys); + const existingKeys = new Set(originalEntries.map((entry) => entry.key)); + const invalidKeys = entries.filter( + (entry) => !allowedKeys.has(entry.key) && !existingKeys.has(entry.key), + ); + if (invalidKeys.length > 0) { + throw AppError.badRequest( + `Keys not allowed: ${invalidKeys.map((k) => k.key).join(', ')}`, + ); + } + } + const content = serializeConfig( entries, configFile.parser as ConfigParser, diff --git a/apps/api/src/routes/servers/files.ts b/apps/api/src/routes/servers/files.ts index 1ebbb10..df1aa9a 100644 --- a/apps/api/src/routes/servers/files.ts +++ b/apps/api/src/routes/servers/files.ts @@ -19,6 +19,17 @@ const FileParamSchema = { }), }; +function decodeBase64Payload(data: string): Buffer { + const normalized = data.trim(); + if (!normalized) return Buffer.alloc(0); + + if (!/^[A-Za-z0-9+/]*={0,2}$/.test(normalized) || normalized.length % 4 !== 0) { + throw AppError.badRequest('Invalid base64 payload'); + } + + return Buffer.from(normalized, 'base64'); +} + export default async function fileRoutes(app: FastifyInstance) { app.addHook('onRequest', app.authenticate); @@ -56,40 +67,61 @@ export default async function fileRoutes(app: FastifyInstance) { ...FileParamSchema, querystring: Type.Object({ path: Type.String({ minLength: 1 }), + encoding: Type.Optional(Type.Union([Type.Literal('utf8'), Type.Literal('base64')])), }), }, }, async (request) => { const { orgId, serverId } = request.params as { orgId: string; serverId: string }; - const { path } = request.query as { path: string }; + const { path, encoding } = request.query as { + path: string; + encoding?: 'utf8' | 'base64'; + }; await requirePermission(request, orgId, 'files.read'); const serverContext = await getServerContext(app, orgId, serverId); const content = await daemonReadFile(serverContext.node, serverContext.serverUuid, path); - return { 
data: content.data.toString('utf8') }; + const requestedEncoding = encoding === 'base64' ? 'base64' : 'utf8'; + + return { + data: + requestedEncoding === 'base64' + ? content.data.toString('base64') + : content.data.toString('utf8'), + encoding: requestedEncoding, + mimeType: content.mimeType, + }; }, ); app.post( '/write', { + bodyLimit: 128 * 1024 * 1024, schema: { ...FileParamSchema, body: Type.Object({ path: Type.String({ minLength: 1 }), data: Type.String(), + encoding: Type.Optional(Type.Union([Type.Literal('utf8'), Type.Literal('base64')])), }), }, }, async (request) => { const { orgId, serverId } = request.params as { orgId: string; serverId: string }; - const { path, data } = request.body as { path: string; data: string }; + const { path, data, encoding } = request.body as { + path: string; + data: string; + encoding?: 'utf8' | 'base64'; + }; await requirePermission(request, orgId, 'files.write'); const serverContext = await getServerContext(app, orgId, serverId); - await daemonWriteFile(serverContext.node, serverContext.serverUuid, path, data); + const payload = encoding === 'base64' ? 
decodeBase64Payload(data) : data; + + await daemonWriteFile(serverContext.node, serverContext.serverUuid, path, payload); return { success: true, path }; }, ); diff --git a/apps/api/src/routes/servers/index.ts b/apps/api/src/routes/servers/index.ts index aedcbb6..a56a923 100644 --- a/apps/api/src/routes/servers/index.ts +++ b/apps/api/src/routes/servers/index.ts @@ -1,19 +1,22 @@ import type { FastifyInstance } from 'fastify'; +import { Type } from '@sinclair/typebox'; import { eq, and, count } from 'drizzle-orm'; import { randomUUID } from 'crypto'; import { setTimeout as sleep } from 'timers/promises'; import { servers, allocations, nodes, games } from '@source/database'; -import type { PowerAction } from '@source/shared'; +import type { GameAutomationRule, PowerAction, ServerAutomationEvent } from '@source/shared'; import { AppError } from '../../lib/errors.js'; import { requirePermission } from '../../lib/permissions.js'; import { paginate, paginatedResponse, PaginationQuerySchema } from '../../lib/pagination.js'; import { createAuditLog } from '../../lib/audit.js'; +import { runServerAutomationEvent } from '../../lib/server-automation.js'; import { daemonCreateServer, daemonDeleteServer, daemonGetServerStatus, daemonSetPowerState, type DaemonNodeConnection, + type DaemonPortMapping, } from '../../lib/daemon.js'; import { ServerParamSchema, @@ -82,11 +85,27 @@ function buildDaemonEnvironment( return environment; } +function buildDaemonPorts(gameSlug: string, allocationPort: number, containerPort: number): DaemonPortMapping[] { + const slug = gameSlug.toLowerCase(); + if (slug === 'cs2' || slug === 'csgo') { + return [ + { host_port: allocationPort, container_port: containerPort, protocol: 'udp' }, + { host_port: allocationPort, container_port: containerPort, protocol: 'tcp' }, + ]; + } + if (slug === 'minecraft-bedrock') { + return [{ host_port: allocationPort, container_port: containerPort, protocol: 'udp' }]; + } + return [{ host_port: allocationPort, 
container_port: containerPort, protocol: 'tcp' }]; +} + async function syncServerInstallStatus( app: FastifyInstance, node: DaemonNodeConnection, serverId: string, serverUuid: string, + gameSlug: string, + automationRules: unknown, ): Promise { const maxAttempts = 120; const intervalMs = 5_000; @@ -116,6 +135,18 @@ async function syncServerInstallStatus( { serverId, serverUuid, status: mapped, attempt }, 'Synchronized install status from daemon', ); + + if (mapped === 'running' || mapped === 'stopped') { + void runServerAutomationEvent(app, { + serverId, + serverUuid, + gameSlug, + event: 'server.install.completed', + node, + automationRulesRaw: automationRules, + }); + } + return; } catch (error) { app.log.warn( @@ -267,13 +298,7 @@ export default async function serverRoutes(app: FastifyInstance) { cpu_limit: server.cpuLimit, startup_command: body.startupOverride ?? game.startupCommand, environment: buildDaemonEnvironment(game.environmentVars, body.environment, server.memoryLimit), - ports: [ - { - host_port: allocation.port, - container_port: game.defaultPort, - protocol: 'tcp' as const, - }, - ], + ports: buildDaemonPorts(game.slug, allocation.port, game.defaultPort), install_plugin_urls: [], }; @@ -281,6 +306,7 @@ export default async function serverRoutes(app: FastifyInstance) { const daemonResponse = await daemonCreateServer(nodeConnection, daemonRequest); const daemonStatus = mapDaemonStatus(daemonResponse.status) ?? 'installing'; const now = new Date(); + const automationRules = (game as { automationRules?: GameAutomationRule[] }).automationRules ?? 
[]; const [updatedServer] = await app.db .update(servers) @@ -293,7 +319,23 @@ export default async function serverRoutes(app: FastifyInstance) { .returning(); if (daemonStatus === 'installing') { - void syncServerInstallStatus(app, nodeConnection, server.id, server.uuid); + void syncServerInstallStatus( + app, + nodeConnection, + server.id, + server.uuid, + game.slug, + automationRules, + ); + } else if (daemonStatus === 'running' || daemonStatus === 'stopped') { + void runServerAutomationEvent(app, { + serverId: server.id, + serverUuid: server.uuid, + gameSlug: game.slug, + event: 'server.install.completed', + node: nodeConnection, + automationRulesRaw: automationRules, + }); } await createAuditLog(app.db, request, { @@ -319,6 +361,83 @@ export default async function serverRoutes(app: FastifyInstance) { } }); + // GET /api/organizations/:orgId/servers/:serverId + app.post( + '/:serverId/automation/run', + { + schema: { + ...ServerParamSchema, + body: Type.Object({ + event: Type.Union([ + Type.Literal('server.created'), + Type.Literal('server.install.completed'), + Type.Literal('server.power.started'), + Type.Literal('server.power.stopped'), + ]), + force: Type.Optional(Type.Boolean({ default: false })), + }), + }, + }, + async (request) => { + const { orgId, serverId } = request.params as { orgId: string; serverId: string }; + const { event, force } = request.body as { + event: ServerAutomationEvent; + force?: boolean; + }; + + await requirePermission(request, orgId, 'server.update'); + + const [server] = await app.db + .select({ + id: servers.id, + uuid: servers.uuid, + gameSlug: games.slug, + automationRules: games.automationRules, + nodeFqdn: nodes.fqdn, + nodeGrpcPort: nodes.grpcPort, + nodeDaemonToken: nodes.daemonToken, + }) + .from(servers) + .innerJoin(games, eq(servers.gameId, games.id)) + .innerJoin(nodes, eq(servers.nodeId, nodes.id)) + .where(and(eq(servers.id, serverId), eq(servers.organizationId, orgId))); + + if (!server) throw 
AppError.notFound('Server not found'); + + const result = await runServerAutomationEvent(app, { + serverId: server.id, + serverUuid: server.uuid, + gameSlug: server.gameSlug, + event, + force: force ?? false, + node: { + fqdn: server.nodeFqdn, + grpcPort: server.nodeGrpcPort, + daemonToken: server.nodeDaemonToken, + }, + automationRulesRaw: server.automationRules, + }); + + await createAuditLog(app.db, request, { + organizationId: orgId, + serverId, + action: 'server.automation.run', + metadata: { + event, + force: force ?? false, + result, + }, + }); + + return { + success: true, + event, + force: force ?? false, + result, + }; + }, + ); + // GET /api/organizations/:orgId/servers/:serverId app.get('/:serverId', { schema: ServerParamSchema }, async (request) => { const { orgId, serverId } = request.params as { orgId: string; serverId: string }; @@ -503,6 +622,32 @@ export default async function serverRoutes(app: FastifyInstance) { }) .where(eq(servers.id, serverId)); + if (action === 'start' || action === 'restart') { + const [serverWithGame] = await app.db + .select({ + gameSlug: games.slug, + automationRules: games.automationRules, + }) + .from(servers) + .innerJoin(games, eq(servers.gameId, games.id)) + .where(eq(servers.id, serverId)); + + if (serverWithGame) { + void runServerAutomationEvent(app, { + serverId, + serverUuid: server.uuid, + gameSlug: serverWithGame.gameSlug, + event: 'server.power.started', + node: { + fqdn: server.nodeFqdn, + grpcPort: server.nodeGrpcPort, + daemonToken: server.nodeDaemonToken, + }, + automationRulesRaw: serverWithGame.automationRules, + }); + } + } + await createAuditLog(app.db, request, { organizationId: orgId, serverId, diff --git a/apps/api/src/routes/servers/plugins.ts b/apps/api/src/routes/servers/plugins.ts index b5bb75e..5735212 100644 --- a/apps/api/src/routes/servers/plugins.ts +++ b/apps/api/src/routes/servers/plugins.ts @@ -1,16 +1,24 @@ import type { FastifyInstance } from 'fastify'; import { eq, and } from 
'drizzle-orm'; import { Type } from '@sinclair/typebox'; -import { servers, plugins, serverPlugins, games } from '@source/database'; +import { servers, plugins, serverPlugins, games, nodes } from '@source/database'; import { AppError } from '../../lib/errors.js'; import { requirePermission } from '../../lib/permissions.js'; import { createAuditLog } from '../../lib/audit.js'; +import { + daemonDeleteFiles, + daemonWriteFile, + type DaemonNodeConnection, +} from '../../lib/daemon.js'; import { searchSpigetPlugins, getSpigetResource, getSpigetDownloadUrl, } from '../../lib/spiget.js'; +const PLUGIN_DOWNLOAD_TIMEOUT_MS = 45_000; +const PLUGIN_DOWNLOAD_MAX_BYTES = 128 * 1024 * 1024; + const ParamSchema = { params: Type.Object({ orgId: Type.String({ format: 'uuid' }), @@ -18,6 +26,202 @@ const ParamSchema = { }), }; +interface ServerPluginContext { + serverId: string; + serverUuid: string; + gameId: string; + gameSlug: string; + gameName: string; + node: DaemonNodeConnection; +} + +interface PluginArtifactInput { + id: string; + slug: string; + downloadUrl: string | null; +} + +function toSlug(value: string): string { + return value + .trim() + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/(^-|-$)/g, '') + .slice(0, 200); +} + +function pluginInstallDirectory(gameSlug: string): string { + const slug = gameSlug.toLowerCase(); + if (slug === 'cs2' || slug === 'csgo') return '/game/csgo/addons'; + if (slug === 'rust') return '/oxide/plugins'; + if (slug === 'minecraft-java') return '/plugins'; + return '/plugins'; +} + +function pluginFileExtension(downloadUrl: string): string { + try { + const pathname = new URL(downloadUrl).pathname; + const match = pathname.match(/\.([a-z0-9]{1,8})$/i); + if (match) { + return `.${match[1]!.toLowerCase()}`; + } + } catch { + // Ignore URL parse failures and use default extension below. 
+ } + return '.jar'; +} + +function pluginFilePath(gameSlug: string, plugin: PluginArtifactInput): string | null { + if (!plugin.downloadUrl) return null; + const safeSlug = toSlug(plugin.slug) || 'plugin'; + const extension = pluginFileExtension(plugin.downloadUrl); + const directory = pluginInstallDirectory(gameSlug).replace(/\/+$/, ''); + return `${directory}/${safeSlug}-${plugin.id.slice(0, 8)}${extension}`; +} + +async function getServerPluginContext( + app: FastifyInstance, + orgId: string, + serverId: string, +): Promise<ServerPluginContext> { + const [row] = await app.db + .select({ + serverId: servers.id, + serverUuid: servers.uuid, + gameId: servers.gameId, + gameSlug: games.slug, + gameName: games.name, + nodeFqdn: nodes.fqdn, + nodeGrpcPort: nodes.grpcPort, + nodeDaemonToken: nodes.daemonToken, + }) + .from(servers) + .innerJoin(games, eq(servers.gameId, games.id)) + .innerJoin(nodes, eq(servers.nodeId, nodes.id)) + .where(and(eq(servers.id, serverId), eq(servers.organizationId, orgId))); + + if (!row) { + throw AppError.notFound('Server not found'); + } + + return { + serverId: row.serverId, + serverUuid: row.serverUuid, + gameId: row.gameId, + gameSlug: row.gameSlug, + gameName: row.gameName, + node: { + fqdn: row.nodeFqdn, + grpcPort: row.nodeGrpcPort, + daemonToken: row.nodeDaemonToken, + }, + }; +} + +async function getPluginForGame( + app: FastifyInstance, + pluginId: string, + gameId: string, +) { + const plugin = await app.db.query.plugins.findFirst({ + where: and(eq(plugins.id, pluginId), eq(plugins.gameId, gameId)), + }); + if (!plugin) { + throw AppError.notFound('Plugin not found for this game'); + } + return plugin; +} + +async function downloadPluginArtifact(downloadUrl: string): Promise<Buffer> { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), PLUGIN_DOWNLOAD_TIMEOUT_MS); + + try { + const res = await fetch(downloadUrl, { + headers: { 'User-Agent': 'GamePanel/1.0' }, + redirect: 'follow', + signal: controller.signal,
+ }); + + if (!res.ok) { + throw new AppError( + 502, + `Plugin download failed with HTTP ${res.status}`, + 'PLUGIN_DOWNLOAD_FAILED', + ); + } + + const contentLength = Number(res.headers.get('content-length') ?? '0'); + if (contentLength > PLUGIN_DOWNLOAD_MAX_BYTES) { + throw new AppError(413, 'Plugin artifact is too large', 'PLUGIN_TOO_LARGE'); + } + + const body = Buffer.from(await res.arrayBuffer()); + if (body.length === 0) { + throw AppError.badRequest('Plugin download returned empty content'); + } + if (body.length > PLUGIN_DOWNLOAD_MAX_BYTES) { + throw new AppError(413, 'Plugin artifact is too large', 'PLUGIN_TOO_LARGE'); + } + + return body; + } catch (error) { + if (error instanceof AppError) throw error; + throw new AppError( + 502, + 'Unable to download plugin artifact', + 'PLUGIN_DOWNLOAD_FAILED', + ); + } finally { + clearTimeout(timeout); + } +} + +async function installPluginForServer( + app: FastifyInstance, + context: ServerPluginContext, + plugin: PluginArtifactInput & { version: string | null }, + installedVersion: string | null, +) { + if (!plugin.downloadUrl) { + throw AppError.badRequest('Plugin has no download URL configured'); + } + + const existing = await app.db.query.serverPlugins.findFirst({ + where: and( + eq(serverPlugins.serverId, context.serverId), + eq(serverPlugins.pluginId, plugin.id), + ), + }); + if (existing) { + throw AppError.conflict('Plugin is already installed'); + } + + const artifact = await downloadPluginArtifact(plugin.downloadUrl); + const installPath = pluginFilePath(context.gameSlug, plugin); + if (!installPath) { + throw AppError.badRequest('Plugin install path could not be determined'); + } + + await daemonWriteFile(context.node, context.serverUuid, installPath, artifact); + + const [installed] = await app.db + .insert(serverPlugins) + .values({ + serverId: context.serverId, + pluginId: plugin.id, + installedVersion: installedVersion ?? plugin.version ?? 
null, + isActive: true, + }) + .returning(); + + if (!installed) { + throw new AppError(500, 'Failed to save plugin installation', 'PLUGIN_INSTALL_FAILED'); + } + + return { installed, installPath }; +} + export default async function pluginRoutes(app: FastifyInstance) { app.addHook('onRequest', app.authenticate); @@ -25,11 +229,7 @@ export default async function pluginRoutes(app: FastifyInstance) { app.get('/', { schema: ParamSchema }, async (request) => { const { orgId, serverId } = request.params as { orgId: string; serverId: string }; await requirePermission(request, orgId, 'plugin.read'); - - const server = await app.db.query.servers.findFirst({ - where: and(eq(servers.id, serverId), eq(servers.organizationId, orgId)), - }); - if (!server) throw AppError.notFound('Server not found'); + await getServerPluginContext(app, orgId, serverId); const installed = await app.db .select({ @@ -51,6 +251,273 @@ export default async function pluginRoutes(app: FastifyInstance) { return { plugins: installed }; }); + // GET /plugins/marketplace — list game-specific marketplace plugins + app.get( + '/marketplace', + { + schema: { + ...ParamSchema, + querystring: Type.Object({ + q: Type.Optional(Type.String({ minLength: 1 })), + }), + }, + }, + async (request) => { + const { orgId, serverId } = request.params as { orgId: string; serverId: string }; + const { q } = request.query as { q?: string }; + await requirePermission(request, orgId, 'plugin.read'); + const context = await getServerPluginContext(app, orgId, serverId); + + const catalog = await app.db + .select({ + id: plugins.id, + name: plugins.name, + slug: plugins.slug, + description: plugins.description, + source: plugins.source, + externalId: plugins.externalId, + downloadUrl: plugins.downloadUrl, + version: plugins.version, + updatedAt: plugins.updatedAt, + }) + .from(plugins) + .where(eq(plugins.gameId, context.gameId)) + .orderBy(plugins.name); + + const installedRows = await app.db + .select({ + installId: 
serverPlugins.id, + pluginId: serverPlugins.pluginId, + installedVersion: serverPlugins.installedVersion, + isActive: serverPlugins.isActive, + installedAt: serverPlugins.installedAt, + }) + .from(serverPlugins) + .where(eq(serverPlugins.serverId, context.serverId)); + + const installedByPluginId = new Map( + installedRows.map((row) => [row.pluginId, row]), + ); + + const needle = q?.trim().toLowerCase(); + const filtered = needle + ? catalog.filter((plugin) => { + const name = plugin.name.toLowerCase(); + const description = (plugin.description ?? '').toLowerCase(); + return name.includes(needle) || description.includes(needle); + }) + : catalog; + + return { + game: { + id: context.gameId, + slug: context.gameSlug, + name: context.gameName, + }, + plugins: filtered.map((plugin) => { + const installed = installedByPluginId.get(plugin.id); + return { + ...plugin, + isInstalled: Boolean(installed), + installId: installed?.installId ?? null, + installedVersion: installed?.installedVersion ?? null, + isActive: installed?.isActive ?? false, + installedAt: installed?.installedAt ?? 
null, + }; + }), + }; + }, + ); + + // POST /plugins/marketplace — create a game-specific plugin entry + app.post( + '/marketplace', + { + schema: { + ...ParamSchema, + body: Type.Object({ + name: Type.String({ minLength: 1, maxLength: 255 }), + slug: Type.Optional(Type.String({ minLength: 1, maxLength: 255 })), + description: Type.Optional(Type.String()), + downloadUrl: Type.String({ format: 'uri' }), + version: Type.Optional(Type.String({ maxLength: 100 })), + }), + }, + }, + async (request, reply) => { + const { orgId, serverId } = request.params as { orgId: string; serverId: string }; + const { name, slug, description, downloadUrl, version } = request.body as { + name: string; + slug?: string; + description?: string; + downloadUrl: string; + version?: string; + }; + await requirePermission(request, orgId, 'plugin.manage'); + const context = await getServerPluginContext(app, orgId, serverId); + + const normalizedSlug = toSlug(slug ?? name); + if (!normalizedSlug) { + throw AppError.badRequest('Plugin slug is invalid'); + } + + const existing = await app.db.query.plugins.findFirst({ + where: and( + eq(plugins.gameId, context.gameId), + eq(plugins.slug, normalizedSlug), + ), + }); + if (existing) { + throw AppError.conflict('A plugin with this slug already exists for the game'); + } + + const [created] = await app.db + .insert(plugins) + .values({ + gameId: context.gameId, + name, + slug: normalizedSlug, + description: description ?? null, + source: 'manual', + downloadUrl, + version: version ?? 
null, + }) + .returning(); + + await createAuditLog(app.db, request, { + organizationId: orgId, + serverId, + action: 'plugin.marketplace.create', + metadata: { pluginId: created?.id, gameId: context.gameId, name }, + }); + + return reply.code(201).send(created); + }, + ); + + // PATCH /plugins/marketplace/:pluginId — update a marketplace plugin entry + app.patch( + '/marketplace/:pluginId', + { + schema: { + params: Type.Object({ + orgId: Type.String({ format: 'uuid' }), + serverId: Type.String({ format: 'uuid' }), + pluginId: Type.String({ format: 'uuid' }), + }), + body: Type.Object({ + name: Type.Optional(Type.String({ minLength: 1, maxLength: 255 })), + slug: Type.Optional(Type.String({ minLength: 1, maxLength: 255 })), + description: Type.Optional(Type.String()), + downloadUrl: Type.Optional(Type.String({ format: 'uri' })), + version: Type.Optional(Type.String({ maxLength: 100 })), + }), + }, + }, + async (request) => { + const { orgId, serverId, pluginId } = request.params as { + orgId: string; + serverId: string; + pluginId: string; + }; + const body = request.body as { + name?: string; + slug?: string; + description?: string; + downloadUrl?: string; + version?: string; + }; + await requirePermission(request, orgId, 'plugin.manage'); + const context = await getServerPluginContext(app, orgId, serverId); + const existing = await getPluginForGame(app, pluginId, context.gameId); + + const nextSlug = body.slug !== undefined + ? toSlug(body.slug) + : (body.name !== undefined ? toSlug(body.name) : existing.slug); + if (!nextSlug) { + throw AppError.badRequest('Plugin slug is invalid'); + } + + const duplicate = await app.db.query.plugins.findFirst({ + where: and( + eq(plugins.gameId, context.gameId), + eq(plugins.slug, nextSlug), + ), + }); + if (duplicate && duplicate.id !== existing.id) { + throw AppError.conflict('A plugin with this slug already exists for the game'); + } + + const [updated] = await app.db + .update(plugins) + .set({ + name: body.name ?? 
existing.name, + slug: nextSlug, + description: body.description ?? existing.description, + downloadUrl: body.downloadUrl ?? existing.downloadUrl, + version: body.version ?? existing.version, + updatedAt: new Date(), + }) + .where(eq(plugins.id, existing.id)) + .returning(); + + if (!updated) { + throw new AppError(500, 'Failed to update plugin', 'PLUGIN_UPDATE_FAILED'); + } + + await createAuditLog(app.db, request, { + organizationId: orgId, + serverId, + action: 'plugin.marketplace.update', + metadata: { pluginId: existing.id }, + }); + + return updated; + }, + ); + + // DELETE /plugins/marketplace/:pluginId — remove marketplace plugin entry + app.delete( + '/marketplace/:pluginId', + { + schema: { + params: Type.Object({ + orgId: Type.String({ format: 'uuid' }), + serverId: Type.String({ format: 'uuid' }), + pluginId: Type.String({ format: 'uuid' }), + }), + }, + }, + async (request, reply) => { + const { orgId, serverId, pluginId } = request.params as { + orgId: string; + serverId: string; + pluginId: string; + }; + await requirePermission(request, orgId, 'plugin.manage'); + const context = await getServerPluginContext(app, orgId, serverId); + const plugin = await getPluginForGame(app, pluginId, context.gameId); + + const installation = await app.db.query.serverPlugins.findFirst({ + where: eq(serverPlugins.pluginId, plugin.id), + }); + if (installation) { + throw AppError.conflict('Plugin is installed on at least one server'); + } + + await app.db.delete(plugins).where(eq(plugins.id, plugin.id)); + + await createAuditLog(app.db, request, { + organizationId: orgId, + serverId, + action: 'plugin.marketplace.delete', + metadata: { pluginId: plugin.id, name: plugin.name }, + }); + + return reply.code(204).send(); + }, + ); + // GET /plugins/search — search Spiget for Minecraft plugins app.get( '/search', @@ -68,18 +535,8 @@ export default async function pluginRoutes(app: FastifyInstance) { const { q, page } = request.query as { q: string; page?: number }; await 
requirePermission(request, orgId, 'plugin.manage'); - // Verify server exists and is Minecraft - const server = await app.db.query.servers.findFirst({ - where: and(eq(servers.id, serverId), eq(servers.organizationId, orgId)), - }); - if (!server) throw AppError.notFound('Server not found'); - - const game = await app.db.query.games.findFirst({ - where: eq(games.id, server.gameId), - }); - if (!game) throw AppError.notFound('Game not found'); - - if (game.slug !== 'minecraft-java') { + const context = await getServerPluginContext(app, orgId, serverId); + if (context.gameSlug !== 'minecraft-java') { throw AppError.badRequest('Spiget search is only available for Minecraft: Java Edition'); } @@ -98,6 +555,51 @@ export default async function pluginRoutes(app: FastifyInstance) { }, ); + // POST /plugins/install/:pluginId — install from game marketplace + app.post( + '/install/:pluginId', + { + schema: { + params: Type.Object({ + orgId: Type.String({ format: 'uuid' }), + serverId: Type.String({ format: 'uuid' }), + pluginId: Type.String({ format: 'uuid' }), + }), + }, + }, + async (request) => { + const { orgId, serverId, pluginId } = request.params as { + orgId: string; + serverId: string; + pluginId: string; + }; + await requirePermission(request, orgId, 'plugin.manage'); + const context = await getServerPluginContext(app, orgId, serverId); + const plugin = await getPluginForGame(app, pluginId, context.gameId); + + const { installed, installPath } = await installPluginForServer( + app, + context, + { + id: plugin.id, + slug: plugin.slug, + downloadUrl: plugin.downloadUrl, + version: plugin.version, + }, + plugin.version, + ); + + await createAuditLog(app.db, request, { + organizationId: orgId, + serverId, + action: 'plugin.install', + metadata: { pluginId: plugin.id, name: plugin.name, source: 'marketplace', installPath }, + }); + + return installed; + }, + ); + // POST /plugins/install/spiget — install a plugin from Spiget app.post( '/install/spiget', @@ -114,24 
+616,17 @@ export default async function pluginRoutes(app: FastifyInstance) { const { resourceId } = request.body as { resourceId: number }; await requirePermission(request, orgId, 'plugin.manage'); - const server = await app.db.query.servers.findFirst({ - where: and(eq(servers.id, serverId), eq(servers.organizationId, orgId)), - }); - if (!server) throw AppError.notFound('Server not found'); + const context = await getServerPluginContext(app, orgId, serverId); + if (context.gameSlug !== 'minecraft-java') { + throw AppError.badRequest('Spiget install is only available for Minecraft: Java Edition'); + } - const game = await app.db.query.games.findFirst({ - where: eq(games.id, server.gameId), - }); - if (!game) throw AppError.notFound('Game not found'); - - // Fetch resource info from Spiget const resource = await getSpigetResource(resourceId); if (!resource) throw AppError.notFound('Spiget resource not found'); - // Create or find plugin entry let plugin = await app.db.query.plugins.findFirst({ where: and( - eq(plugins.gameId, game.id), + eq(plugins.gameId, context.gameId), eq(plugins.externalId, String(resourceId)), eq(plugins.source, 'spiget'), ), @@ -141,12 +636,9 @@ export default async function pluginRoutes(app: FastifyInstance) { const [created] = await app.db .insert(plugins) .values({ - gameId: game.id, + gameId: context.gameId, name: resource.name, - slug: resource.name - .toLowerCase() - .replace(/[^a-z0-9]+/g, '-') - .slice(0, 200), + slug: toSlug(resource.name), description: resource.tag || null, source: 'spiget', externalId: String(resourceId), @@ -157,41 +649,36 @@ export default async function pluginRoutes(app: FastifyInstance) { plugin = created!; } - // Check if already installed - const existing = await app.db.query.serverPlugins.findFirst({ - where: and( - eq(serverPlugins.serverId, serverId), - eq(serverPlugins.pluginId, plugin.id), - ), - }); - if (existing) throw AppError.conflict('Plugin is already installed'); - - // Install - const 
[installed] = await app.db - .insert(serverPlugins) - .values({ - serverId, - pluginId: plugin.id, - installedVersion: resource.version ? String(resource.version.id) : null, - isActive: true, - }) - .returning(); - - // TODO: Send gRPC command to daemon to download the plugin file to /data/plugins/ - // downloadUrl: getSpigetDownloadUrl(resourceId) + const { installed, installPath } = await installPluginForServer( + app, + context, + { + id: plugin.id, + slug: plugin.slug, + downloadUrl: plugin.downloadUrl, + version: plugin.version, + }, + resource.version ? String(resource.version.id) : plugin.version, + ); await createAuditLog(app.db, request, { organizationId: orgId, serverId, action: 'plugin.install', - metadata: { name: resource.name, source: 'spiget', resourceId }, + metadata: { + pluginId: plugin.id, + name: resource.name, + source: 'spiget', + resourceId, + installPath, + }, }); return installed; }, ); - // POST /plugins/install/manual — install a plugin manually (upload) + // POST /plugins/install/manual — register manually uploaded plugin file app.post( '/install/manual', { @@ -212,21 +699,14 @@ export default async function pluginRoutes(app: FastifyInstance) { version?: string; }; await requirePermission(request, orgId, 'plugin.manage'); - - const server = await app.db.query.servers.findFirst({ - where: and(eq(servers.id, serverId), eq(servers.organizationId, orgId)), - }); - if (!server) throw AppError.notFound('Server not found'); + const context = await getServerPluginContext(app, orgId, serverId); const [plugin] = await app.db .insert(plugins) .values({ - gameId: server.gameId, + gameId: context.gameId, name, - slug: name - .toLowerCase() - .replace(/[^a-z0-9]+/g, '-') - .slice(0, 200), + slug: toSlug(name), source: 'manual', version: version ?? 
null, }) @@ -246,7 +726,7 @@ export default async function pluginRoutes(app: FastifyInstance) { organizationId: orgId, serverId, action: 'plugin.install', - metadata: { name, source: 'manual', fileName }, + metadata: { pluginId: plugin?.id, name, source: 'manual', fileName }, }); return installed; @@ -272,24 +752,50 @@ export default async function pluginRoutes(app: FastifyInstance) { pluginInstallId: string; }; await requirePermission(request, orgId, 'plugin.manage'); + const context = await getServerPluginContext(app, orgId, serverId); - const installed = await app.db.query.serverPlugins.findFirst({ - where: and( + const [installed] = await app.db + .select({ + installId: serverPlugins.id, + pluginId: serverPlugins.pluginId, + pluginSlug: plugins.slug, + pluginDownloadUrl: plugins.downloadUrl, + }) + .from(serverPlugins) + .innerJoin(plugins, eq(serverPlugins.pluginId, plugins.id)) + .where(and( eq(serverPlugins.id, pluginInstallId), - eq(serverPlugins.serverId, serverId), - ), + eq(serverPlugins.serverId, context.serverId), + )); + + if (!installed) { + throw AppError.notFound('Plugin installation not found'); + } + + const uninstallPath = pluginFilePath(context.gameSlug, { + id: installed.pluginId, + slug: installed.pluginSlug, + downloadUrl: installed.pluginDownloadUrl, }); - if (!installed) throw AppError.notFound('Plugin installation not found'); + + if (uninstallPath) { + try { + await daemonDeleteFiles(context.node, context.serverUuid, [uninstallPath]); + } catch (error) { + request.log.warn( + { error, serverId, pluginInstallId, uninstallPath }, + 'Failed to delete plugin artifact from server filesystem', + ); + } + } await app.db.delete(serverPlugins).where(eq(serverPlugins.id, pluginInstallId)); - // TODO: Send gRPC to daemon to delete the plugin file from /data/plugins/ - await createAuditLog(app.db, request, { organizationId: orgId, serverId, action: 'plugin.uninstall', - metadata: { pluginInstallId }, + metadata: { pluginInstallId, pluginId: 
installed.pluginId }, }); return reply.code(204).send(); @@ -315,11 +821,12 @@ export default async function pluginRoutes(app: FastifyInstance) { pluginInstallId: string; }; await requirePermission(request, orgId, 'plugin.manage'); + const context = await getServerPluginContext(app, orgId, serverId); const installed = await app.db.query.serverPlugins.findFirst({ where: and( eq(serverPlugins.id, pluginInstallId), - eq(serverPlugins.serverId, serverId), + eq(serverPlugins.serverId, context.serverId), ), }); if (!installed) throw AppError.notFound('Plugin installation not found'); diff --git a/apps/api/src/routes/servers/schedules.ts b/apps/api/src/routes/servers/schedules.ts index 7057a8f..d45f192 100644 --- a/apps/api/src/routes/servers/schedules.ts +++ b/apps/api/src/routes/servers/schedules.ts @@ -1,11 +1,17 @@ import type { FastifyInstance } from 'fastify'; import { eq, and } from 'drizzle-orm'; import { Type } from '@sinclair/typebox'; -import { servers, scheduledTasks } from '@source/database'; +import { nodes, servers, scheduledTasks } from '@source/database'; +import type { PowerAction } from '@source/shared'; import { AppError } from '../../lib/errors.js'; import { requirePermission } from '../../lib/permissions.js'; import { createAuditLog } from '../../lib/audit.js'; import { computeNextRun } from '../../lib/schedule-utils.js'; +import { + daemonSendCommand, + daemonSetPowerState, + type DaemonNodeConnection, +} from '../../lib/daemon.js'; const ParamSchema = { params: Type.Object({ @@ -194,8 +200,18 @@ export default async function scheduleRoutes(app: FastifyInstance) { }); if (!task) throw AppError.notFound('Scheduled task not found'); - // TODO: Execute task action (send to daemon via gRPC) - // For now, just update lastRunAt and nextRunAt + if (task.action === 'command') { + const serverContext = await getServerContext(app, orgId, serverId); + await daemonSendCommand(serverContext.node, serverContext.serverUuid, task.payload); + } else if (task.action 
=== 'power') { + const action = task.payload as PowerAction; + if (!['start', 'stop', 'restart', 'kill'].includes(action)) { + throw AppError.badRequest('Invalid power action in schedule payload'); + } + const serverContext = await getServerContext(app, orgId, serverId); + await daemonSetPowerState(serverContext.node, serverContext.serverUuid, action); + } + const nextRun = computeNextRun(task.scheduleType, task.scheduleData as Record); await app.db @@ -206,3 +222,32 @@ export default async function scheduleRoutes(app: FastifyInstance) { return { success: true, triggered: task.name }; }); } + +async function getServerContext(app: FastifyInstance, orgId: string, serverId: string): Promise<{ + serverUuid: string; + node: DaemonNodeConnection; +}> { + const [server] = await app.db + .select({ + uuid: servers.uuid, + nodeFqdn: nodes.fqdn, + nodeGrpcPort: nodes.grpcPort, + nodeDaemonToken: nodes.daemonToken, + }) + .from(servers) + .innerJoin(nodes, eq(servers.nodeId, nodes.id)) + .where(and(eq(servers.id, serverId), eq(servers.organizationId, orgId))); + + if (!server) { + throw AppError.notFound('Server not found'); + } + + return { + serverUuid: server.uuid, + node: { + fqdn: server.nodeFqdn, + grpcPort: server.nodeGrpcPort, + daemonToken: server.nodeDaemonToken, + }, + }; +} diff --git a/apps/daemon/Cargo.lock b/apps/daemon/Cargo.lock index d45fd78..938c37d 100644 --- a/apps/daemon/Cargo.lock +++ b/apps/daemon/Cargo.lock @@ -484,6 +484,7 @@ dependencies = [ "bollard", "flate2", "futures", + "libc", "prost", "prost-types", "reqwest", diff --git a/apps/daemon/Cargo.toml b/apps/daemon/Cargo.toml index aab4381..1fa6793 100644 --- a/apps/daemon/Cargo.toml +++ b/apps/daemon/Cargo.toml @@ -32,6 +32,7 @@ tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] } # Error handling anyhow = "1" thiserror = "2" +libc = "0.2" # UUID uuid = { version = "1", features = ["v4"] } diff --git a/apps/daemon/src/docker/container.rs b/apps/daemon/src/docker/container.rs 
index c841e39..f290675 100644 --- a/apps/daemon/src/docker/container.rs +++ b/apps/daemon/src/docker/container.rs @@ -21,6 +21,17 @@ pub fn container_name(server_uuid: &str) -> String { format!("{}{}", CONTAINER_PREFIX, server_uuid) } +fn container_data_path_for_image(image: &str) -> &'static str { + let normalized = image.to_ascii_lowercase(); + if normalized.contains("cm2network/cs2") || normalized.contains("joedwards32/cs2") { + return "/home/steam/cs2-dedicated"; + } + if normalized.contains("cm2network/csgo") { + return "/home/steam/csgo-dedicated"; + } + "/data" +} + impl DockerManager { async fn run_exec(&self, container_name: &str, cmd: Vec) -> Result { let exec = self @@ -100,6 +111,7 @@ impl DockerManager { /// Create and configure a container for a game server. pub async fn create_container(&self, spec: &ServerSpec) -> Result { let name = container_name(&spec.uuid); + let data_mount_path = container_data_path_for_image(&spec.docker_image); // Build port bindings let mut port_bindings: HashMap>> = HashMap::new(); @@ -135,8 +147,10 @@ impl DockerManager { port_bindings: Some(port_bindings), network_mode: Some(self.network_name().to_string()), binds: Some(vec![format!( - "{}:/data", + "{}:{}", spec.data_path.display() + , + data_mount_path )]), ..Default::default() }; @@ -147,7 +161,13 @@ impl DockerManager { env: Some(env), exposed_ports: Some(exposed_ports), host_config: Some(host_config), - working_dir: Some("/data".to_string()), + // Preserve image default working directory when no custom startup command is set. + // Some game images rely on their built-in WORKDIR and entrypoint scripts. + working_dir: if spec.startup_command.is_empty() { + None + } else { + Some(data_mount_path.to_string()) + }, cmd: if spec.startup_command.is_empty() { None } else { @@ -268,6 +288,36 @@ impl DockerManager { } } + /// Read container runtime metadata (image + env vars) from Docker inspect. 
+ pub async fn container_runtime_metadata( + &self, + server_uuid: &str, + ) -> Result<(String, HashMap)> { + let name = container_name(server_uuid); + let info = self.client().inspect_container(&name, None).await?; + + let image = info + .config + .as_ref() + .and_then(|cfg| cfg.image.clone()) + .unwrap_or_default(); + + let mut env_map = HashMap::new(); + if let Some(env_vars) = info + .config + .as_ref() + .and_then(|cfg| cfg.env.clone()) + { + for entry in env_vars { + if let Some((key, value)) = entry.split_once('=') { + env_map.insert(key.to_string(), value.to_string()); + } + } + } + + Ok((image, env_map)) + } + /// Stream container logs (stdout + stderr). Returns an owned stream. pub fn stream_logs( self: &Arc, diff --git a/apps/daemon/src/game/cs2.rs b/apps/daemon/src/game/cs2.rs index 010ff8b..0a70c6e 100644 --- a/apps/daemon/src/game/cs2.rs +++ b/apps/daemon/src/game/cs2.rs @@ -34,36 +34,28 @@ fn parse_status_response(response: &str) -> (Vec, u32) { for line in response.lines() { let trimmed = line.trim(); - // Parse max players from "players : X humans, Y bots (Z/M max)" - if trimmed.starts_with("players") && trimmed.contains("max") { - if let Some(max_str) = trimmed.split('/').last() { - if let Some(num) = max_str.split_whitespace().next() { - max_players = num.parse().unwrap_or(0); - } + // Parse max players from status line variants: + // "players : X humans, Y bots (Z/M max)" + // "players : X humans, Y bots (Z max)" + if trimmed.starts_with("players") { + if let Some(parsed_max) = parse_max_players_from_line(trimmed) { + max_players = parsed_max; } } - // Player table header: starts with # - if trimmed.starts_with("# userid") { + if trimmed.contains("---------players--------") || trimmed.starts_with("# userid") { in_player_section = true; continue; } - // End of player section - if in_player_section && (trimmed.is_empty() || trimmed.starts_with('#')) { - if trimmed.is_empty() { - in_player_section = false; - continue; - } + if in_player_section && 
(trimmed == "#end" || trimmed.starts_with("---------")) { + in_player_section = false; + continue; } - // Parse player lines: "# userid name steamid ..." - if in_player_section && trimmed.starts_with('#') { - let parts: Vec<&str> = trimmed.splitn(6, char::is_whitespace).collect(); - if parts.len() >= 4 { - let name = parts.get(2).unwrap_or(&"").trim_matches('"').to_string(); - let steamid = parts.get(3).unwrap_or(&"").to_string(); - + // Parse player lines for both old and current CS2 status formats. + if in_player_section { + if let Some((name, steamid)) = parse_player_line(trimmed) { players.push(Cs2Player { name, steamid, @@ -77,6 +69,62 @@ fn parse_status_response(response: &str) -> (Vec, u32) { (players, max_players) } +fn parse_max_players_from_line(line: &str) -> Option { + let start = line.find('(')?; + let end = line[start + 1..].find(')')? + start + 1; + let inside = &line[start + 1..end]; + + inside + .split(|c: char| !c.is_ascii_digit()) + .filter(|s| !s.is_empty()) + .filter_map(|s| s.parse::().ok()) + .max() +} + +fn parse_player_line(line: &str) -> Option<(String, String)> { + // Skip table/header rows. + if line.is_empty() + || line.starts_with("id ") + || line.contains("userid") + || line.contains("steamid") + || line.contains("adr name") + { + return None; + } + + // Legacy format: # 2 "Player" STEAM_... + if let Some(quote_start) = line.find('"') { + let quote_end = line[quote_start + 1..].find('"')? + quote_start + 1; + let name = line[quote_start + 1..quote_end].trim().to_string(); + if name.is_empty() { + return None; + } + + let rest = line[quote_end + 1..].trim(); + let steamid = rest.split_whitespace().next()?.to_string(); + if steamid.is_empty() { + return None; + } + + return Some((name, steamid)); + } + + // Current CS2 format: ... 
'PlayerName' + let quote_end = line.rfind('\'')?; + let before_end = &line[..quote_end]; + let quote_start = before_end.rfind('\'')?; + if quote_start >= quote_end { + return None; + } + let name = line[quote_start + 1..quote_end].trim().to_string(); + if name.is_empty() { + return None; + } + + // New status output does not include steamid in player rows. + Some((name, String::new())) +} + #[cfg(test)] mod tests { use super::*; @@ -91,7 +139,28 @@ players : 2 humans, 0 bots (16/0 max) (not hibernating) # 3 "Player2" STEAM_1:0:67890 00:10 30 0 active 128000 "#; let (players, max) = parse_status_response(response); - assert_eq!(max, 0); // simplified parser + assert_eq!(max, 16); assert_eq!(players.len(), 2); } + + #[test] + fn test_parse_status_current_cs2_format() { + let response = r#"Server: Running [0.0.0.0:27015] +players : 1 humans, 2 bots (0 max) (not hibernating) (unreserved) +---------players-------- + id time ping loss state rate adr name +65535 [NoChan] 0 0 challenging 0unknown '' + 1 BOT 0 0 active 0 'Rezan' + 2 00:21 11 0 active 786432 212.154.6.153:57008 'hibna' + 3 BOT 0 0 active 0 'Squad' +#end +"#; + + let (players, max) = parse_status_response(response); + assert_eq!(max, 0); + assert_eq!(players.len(), 3); + assert_eq!(players[0].name, "Rezan"); + assert_eq!(players[1].name, "hibna"); + assert_eq!(players[2].name, "Squad"); + } } diff --git a/apps/daemon/src/grpc/service.rs b/apps/daemon/src/grpc/service.rs index 008e1e8..c10696b 100644 --- a/apps/daemon/src/grpc/service.rs +++ b/apps/daemon/src/grpc/service.rs @@ -2,6 +2,11 @@ use std::pin::Pin; use std::sync::Arc; use std::time::Instant; use std::collections::HashMap; +#[cfg(unix)] +use std::ffi::CString; +#[cfg(unix)] +use std::os::unix::ffi::OsStrExt; +use std::path::{Path, PathBuf}; use futures::StreamExt; use tokio_stream::wrappers::ReceiverStream; @@ -10,6 +15,7 @@ use tracing::{info, error, warn}; use crate::server::{ServerManager, PortMap}; use crate::filesystem::FileSystem; +use 
crate::backup::BackupManager; // Import generated protobuf types pub mod pb { @@ -21,14 +27,28 @@ use pb::*; pub struct DaemonServiceImpl { server_manager: Arc, + backup_manager: BackupManager, daemon_token: String, start_time: Instant, } impl DaemonServiceImpl { - pub fn new(server_manager: Arc, daemon_token: String) -> Self { + pub fn new( + server_manager: Arc, + daemon_token: String, + backup_root: PathBuf, + api_url: String, + ) -> Self { + let backup_manager = BackupManager::new( + server_manager.clone(), + backup_root, + api_url, + daemon_token.clone(), + ); + Self { server_manager, + backup_manager, daemon_token, start_time: Instant::now(), } @@ -51,6 +71,41 @@ impl DaemonServiceImpl { let data_path = self.server_manager.data_root().join(uuid); FileSystem::new(data_path) } + + async fn get_server_runtime( + &self, + uuid: &str, + ) -> Option<(String, HashMap)> { + if let Ok(spec) = self.server_manager.get_server(uuid).await { + return Some((spec.docker_image, spec.environment)); + } + + self.server_manager + .docker() + .container_runtime_metadata(uuid) + .await + .ok() + } + + fn env_value(env: &HashMap, keys: &[&str]) -> Option { + keys.iter() + .find_map(|k| env.get(*k)) + .map(|v| v.trim().to_string()) + .filter(|v| !v.is_empty()) + } + + fn env_u16(env: &HashMap, keys: &[&str]) -> Option { + Self::env_value(env, keys).and_then(|v| v.parse::().ok()) + } + + fn env_i32(env: &HashMap, keys: &[&str]) -> Option { + Self::env_value(env, keys).and_then(|v| v.parse::().ok()) + } + + fn cs2_rcon_password(env: &HashMap) -> String { + Self::env_value(env, &["CS2_RCONPW", "CS2_RCON_PASSWORD", "SRCDS_RCONPW", "RCON_PASSWORD"]) + .unwrap_or_else(|| "changeme".to_string()) + } } type GrpcStream = Pin> + Send>>; @@ -88,17 +143,12 @@ impl DaemonService for DaemonServiceImpl { self.check_auth(&request)?; let (tx, rx) = tokio::sync::mpsc::channel(32); + let data_root = self.server_manager.data_root().clone(); tokio::spawn(async move { + let mut previous_cpu = 
read_cpu_sample(); loop { - // Read system stats - let stats = NodeStats { - cpu_percent: 0.0, // TODO: real system stats - memory_used: 0, - memory_total: 0, - disk_used: 0, - disk_total: 0, - }; + let stats = read_node_stats(&data_root, &mut previous_cpu); if tx.send(Ok(stats)).await.is_err() { break; } @@ -281,6 +331,29 @@ impl DaemonService for DaemonServiceImpl { self.check_auth(&request)?; let req = request.into_inner(); + if let Some((image, env)) = self.get_server_runtime(&req.uuid).await { + let image = image.to_lowercase(); + if image.contains("cs2") || image.contains("csgo") { + let host = Self::env_value(&env, &["RCON_HOST"]) + .unwrap_or_else(|| "127.0.0.1".to_string()); + let port = Self::env_u16(&env, &["RCON_PORT", "CS2_PORT"]).unwrap_or(27015); + let password = Self::cs2_rcon_password(&env); + let address = format!("{}:{}", host, port); + + match crate::game::rcon::RconClient::connect(&address, &password).await { + Ok(mut client) => match client.command(&req.command).await { + Ok(_) => return Ok(Response::new(Empty {})), + Err(e) => { + warn!(uuid = %req.uuid, command = %req.command, error = %e, "CS2 RCON command failed"); + } + }, + Err(e) => { + warn!(uuid = %req.uuid, command = %req.command, error = %e, "CS2 RCON connect failed"); + } + } + } + } + self.server_manager .docker() .send_command(&req.uuid, &req.command) @@ -389,8 +462,20 @@ impl DaemonService for DaemonServiceImpl { request: Request, ) -> Result, Status> { self.check_auth(&request)?; - // TODO: implement backup creation - Err(Status::unimplemented("Not yet implemented")) + let req = request.into_inner(); + + let (_path, size_bytes, checksum) = self + .backup_manager + .create_backup(&req.server_uuid, &req.backup_id) + .await + .map_err(|e| Status::internal(format!("Failed to create backup: {e}")))?; + + Ok(Response::new(BackupResponse { + backup_id: req.backup_id, + size_bytes: size_bytes.min(i64::MAX as u64) as i64, + checksum, + success: true, + })) } async fn restore_backup( @@ 
-398,8 +483,21 @@ impl DaemonService for DaemonServiceImpl { request: Request, ) -> Result, Status> { self.check_auth(&request)?; - // TODO: implement backup restoration - Err(Status::unimplemented("Not yet implemented")) + let req = request.into_inner(); + + let cdn_path = if req.cdn_download_url.trim().is_empty() { + None + } else { + Some(req.cdn_download_url.as_str()) + }; + + self + .backup_manager + .restore_backup(&req.server_uuid, &req.backup_id, cdn_path) + .await + .map_err(|e| Status::internal(format!("Failed to restore backup: {e}")))?; + + Ok(Response::new(Empty {})) } async fn delete_backup( @@ -407,8 +505,15 @@ impl DaemonService for DaemonServiceImpl { request: Request, ) -> Result, Status> { self.check_auth(&request)?; - // TODO: implement backup deletion - Err(Status::unimplemented("Not yet implemented")) + let req = request.into_inner(); + + self + .backup_manager + .delete_backup(&req.server_uuid, &req.backup_id, None) + .await + .map_err(|e| Status::internal(format!("Failed to delete backup: {e}")))?; + + Ok(Response::new(Empty {})) } // === Stats === @@ -504,19 +609,13 @@ impl DaemonService for DaemonServiceImpl { .and_then(|v| v.parse::().ok()) .unwrap_or(25575); - // Try RCON-based player discovery for known games when runtime spec exists. - if let Ok(spec) = self.server_manager.get_server(&uuid).await { - let image = spec.docker_image.to_lowercase(); + // Try game-specific player discovery using runtime metadata (works even after daemon restart). 
+ let mut max_from_runtime_env = 0; + if let Some((image, env)) = self.get_server_runtime(&uuid).await { + let image = image.to_lowercase(); if image.contains("minecraft") { - let password_from_env = spec - .environment - .get("RCON_PASSWORD") - .or_else(|| spec.environment.get("MCRCON_PASSWORD")) - .filter(|v| !v.trim().is_empty()); - - let password = password_from_env - .cloned() + let password = Self::env_value(&env, &["RCON_PASSWORD", "MCRCON_PASSWORD"]) .or_else(|| { if rcon_enabled_from_properties { rcon_password_from_properties.clone() @@ -526,16 +625,9 @@ impl DaemonService for DaemonServiceImpl { }); if let Some(password) = password { - let host = spec - .environment - .get("RCON_HOST") - .filter(|v| !v.trim().is_empty()) - .cloned() + let host = Self::env_value(&env, &["RCON_HOST"]) .unwrap_or_else(|| "127.0.0.1".to_string()); - let port = spec - .environment - .get("RCON_PORT") - .and_then(|v| v.parse::().ok()) + let port = Self::env_u16(&env, &["RCON_PORT"]) .unwrap_or(rcon_port_from_properties); let address = format!("{}:{}", host, port); @@ -560,43 +652,33 @@ impl DaemonService for DaemonServiceImpl { } } } else if image.contains("csgo") || image.contains("cs2") { - if let Some(password) = spec - .environment - .get("SRCDS_RCONPW") - .or_else(|| spec.environment.get("RCON_PASSWORD")) - .filter(|v| !v.trim().is_empty()) - { - let host = spec - .environment - .get("RCON_HOST") - .filter(|v| !v.trim().is_empty()) - .cloned() - .unwrap_or_else(|| "127.0.0.1".to_string()); - let port = spec - .environment - .get("RCON_PORT") - .and_then(|v| v.parse::().ok()) - .unwrap_or(27015); - let address = format!("{}:{}", host, port); + max_from_runtime_env = Self::env_i32(&env, &["CS2_MAXPLAYERS", "SRCDS_MAXPLAYERS"]) + .unwrap_or(0); - match crate::game::cs2::get_players(&address, password).await { - Ok((players, max)) => { - let mapped = players - .into_iter() - .map(|p| Player { - name: p.name, - uuid: p.steamid, - connected_at: 0, - }) - .collect(); - return 
Ok(Response::new(PlayerList { - players: mapped, - max_players: max as i32, - })); - } - Err(e) => { - warn!(uuid = %uuid, error = %e, "CS2 RCON player query failed"); - } + let host = Self::env_value(&env, &["RCON_HOST"]) + .unwrap_or_else(|| "127.0.0.1".to_string()); + let port = Self::env_u16(&env, &["RCON_PORT", "CS2_PORT"]).unwrap_or(27015); + let password = Self::cs2_rcon_password(&env); + let address = format!("{}:{}", host, port); + + match crate::game::cs2::get_players(&address, &password).await { + Ok((players, max)) => { + let mapped = players + .into_iter() + .map(|p| Player { + name: p.name, + uuid: p.steamid, + connected_at: 0, + }) + .collect(); + let max_players = if max > 0 { max as i32 } else { max_from_runtime_env }; + return Ok(Response::new(PlayerList { + players: mapped, + max_players, + })); + } + Err(e) => { + warn!(uuid = %uuid, error = %e, "CS2 RCON player query failed"); } } } @@ -624,11 +706,146 @@ impl DaemonService for DaemonServiceImpl { Ok(Response::new(PlayerList { players: vec![], - max_players: max_from_properties, + max_players: if max_from_runtime_env > 0 { + max_from_runtime_env + } else { + max_from_properties + }, })) } } +#[derive(Clone, Copy)] +struct CpuSample { + total: u64, + idle: u64, +} + +fn read_node_stats(data_root: &Path, previous_cpu: &mut Option) -> NodeStats { + let current_cpu = read_cpu_sample(); + let cpu_percent = match (*previous_cpu, current_cpu) { + (Some(prev), Some(current)) => calculate_node_cpu_percent(prev, current), + _ => 0.0, + }; + *previous_cpu = current_cpu; + + let (memory_used, memory_total) = read_memory_stats().unwrap_or((0, 0)); + let (disk_used, disk_total) = read_disk_stats(data_root).unwrap_or((0, 0)); + + NodeStats { + cpu_percent, + memory_used, + memory_total, + disk_used, + disk_total, + } +} + +fn read_cpu_sample() -> Option { + let content = std::fs::read_to_string("/proc/stat").ok()?; + let line = content.lines().next()?; + if !line.starts_with("cpu ") { + return None; + } + + 
let mut values = line + .split_whitespace() + .skip(1) + .filter_map(|value| value.parse::().ok()); + + let user = values.next()?; + let nice = values.next()?; + let system = values.next()?; + let idle = values.next()?; + let iowait = values.next().unwrap_or(0); + let irq = values.next().unwrap_or(0); + let softirq = values.next().unwrap_or(0); + let steal = values.next().unwrap_or(0); + + let total_idle = idle.saturating_add(iowait); + let total = user + .saturating_add(nice) + .saturating_add(system) + .saturating_add(total_idle) + .saturating_add(irq) + .saturating_add(softirq) + .saturating_add(steal); + + Some(CpuSample { + total, + idle: total_idle, + }) +} + +fn calculate_node_cpu_percent(previous: CpuSample, current: CpuSample) -> f64 { + let total_delta = current.total.saturating_sub(previous.total) as f64; + let idle_delta = current.idle.saturating_sub(previous.idle) as f64; + if total_delta <= 0.0 { + return 0.0; + } + ((total_delta - idle_delta) / total_delta * 100.0).clamp(0.0, 100.0) +} + +fn read_memory_stats() -> Option<(i64, i64)> { + let content = std::fs::read_to_string("/proc/meminfo").ok()?; + let mut total_kib: Option = None; + let mut available_kib: Option = None; + + for line in content.lines() { + if line.starts_with("MemTotal:") { + total_kib = line + .split_whitespace() + .nth(1) + .and_then(|value| value.parse::().ok()); + } else if line.starts_with("MemAvailable:") { + available_kib = line + .split_whitespace() + .nth(1) + .and_then(|value| value.parse::().ok()); + } + + if total_kib.is_some() && available_kib.is_some() { + break; + } + } + + let total_bytes = total_kib?.saturating_mul(1024); + let available_bytes = available_kib?.saturating_mul(1024); + let used_bytes = total_bytes.saturating_sub(available_bytes); + + Some(( + used_bytes.min(i64::MAX as u64) as i64, + total_bytes.min(i64::MAX as u64) as i64, + )) +} + +#[cfg(unix)] +fn read_disk_stats(path: &Path) -> Option<(i64, i64)> { + let c_path = 
CString::new(path.as_os_str().as_bytes()).ok()?; + let mut stats: libc::statvfs = unsafe { std::mem::zeroed() }; + if unsafe { libc::statvfs(c_path.as_ptr(), &mut stats) } != 0 { + return None; + } + + let block_size = if stats.f_frsize > 0 { + stats.f_frsize as u128 + } else { + stats.f_bsize as u128 + }; + + let total = block_size.saturating_mul(stats.f_blocks as u128); + let available = block_size.saturating_mul(stats.f_bavail as u128); + let used = total.saturating_sub(available); + let max = i64::MAX as u128; + + Some((used.min(max) as i64, total.min(max) as i64)) +} + +#[cfg(not(unix))] +fn read_disk_stats(_path: &Path) -> Option<(i64, i64)> { + None +} + /// Calculate CPU percentage from Docker stats. fn calculate_cpu_percent(stats: &bollard::container::Stats) -> f64 { let cpu_delta = stats.cpu_stats.cpu_usage.total_usage as f64 diff --git a/apps/daemon/src/main.rs b/apps/daemon/src/main.rs index 7fb25d3..83258cc 100644 --- a/apps/daemon/src/main.rs +++ b/apps/daemon/src/main.rs @@ -20,6 +20,8 @@ use crate::grpc::DaemonServiceImpl; use crate::grpc::service::pb::daemon_service_server::DaemonServiceServer; use crate::server::ServerManager; +const MAX_GRPC_MESSAGE_SIZE_BYTES: usize = 32 * 1024 * 1024; + #[tokio::main] async fn main() -> Result<()> { // Initialize logging @@ -47,6 +49,8 @@ async fn main() -> Result<()> { let daemon_service = DaemonServiceImpl::new( server_manager.clone(), config.node_token.clone(), + config.backup_path.clone(), + config.api_url.clone(), ); // Start gRPC server @@ -73,8 +77,12 @@ async fn main() -> Result<()> { info!("Scheduler initialized"); // Start serving + let daemon_service = DaemonServiceServer::new(daemon_service) + .max_decoding_message_size(MAX_GRPC_MESSAGE_SIZE_BYTES) + .max_encoding_message_size(MAX_GRPC_MESSAGE_SIZE_BYTES); + Server::builder() - .add_service(DaemonServiceServer::new(daemon_service)) + .add_service(daemon_service) .serve_with_shutdown(addr, async { tokio::signal::ctrl_c().await.ok(); info!("Shutdown 
signal received"); diff --git a/apps/daemon/src/server/manager.rs b/apps/daemon/src/server/manager.rs index ba0331e..fcc32a1 100644 --- a/apps/daemon/src/server/manager.rs +++ b/apps/daemon/src/server/manager.rs @@ -4,6 +4,8 @@ use std::sync::Arc; use tokio::sync::RwLock; use tracing::{info, error, warn}; use anyhow::Result; +#[cfg(unix)] +use std::os::unix::fs::PermissionsExt; use crate::config::DaemonConfig; use crate::docker::DockerManager; @@ -64,6 +66,15 @@ impl ServerManager { tokio::fs::create_dir_all(&data_path) .await .map_err(DaemonError::Io)?; + #[cfg(unix)] + { + // Containers may run with non-root users (e.g. steam uid 1000). + // Keep server directory writable to avoid install/start failures. + let permissions = std::fs::Permissions::from_mode(0o777); + tokio::fs::set_permissions(&data_path, permissions) + .await + .map_err(DaemonError::Io)?; + } let spec = ServerSpec { uuid: uuid.clone(), @@ -131,23 +142,36 @@ impl ServerManager { /// Start a server. pub async fn start_server(&self, uuid: &str) -> Result<(), DaemonError> { let mut managed = false; + let mut previous_state: Option = None; { let mut servers = self.servers.write().await; if let Some(spec) = servers.get_mut(uuid) { + // Recover from stale transitional state left by a previous failed start attempt. 
+ if spec.state == ServerState::Starting { + warn!(uuid = %uuid, "Recovering stale starting state"); + spec.state = ServerState::Stopped; + } if !spec.can_transition_to(&ServerState::Starting) { return Err(DaemonError::InvalidStateTransition { current: spec.state.to_string(), requested: "starting".to_string(), }); } + previous_state = Some(spec.state.clone()); spec.state = ServerState::Starting; managed = true; } } - self.docker.start_container(uuid).await.map_err(|e| { - DaemonError::Internal(format!("Failed to start container: {}", e)) - })?; + if let Err(e) = self.docker.start_container(uuid).await { + if managed { + let mut servers = self.servers.write().await; + if let Some(spec) = servers.get_mut(uuid) { + spec.state = previous_state.unwrap_or(ServerState::Error); + } + } + return Err(DaemonError::Internal(format!("Failed to start container: {}", e))); + } if managed { let mut servers = self.servers.write().await; @@ -164,23 +188,36 @@ impl ServerManager { /// Stop a server. pub async fn stop_server(&self, uuid: &str) -> Result<(), DaemonError> { let mut managed = false; + let mut previous_state: Option = None; { let mut servers = self.servers.write().await; if let Some(spec) = servers.get_mut(uuid) { + // Recover from stale transitional state left by a previous failed stop attempt. 
+ if spec.state == ServerState::Stopping { + warn!(uuid = %uuid, "Recovering stale stopping state"); + spec.state = ServerState::Running; + } if !spec.can_transition_to(&ServerState::Stopping) { return Err(DaemonError::InvalidStateTransition { current: spec.state.to_string(), requested: "stopping".to_string(), }); } + previous_state = Some(spec.state.clone()); spec.state = ServerState::Stopping; managed = true; } } - self.docker.stop_container(uuid, 30).await.map_err(|e| { - DaemonError::Internal(format!("Failed to stop container: {}", e)) - })?; + if let Err(e) = self.docker.stop_container(uuid, 30).await { + if managed { + let mut servers = self.servers.write().await; + if let Some(spec) = servers.get_mut(uuid) { + spec.state = previous_state.unwrap_or(ServerState::Error); + } + } + return Err(DaemonError::Internal(format!("Failed to stop container: {}", e))); + } if managed { let mut servers = self.servers.write().await; diff --git a/apps/web/src/lib/api.ts b/apps/web/src/lib/api.ts index 525a9b2..8f2780d 100644 --- a/apps/web/src/lib/api.ts +++ b/apps/web/src/lib/api.ts @@ -1,4 +1,9 @@ -const API_BASE = '/api'; +const RAW_API_BASE = ( + (import.meta.env.VITE_API_URL as string | undefined) ?? + (import.meta.env.VITE_API_BASE_URL as string | undefined) ?? 
+ '/api' +).trim(); +const API_BASE = (RAW_API_BASE || '/api').replace(/\/+$/, ''); interface RequestOptions extends RequestInit { params?: Record; @@ -36,7 +41,11 @@ async function request(path: string, options: RequestOptions = {}): Promise(path: string, options: RequestOptions = {}): Promise null)); if (retry.status === 204) return undefined as T; return retry.json(); diff --git a/apps/web/src/pages/admin/games.tsx b/apps/web/src/pages/admin/games.tsx index da5d88a..f74441f 100644 --- a/apps/web/src/pages/admin/games.tsx +++ b/apps/web/src/pages/admin/games.tsx @@ -1,7 +1,8 @@ -import { useState } from 'react'; +import { useState, type KeyboardEvent as ReactKeyboardEvent } from 'react'; import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; import { Plus, Gamepad2 } from 'lucide-react'; -import { api } from '@/lib/api'; +import { toast } from 'sonner'; +import { api, ApiError } from '@/lib/api'; import { Button } from '@/components/ui/button'; import { Input } from '@/components/ui/input'; import { Label } from '@/components/ui/label'; @@ -23,16 +24,55 @@ interface Game { dockerImage: string; defaultPort: number; startupCommand: string; + automationRules: unknown[]; } -interface PaginatedResponse { - data: T[]; - meta: { total: number }; +interface GamesResponse { + data: Game[]; +} + +function extractApiMessage(error: unknown, fallback: string): string { + if (error instanceof ApiError && error.data && typeof error.data === 'object') { + const maybeMessage = (error.data as { message?: unknown }).message; + if (typeof maybeMessage === 'string' && maybeMessage.trim()) { + return maybeMessage; + } + } + return fallback; +} + +function formatAutomationRules(value: unknown): string { + if (!Array.isArray(value)) { + return '[]'; + } + + try { + return JSON.stringify(value, null, 2); + } catch { + return '[]'; + } +} + +function parseAutomationRules(raw: string): { rules: unknown[]; error: string | null } { + try { + const parsed = 
JSON.parse(raw) as unknown; + if (!Array.isArray(parsed)) { + return { rules: [], error: 'Automation JSON must be an array.' }; + } + return { rules: parsed, error: null }; + } catch (error) { + const message = error instanceof Error ? error.message : 'Invalid JSON'; + return { rules: [], error: message }; + } } export function AdminGamesPage() { const queryClient = useQueryClient(); const [open, setOpen] = useState(false); + const [automationOpen, setAutomationOpen] = useState(false); + const [selectedGame, setSelectedGame] = useState(null); + const [automationJson, setAutomationJson] = useState('[]'); + const [automationError, setAutomationError] = useState(null); const [name, setName] = useState(''); const [slug, setSlug] = useState(''); const [dockerImage, setDockerImage] = useState(''); @@ -41,7 +81,7 @@ export function AdminGamesPage() { const { data } = useQuery({ queryKey: ['admin-games'], - queryFn: () => api.get>('/admin/games'), + queryFn: () => api.get('/admin/games'), }); const createMutation = useMutation({ @@ -53,11 +93,70 @@ export function AdminGamesPage() { setSlug(''); setDockerImage(''); setStartupCommand(''); + toast.success('Game created'); + }, + onError: (error) => { + toast.error(extractApiMessage(error, 'Failed to create game')); + }, + }); + + const updateAutomationMutation = useMutation({ + mutationFn: ({ gameId, rules }: { gameId: string; rules: unknown[] }) => + api.patch(`/admin/games/${gameId}`, { automationRules: rules }), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ['admin-games'] }); + setAutomationOpen(false); + setSelectedGame(null); + setAutomationError(null); + toast.success('Automation rules updated'); + }, + onError: (error) => { + toast.error(extractApiMessage(error, 'Failed to save automation rules')); }, }); const games = data?.data ?? 
[]; + const openAutomationDialog = (game: Game) => { + setSelectedGame(game); + setAutomationJson(formatAutomationRules(game.automationRules)); + setAutomationError(null); + setAutomationOpen(true); + }; + + const saveAutomationRules = () => { + if (!selectedGame) return; + + const parsed = parseAutomationRules(automationJson); + if (parsed.error) { + setAutomationError(parsed.error); + return; + } + + setAutomationError(null); + updateAutomationMutation.mutate({ + gameId: selectedGame.id, + rules: parsed.rules, + }); + }; + + const handleAutomationTabKey = (event: ReactKeyboardEvent) => { + if (event.key !== 'Tab') return; + + event.preventDefault(); + const textarea = event.currentTarget; + const selectionStart = textarea.selectionStart; + const selectionEnd = textarea.selectionEnd; + const nextValue = `${automationJson.slice(0, selectionStart)} ${automationJson.slice(selectionEnd)}`; + const nextCursor = selectionStart + 2; + + setAutomationJson(nextValue); + requestAnimationFrame(() => { + textarea.focus(); + textarea.setSelectionRange(nextCursor, nextCursor); + }); + }; + return (
@@ -142,11 +241,65 @@ export function AdminGamesPage() {

{game.dockerImage}

Port: {game.defaultPort}

+

Automation: {Array.isArray(game.automationRules) ? game.automationRules.length : 0} workflow

+ ))}
+ + { + setAutomationOpen(nextOpen); + if (!nextOpen) { + setSelectedGame(null); + setAutomationError(null); + } + }} + > + + + + Automation Rules + {selectedGame ? ` - ${selectedGame.name}` : ''} + + + +
+ +

+ Supported events: server.created, server.install.completed, server.power.started, server.power.stopped +

+