Refactor: Initial code split into includes directory for modularity

- Reorganized backend logic by moving API, authentication, Docker, status, and WebSocket handling into separate modules (api.js, auth.js, docker.js, status.js, websocket.js) within ./includes/
- Converted the codebase to modern ES modules using import/export syntax
- Updated index.js to serve as the main entry point, importing from ./includes/
- Reduced code duplication and improved readability through modularized functions
- Preserved full functionality, including Docker stats and WebSocket communication
- Updated README to reflect new folder structure and ES module setup
MCHost
2025-06-16 12:30:18 -04:00
parent fb0eb5ed44
commit d38e2ad1f1
8 changed files with 1073 additions and 1533 deletions


@@ -45,14 +45,27 @@ Before setting up HyperMC Panel, ensure you have the following installed:
2. **Install Dependencies**
```bash
npm install express cors ws dockerode node-fetch unirest dotenv
```
3. **Set Up Environment Variables**
Create a `.env` file in the root directory and configure the required variables (see [Configuration](#configuration)).
4. **Update package.json**
Ensure your `package.json` includes `"type": "module"` to enable ES modules:
```json
{
"type": "module",
"scripts": {
"start": "node index.js"
}
}
```
5. **Start the Server**
```bash
npm start
@@ -149,14 +162,38 @@ Replace placeholder values (e.g., `https://api.example.com`, `your_secure_secret
│ │ └── styles.min.css # Compiled Tailwind CSS
│ ├── favicon/ # Favicon assets
│ └── index.html # Main HTML file
├── includes/
│ ├── api.js # API request handling
│ ├── auth.js # Authentication and login link logic
│ ├── docker.js # Docker-related functions
│ ├── status.js # Connection status checking functions
│ ├── websocket.js # WebSocket connection handling
├── index.js # Main server entry point
├── app.js # Frontend JavaScript logic
├── server.js # Backend server logic
├── .env # Environment variables (not tracked)
├── package.json # Node.js dependencies
└── README.md # Project documentation
```
## Contributing
Contributions are welcome! To contribute:
1. Fork the repository.
2. Create a new branch (`git checkout -b feature/your-feature`).
3. Make your changes and commit (`git commit -m "Add your feature"`).
4. Push to your branch (`git push origin feature/your-feature`).
5. Open a pull request on the [repository](https://git.ssh.surf/hypermc/panel).
Please ensure your code follows the project's coding style and includes appropriate tests.
## License
This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
## Acknowledgments
- **Chart.js**: For beautiful, responsive charts.
- **xterm.js**: For the terminal interface.
- **Tailwind CSS**: For the sleek, modern UI.
@@ -166,5 +203,3 @@ Replace placeholder values (e.g., `https://api.example.com`, `your_secure_secret
## Contact
For questions, issues, or suggestions, please open an issue on the [repository](https://git.ssh.surf/hypermc/panel) or contact the maintainer at [raven-scott.fyi](https://raven-scott.fyi).
Happy server management with HyperMC Panel! 🚀

includes/api.js Normal file

@@ -0,0 +1,21 @@
import fetch from 'node-fetch';
export async function apiRequest(endpoint, apiKey, method = 'GET', body = null) {
const headers = {
'Accept': 'application/json',
'Content-Type': 'application/json',
'x-my-mc-auth': apiKey
};
try {
const response = await fetch(`${process.env.API_URL}${endpoint}`, {
method,
headers,
body: body ? JSON.stringify(body) : null
});
const data = await response.json();
if (!response.ok) return { error: data.message || `HTTP ${response.status}` };
return data;
} catch (error) {
return { error: `Network error: ${error.message}` };
}
}
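A minimal usage sketch of this helper (the endpoint is one of those referenced in websocket.js; the API key is a placeholder, and `API_URL` is assumed to be set in `.env`):
```js
// Hypothetical caller: fetch the server's mod list through the shared helper.
import { apiRequest } from './includes/api.js';

const mods = await apiRequest('/mod-list', 'your-api-key-here');
if (mods.error) {
  console.error('API error:', mods.error);
} else {
  console.log('Mods:', mods);
}
```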

includes/auth.js Normal file

@@ -0,0 +1,88 @@
import unirest from 'unirest';
import { randomBytes } from 'crypto';
const temporaryLinks = new Map();
setInterval(() => {
const now = Date.now();
for (const [linkId, linkData] of temporaryLinks.entries()) {
if (linkData.expiresAt < now) temporaryLinks.delete(linkId);
}
}, parseInt(process.env.TEMP_LINKS_CLEANUP_INTERVAL_MS, 10));
export async function generateLoginLink(req, res) {
try {
const { secretKey, username } = req.body;
if (secretKey !== process.env.ADMIN_SECRET_KEY) return res.status(401).json({ error: 'Invalid secret key' });
if (!username) return res.status(400).json({ error: 'Username is required' });
const tokenResponse = await unirest
.post(process.env.AUTH_ENDPOINT)
.headers({ 'Accept': 'application/json', 'Content-Type': 'application/json' })
.send({ username, password: process.env.AUTH_PASSWORD });
if (!tokenResponse.body.token) return res.status(500).json({ error: 'Failed to generate API key' });
const apiKey = tokenResponse.body.token;
const linkId = randomBytes(parseInt(process.env.LINK_ID_BYTES, 10)).toString('hex');
const loginLink = `${process.env.AUTO_LOGIN_LINK_PREFIX}${linkId}`;
temporaryLinks.set(linkId, {
apiKey,
username,
expiresAt: Date.now() + parseInt(process.env.LINK_EXPIRY_SECONDS, 10) * 1000
});
setTimeout(() => temporaryLinks.delete(linkId), parseInt(process.env.LINK_EXPIRY_SECONDS, 10) * 1000);
res.json({ loginLink });
} catch (error) {
res.status(500).json({ error: 'Internal server error' });
}
}
export function handleAutoLogin(req, res) {
const { linkId } = req.params;
const linkData = temporaryLinks.get(linkId);
if (!linkData || linkData.expiresAt < Date.now()) {
temporaryLinks.delete(linkId);
return res.send(`
<html>
<head>
<meta http-equiv="refresh" content="3;url=${process.env.AUTO_LOGIN_REDIRECT_URL}">
<style>
body { display: flex; justify-content: center; align-items: center; height: 100vh; margin: 0; background-color: #111827; font-family: 'Arial', sans-serif; }
.notification { background-color: #1f2937; color: white; padding: 16px; border-radius: 8px; display: flex; flex-direction: column; align-items: center; gap: 12px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); max-width: 400px; width: 100%; }
h1 { font-size: 2.25em; color: white; text-align: center; margin: 0; }
.spinner { border: 4px solid rgba(255, 255, 255, 0.3); border-top: 4px solid #ffffff; border-radius: 50%; width: 24px; height: 24px; animation: spin 1s linear infinite; }
@keyframes spin { 0% { transform: rotate(0deg); } 100% { transform: rotate(360deg); } }
</style>
</head>
<body>
<div class="notification">
<span class="spinner"></span>
<h1>Login Expired.</h1>
<h1>Redirecting...</h1>
</div>
</body>
</html>
`);
}
temporaryLinks.delete(linkId);
res.send(`
<!DOCTYPE html>
<html>
<head>
<title>Auto Login</title>
<script>
localStorage.setItem('apiKey', '${linkData.apiKey}');
window.location.href = '/';
</script>
</head>
<body>
<p>Logging in...</p>
</body>
</html>
`);
}
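A small sketch of the admin flow these handlers implement, assuming the generate route is mounted at a path like `/generate-login-link` (the mount points and hostname below are assumptions; they are not shown in this diff):
```js
// Hypothetical admin-side call: exchange the shared secret for a one-time login link.
import fetch from 'node-fetch';

const res = await fetch('https://panel.example.com/generate-login-link', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ secretKey: process.env.ADMIN_SECRET_KEY, username: 'player1' })
});
const { loginLink, error } = await res.json();
if (error) throw new Error(error);
// The link expires after LINK_EXPIRY_SECONDS and logs the user in via handleAutoLogin.
console.log('One-time login link:', loginLink);
```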

includes/docker.js Normal file

@@ -0,0 +1,188 @@
import Docker from 'dockerode';
import { promisify } from 'util';
import { exec } from 'child_process';
import path from 'path';
const execPromise = promisify(exec);
export function setupDocker() {
return new Docker({ socketPath: process.env.DOCKER_SOCKET_PATH });
}
export async function getContainerStats(docker, containerName) {
try {
const container = docker.getContainer(containerName);
const [containers, info, stats] = await Promise.all([
docker.listContainers({ all: true }),
container.inspect(),
container.stats({ stream: false })
]);
if (!containers.some(c => c.Names.includes(`/${containerName}`))) {
return { error: `Container ${containerName} not found` };
}
const memoryUsage = stats.memory_stats.usage / 1024 / 1024;
const memoryLimit = stats.memory_stats.limit / 1024 / 1024 / 1024;
const memoryPercent = ((memoryUsage / (memoryLimit * 1024)) * 100).toFixed(2);
const cpuDelta = stats.cpu_stats.cpu_usage.total_usage - (stats.precpu_stats.cpu_usage?.total_usage || 0);
const systemDelta = stats.cpu_stats.system_cpu_usage - (stats.precpu_stats.system_cpu_usage || 0);
const cpuPercent = systemDelta > 0 ? ((cpuDelta / systemDelta) * stats.cpu_stats.online_cpus * 100).toFixed(2) : 0;
return {
status: info.State.Status,
memory: { raw: `${memoryUsage.toFixed(2)}MiB / ${memoryLimit.toFixed(2)}GiB`, percent: memoryPercent },
cpu: cpuPercent
};
} catch (error) {
console.error(`Docker stats error for ${containerName}:`, error.message);
return { error: `Failed to fetch stats for ${containerName}: ${error.message}` };
}
}
export async function streamContainerLogs(docker, ws, containerName, client) {
let isStreaming = true;
let isStartingStream = false;
const startLogStream = async () => {
if (isStartingStream) return false;
isStartingStream = true;
try {
const container = docker.getContainer(containerName);
const [containers, inspect] = await Promise.all([
docker.listContainers({ all: true }),
container.inspect()
]);
if (!containers.some(c => c.Names.includes(`/${containerName}`))) {
if (isStreaming) ws.send(JSON.stringify({ type: 'docker-logs', error: `Container ${containerName} not found` }));
return false;
}
if (inspect.State.Status !== 'running') {
if (isStreaming) ws.send(JSON.stringify({ type: 'docker-logs', error: `Container ${containerName} is not running` }));
return false;
}
if (client.logStream) {
client.logStream.removeAllListeners();
client.logStream.destroy();
client.logStream = null;
}
const logStream = await container.logs({
follow: true,
stdout: true,
stderr: true,
tail: parseInt(process.env.LOG_STREAM_TAIL_LINES, 10),
timestamps: true
});
logStream.on('data', (chunk) => {
if (isStreaming && client.logStream === logStream) {
ws.send(JSON.stringify({ type: 'docker-logs', data: { log: chunk.toString('utf8') } }));
}
});
logStream.on('error', (error) => {
if (isStreaming) ws.send(JSON.stringify({ type: 'docker-logs', error: `Log stream error: ${error.message}` }));
});
client.logStream = logStream;
return true;
} catch (error) {
if (isStreaming) ws.send(JSON.stringify({ type: 'docker-logs', error: `Failed to stream logs: ${error.message}` }));
return false;
} finally {
isStartingStream = false;
}
};
const monitorContainer = async () => {
try {
const container = docker.getContainer(containerName);
const inspect = await container.inspect();
if (inspect.State.Status !== 'running') {
if (client.logStream) {
client.logStream.removeAllListeners();
client.logStream.destroy();
client.logStream = null;
}
return false;
}
return true;
} catch (error) {
return false;
}
};
if (!(await startLogStream())) {
const monitorInterval = setInterval(async () => {
if (!isStreaming) return clearInterval(monitorInterval);
if (await monitorContainer() && !client.logStream && !isStartingStream) {
await startLogStream();
}
}, parseInt(process.env.LOG_STREAM_MONITOR_INTERVAL_MS, 10));
ws.on('close', () => {
isStreaming = false;
clearInterval(monitorInterval);
if (client.logStream) {
client.logStream.removeAllListeners();
client.logStream.destroy();
client.logStream = null;
}
});
return;
}
const monitorInterval = setInterval(async () => {
if (!isStreaming) return clearInterval(monitorInterval);
if (await monitorContainer() && !client.logStream && !isStartingStream) {
await startLogStream();
}
}, parseInt(process.env.LOG_STREAM_MONITOR_INTERVAL_MS, 10));
ws.on('close', () => {
isStreaming = false;
clearInterval(monitorInterval);
if (client.logStream) {
client.logStream.removeAllListeners();
client.logStream.destroy();
client.logStream = null;
}
});
}
export async function readServerProperties(docker, containerName) {
try {
const container = docker.getContainer(containerName);
const inspect = await container.inspect();
if (inspect.State.Status !== 'running') {
return { error: `Container ${containerName} is not running` };
}
const { stdout, stderr } = await execPromise(`docker exec ${containerName} bash -c "cat ${process.env.SERVER_PROPERTIES_PATH}"`);
if (stderr) return { error: 'Failed to read server.properties' };
return { content: stdout };
} catch (error) {
return { error: `Failed to read server.properties: ${error.message}` };
}
}
export async function writeServerProperties(docker, containerName, content) {
try {
const { randomBytes } = await import('crypto');
const tmpDir = process.env.TEMP_DIR;
const randomId = randomBytes(parseInt(process.env.TEMP_FILE_RANDOM_ID_BYTES, 10)).toString('hex');
const tmpFile = path.join(tmpDir, `server_properties_${randomId}.tmp`);
const containerFilePath = `${process.env.CONTAINER_TEMP_FILE_PREFIX}${randomId}.tmp`;
await (await import('fs')).promises.writeFile(tmpFile, content);
await execPromise(`docker cp ${tmpFile} ${containerName}:${containerFilePath}`);
await execPromise(`docker exec ${containerName} bash -c "mv ${containerFilePath} ${process.env.SERVER_PROPERTIES_PATH} && chown mc:mc ${process.env.SERVER_PROPERTIES_PATH}"`);
await (await import('fs')).promises.unlink(tmpFile).catch(err => console.error(`Error deleting temp file: ${err.message}`));
return { message: 'Server properties updated' };
} catch (error) {
return { error: `Failed to write server.properties: ${error.message}` };
}
}

includes/status.js Normal file

@@ -0,0 +1,49 @@
import { promisify } from 'util';
import { exec } from 'child_process';
import { Socket } from 'net';
const execPromise = promisify(exec);
export async function checkConnectionStatus(hostname, port) {
try {
const { stdout, stderr } = await execPromise(`${process.env.STATUS_CHECK_PATH} -host ${hostname} -port ${port}`);
if (stderr) return { isOnline: false, error: stderr };
return { isOnline: true, data: JSON.parse(stdout) };
} catch (error) {
return { isOnline: false, error: error.message };
}
}
export async function checkGeyserStatus(hostname, port) {
try {
const { stdout, stderr } = await execPromise(`${process.env.GEYSER_STATUS_CHECK_PATH} -host ${hostname} -port ${port}`);
if (stderr) return { isOnline: false, error: stderr };
return { isOnline: true, data: JSON.parse(stdout) };
} catch (error) {
return { isOnline: false, error: error.message };
}
}
export async function checkSftpStatus(hostname, port) {
return new Promise((resolve) => {
const socket = new Socket();
const timeout = parseInt(process.env.SFTP_CONNECTION_TIMEOUT_MS, 10);
socket.setTimeout(timeout);
socket.on('connect', () => {
socket.destroy();
resolve({ isOnline: true });
});
socket.on('timeout', () => {
socket.destroy();
resolve({ isOnline: false, error: 'Connection timed out' });
});
socket.on('error', (error) => {
socket.destroy();
resolve({ isOnline: false, error: error.message });
});
socket.connect(port, process.env.SFTP_HOSTNAME);
});
}

includes/websocket.js Normal file

@@ -0,0 +1,667 @@
import { URLSearchParams } from 'url';
import { getContainerStats, streamContainerLogs, readServerProperties, writeServerProperties } from './docker.js';
import { checkConnectionStatus, checkGeyserStatus, checkSftpStatus } from './status.js';
import { apiRequest } from './api.js';
const clients = new Map();
const staticEndpoints = ['log', 'website', 'map', 'my-link-cache', 'my-geyser-cache', 'my-sftp-cache', 'my-link', 'my-geyser-link', 'my-sftp'];
const dynamicEndpoints = ['hello', 'time', 'mod-list'];
async function fetchAndSendUpdate(ws, endpoint, client, docker) {
if (['mod-list', 'list-players'].includes(endpoint) && client.user !== 'Unknown') {
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status !== 'running') {
ws.send(JSON.stringify({ type: endpoint, error: `Container ${client.user} is not running` }));
return;
}
} catch (error) {
ws.send(JSON.stringify({ type: endpoint, error: `Failed to check container status: ${error.message}` }));
return;
}
}
if (endpoint === 'time' && client.cache['time']) {
ws.send(JSON.stringify({ type: endpoint, data: client.cache['time'] }));
return;
}
const response = await apiRequest(`/${endpoint}`, client.apiKey);
if (!response.error) {
if (endpoint === 'time') client.cache['time'] = response;
if (endpoint === 'my-link-cache') {
client.cache['my-link-cache'] = response;
if (client.subscriptions.has('my-link-cache') && client.user !== 'Unknown') {
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running' && response.hostname && response.port) {
const status = await checkConnectionStatus(response.hostname, response.port);
ws.send(JSON.stringify({ type: 'connection-status', data: { isOnline: status.isOnline } }));
} else {
ws.send(JSON.stringify({ type: 'connection-status', error: `Container ${client.user} is not running` }));
}
} catch (error) {
ws.send(JSON.stringify({ type: 'connection-status', error: `Failed to check container status: ${error.message}` }));
}
}
}
if (endpoint === 'my-geyser-cache') {
client.cache['my-geyser-cache'] = response;
if (client.subscriptions.has('my-geyser-cache') && client.user !== 'Unknown') {
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running' && response.hostname && response.port) {
const status = await checkGeyserStatus(response.hostname, response.port);
ws.send(JSON.stringify({ type: 'geyser-status', data: { isOnline: status.isOnline } }));
} else {
ws.send(JSON.stringify({ type: 'geyser-status', error: `Container ${client.user} is not running` }));
}
} catch (error) {
ws.send(JSON.stringify({ type: 'geyser-status', error: `Failed to check container status: ${error.message}` }));
}
}
}
if (endpoint === 'my-sftp-cache') {
client.cache['my-sftp-cache'] = response;
if (client.subscriptions.has('my-sftp-cache') && client.user !== 'Unknown') {
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running' && response.hostname && response.port) {
const status = await checkSftpStatus(response.hostname, response.port);
ws.send(JSON.stringify({ type: 'sftp-status', data: { isOnline: status.isOnline } }));
} else {
ws.send(JSON.stringify({ type: 'sftp-status', error: `Container ${client.user} is not running` }));
}
} catch (error) {
ws.send(JSON.stringify({ type: 'sftp-status', error: `Failed to check container status: ${error.message}` }));
}
}
}
ws.send(JSON.stringify({ type: endpoint, data: response }));
} else {
ws.send(JSON.stringify({ type: endpoint, error: response.error }));
}
}
async function manageStatusChecks(ws, client, user, docker) {
try {
const container = docker.getContainer(user);
const inspect = await container.inspect();
const isRunning = inspect.State.Status === 'running';
client.intervals.forEach(clearInterval);
client.intervals = [];
['connectionStatusInterval', 'geyserStatusInterval', 'sftpStatusInterval', 'statusCheckMonitorInterval'].forEach((key) => {
if (client[key]) clearInterval(client[key]);
client[key] = null;
});
if (!isRunning || user === 'Unknown') {
['my-link-cache', 'my-geyser-cache', 'my-sftp-cache'].forEach((sub) => {
if (client.subscriptions.has(sub)) {
ws.send(JSON.stringify({ type: sub.replace('-cache', '-status'), error: `Container ${user} is not running or user unknown` }));
}
});
if (!isRunning && (client.subscriptions.has('my-link-cache') || client.subscriptions.has('my-geyser-cache') || client.subscriptions.has('my-sftp-cache')) && user !== 'Unknown') {
console.log(`Starting container status monitor for ${user}`);
client.statusCheckMonitorInterval = setInterval(async () => {
try {
const monitorContainer = docker.getContainer(user);
const monitorInspect = await monitorContainer.inspect();
if (monitorInspect.State.Status === 'running') {
console.log(`Container ${user} is running, restarting status checks`);
await manageStatusChecks(ws, client, user, docker);
clearInterval(client.statusCheckMonitorInterval);
client.statusCheckMonitorInterval = null;
}
} catch (error) {
console.error(`Error monitoring container ${user}:`, error.message);
}
}, parseInt(process.env.CONTAINER_STATUS_MONITOR_INTERVAL_MS, 10));
client.intervals.push(client.statusCheckMonitorInterval);
}
return;
}
const statusChecks = [
{
subscription: 'my-link-cache',
intervalKey: 'connectionStatusInterval',
intervalMs: process.env.CONNECTION_STATUS_INTERVAL_MS,
checkFn: checkConnectionStatus,
cacheKey: 'my-link-cache',
statusType: 'connection-status'
},
{
subscription: 'my-geyser-cache',
intervalKey: 'geyserStatusInterval',
intervalMs: process.env.GEYSER_STATUS_INTERVAL_MS,
checkFn: checkGeyserStatus,
cacheKey: 'my-geyser-cache',
statusType: 'geyser-status'
},
{
subscription: 'my-sftp-cache',
intervalKey: 'sftpStatusInterval',
intervalMs: process.env.SFTP_STATUS_INTERVAL_MS,
checkFn: checkSftpStatus,
cacheKey: 'my-sftp-cache',
statusType: 'sftp-status'
}
];
for (const { subscription, intervalKey, intervalMs, checkFn, cacheKey, statusType } of statusChecks) {
if (client.subscriptions.has(subscription)) {
console.log(`Starting ${statusType} check for ${user}`);
client[intervalKey] = setInterval(async () => {
try {
const containerCheck = docker.getContainer(user);
const inspectCheck = await containerCheck.inspect();
if (inspectCheck.State.Status !== 'running') {
console.log(`Container ${user} stopped, clearing ${statusType} interval`);
clearInterval(client[intervalKey]);
client[intervalKey] = null;
return;
}
const data = client.cache[cacheKey];
if (data && data.hostname && data.port) {
const status = await checkFn(data.hostname, data.port);
ws.send(JSON.stringify({ type: statusType, data: { isOnline: status.isOnline } }));
}
} catch (error) {
console.error(`Error in ${statusType} check for ${user}:`, error.message);
ws.send(JSON.stringify({ type: statusType, data: { isOnline: false, error: error.message } }));
}
}, parseInt(intervalMs, 10));
client.intervals.push(client[intervalKey]);
const data = client.cache[cacheKey];
if (data && data.hostname && data.port) {
console.log(`Performing initial ${statusType} check for ${user}`);
const status = await checkFn(data.hostname, data.port);
ws.send(JSON.stringify({ type: statusType, data: { isOnline: status.isOnline } }));
}
}
}
} catch (error) {
console.error(`Error managing status checks for ${user}:`, error.message);
}
}
export function handleWebSocket(ws, req, docker) {
const urlParams = new URLSearchParams(req.url.split('?')[1]);
const apiKey = urlParams.get('apiKey');
if (!apiKey) {
console.error('WebSocket connection rejected: Missing API key');
ws.send(JSON.stringify({ error: 'API key required' }));
ws.close();
return;
}
const client = {
apiKey,
subscriptions: new Set(),
user: null,
intervals: [],
logStream: null,
cache: {},
connectionStatusInterval: null,
geyserStatusInterval: null,
sftpStatusInterval: null,
statusCheckMonitorInterval: null
};
clients.set(ws, client);
console.log('WebSocket client registered with API key');
ws.on('message', async (message) => {
try {
const data = JSON.parse(message.toString());
if (data.type === 'subscribe') {
data.endpoints.forEach(endpoint => {
client.subscriptions.add(endpoint);
console.log(`Client subscribed to ${endpoint}`);
});
console.log(`Client subscriptions: ${Array.from(client.subscriptions)}`);
let hello = client.cache['hello'] || await apiRequest('/hello', client.apiKey);
if (!client.cache['hello'] && !hello.error) client.cache['hello'] = hello;
if (hello.error) {
console.error('Failed to fetch /hello:', hello.error);
ws.send(JSON.stringify({ type: 'hello', error: hello.error }));
return;
}
if (hello.message && typeof hello.message === 'string') {
const user = hello.message.split(', ')[1]?.replace('!', '').trim() || 'Unknown';
client.user = user;
console.log(`User identified: ${user}`);
ws.send(JSON.stringify({ type: 'hello', data: hello }));
if (client.subscriptions.has('docker') && user !== 'Unknown') {
try {
const container = docker.getContainer(user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running') {
console.log(`Starting docker stats interval for ${user}`);
client.intervals.push(setInterval(async () => {
try {
console.log(`Fetching docker stats for ${user}`);
const stats = await getContainerStats(docker, user);
if (stats.error) {
console.error(`Error fetching stats for ${user}: ${stats.error}`);
ws.send(JSON.stringify({ type: 'docker', error: stats.error }));
} else {
console.log(`Sending docker stats for ${user}:`, stats);
ws.send(JSON.stringify({ type: 'docker', data: { ...stats, user } }));
}
} catch (error) {
console.error(`Error in docker stats interval for ${user}:`, error.message);
ws.send(JSON.stringify({ type: 'docker', error: `Failed to fetch stats: ${error.message}` }));
}
}, parseInt(process.env.DOCKER_STATS_INTERVAL_MS, 10)));
// Send initial stats immediately
console.log(`Sending initial docker stats for ${user}`);
const initialStats = await getContainerStats(docker, user);
ws.send(JSON.stringify({ type: 'docker', data: { ...initialStats, user } }));
} else {
console.log(`Container ${user} is not running, skipping docker stats interval`);
ws.send(JSON.stringify({ type: 'docker', error: `Container ${user} is not running` }));
}
} catch (error) {
console.error(`Error checking container status for docker stats for ${user}:`, error.message);
ws.send(JSON.stringify({ type: 'docker', error: `Failed to check container status: ${error.message}` }));
}
} else if (user === 'Unknown') {
console.warn('Cannot start docker stats interval: User is Unknown');
ws.send(JSON.stringify({ type: 'docker', error: 'User not identified' }));
}
if (client.subscriptions.has('docker-logs') && user !== 'Unknown') {
console.log(`Starting docker logs stream for ${user}`);
await streamContainerLogs(docker, ws, user, client);
} else if (user === 'Unknown') {
console.warn('Cannot start docker logs stream: User is Unknown');
ws.send(JSON.stringify({ type: 'docker-logs', error: 'User not identified' }));
}
await manageStatusChecks(ws, client, user, docker);
await Promise.all([
...staticEndpoints.filter(e => client.subscriptions.has(e)).map(e => fetchAndSendUpdate(ws, e, client, docker)),
...dynamicEndpoints.filter(e => client.subscriptions.has(e)).map(async (e) => {
if (e === 'hello' && client.cache['hello']) {
ws.send(JSON.stringify({ type: 'hello', data: client.cache['hello'] }));
return;
}
if (e === 'time' && client.cache['time']) {
ws.send(JSON.stringify({ type: 'time', data: client.cache['time'] }));
return;
}
await fetchAndSendUpdate(ws, e, client, docker);
}),
client.subscriptions.has('list-players') ? fetchAndSendUpdate(ws, 'list-players', client, docker) : null
].filter(Boolean));
client.intervals.push(setInterval(async () => {
try {
for (const endpoint of dynamicEndpoints) {
if (client.subscriptions.has(endpoint) && !(endpoint === 'hello' && client.cache['hello'] || endpoint === 'time' && client.cache['time'])) {
await fetchAndSendUpdate(ws, endpoint, client, docker);
}
}
} catch (error) {
console.error('Error in dynamic endpoints interval:', error.message);
}
}, parseInt(process.env.DYNAMIC_ENDPOINTS_INTERVAL_MS, 10)));
client.intervals.push(setInterval(async () => {
try {
for (const endpoint of staticEndpoints) {
if (client.subscriptions.has(endpoint)) {
await fetchAndSendUpdate(ws, endpoint, client, docker);
}
}
} catch (error) {
console.error('Error in static endpoints interval:', error.message);
}
}, parseInt(process.env.STATIC_ENDPOINTS_INTERVAL_MS, 10)));
if (client.subscriptions.has('list-players') && user !== 'Unknown') {
try {
const container = docker.getContainer(user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running') {
console.log(`Starting list-players interval for ${user}`);
client.intervals.push(setInterval(() => fetchAndSendUpdate(ws, 'list-players', client, docker), parseInt(process.env.LIST_PLAYERS_INTERVAL_MS, 10)));
} else {
console.log(`Container ${user} is not running, skipping list-players interval`);
ws.send(JSON.stringify({ type: 'list-players', error: `Container ${user} is not running` }));
}
} catch (error) {
console.error(`Error checking container status for list-players for ${user}:`, error.message);
ws.send(JSON.stringify({ type: 'list-players', error: `Failed to check container status: ${error.message}` }));
}
}
} else {
console.error('Invalid /hello response:', hello);
ws.send(JSON.stringify({ type: 'hello', error: 'Invalid hello response' }));
}
} else if (data.type === 'updateUser') {
client.user = data.user;
console.log(`Updated user to: ${client.user}`);
if (client.user !== 'Unknown') {
client.intervals.forEach(clearInterval);
client.intervals = [];
['connectionStatusInterval', 'geyserStatusInterval', 'sftpStatusInterval', 'statusCheckMonitorInterval'].forEach((key) => {
if (client[key]) clearInterval(client[key]);
client[key] = null;
});
if (client.subscriptions.has('docker')) {
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running') {
console.log(`Starting docker stats interval for new user ${client.user}`);
client.intervals.push(setInterval(async () => {
try {
console.log(`Fetching docker stats for ${client.user}`);
const stats = await getContainerStats(docker, client.user);
if (stats.error) {
console.error(`Error fetching stats for ${client.user}: ${stats.error}`);
ws.send(JSON.stringify({ type: 'docker', error: stats.error }));
} else {
console.log(`Sending docker stats for ${client.user}:`, stats);
ws.send(JSON.stringify({ type: 'docker', data: { ...stats, user: client.user } }));
}
} catch (error) {
console.error(`Error in docker stats interval for ${client.user}:`, error.message);
ws.send(JSON.stringify({ type: 'docker', error: `Failed to fetch stats: ${error.message}` }));
}
}, parseInt(process.env.DOCKER_STATS_INTERVAL_MS, 10)));
// Send initial stats immediately
console.log(`Sending initial docker stats for ${client.user}`);
const initialStats = await getContainerStats(docker, client.user);
ws.send(JSON.stringify({ type: 'docker', data: { ...initialStats, user: client.user } }));
} else {
console.log(`Container ${client.user} is not running, skipping docker stats interval`);
ws.send(JSON.stringify({ type: 'docker', error: `Container ${client.user} is not running` }));
}
} catch (error) {
console.error(`Error checking container status for docker stats for ${client.user}:`, error.message);
ws.send(JSON.stringify({ type: 'docker', error: `Failed to check container status: ${error.message}` }));
}
}
if (client.subscriptions.has('list-players')) {
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running') {
console.log(`Starting list-players interval for new user ${client.user}`);
client.intervals.push(setInterval(() => fetchAndSendUpdate(ws, 'list-players', client, docker), parseInt(process.env.LIST_PLAYERS_NEW_USER_INTERVAL_MS, 10)));
} else {
console.log(`Container ${client.user} is not running, skipping list-players interval`);
ws.send(JSON.stringify({ type: 'list-players', error: `Container ${client.user} is not running` }));
}
} catch (error) {
console.error(`Error checking container status for list-players for ${client.user}:`, error.message);
ws.send(JSON.stringify({ type: 'list-players', error: `Failed to check container status: ${error.message}` }));
}
}
await manageStatusChecks(ws, client, client.user, docker);
if (client.subscriptions.has('docker-logs')) {
if (client.logStream) {
client.logStream.destroy();
client.logStream = null;
}
console.log(`Starting docker logs stream for new user ${client.user}`);
await streamContainerLogs(docker, ws, client.user, client);
}
}
} else if (data.type === 'request') {
const { requestId, endpoint, method, body } = data;
let response;
if (endpoint.startsWith('/docker') || endpoint === '/docker') {
response = client.user === 'Unknown' ? { error: 'User not identified' } : await getContainerStats(docker, client.user);
console.log(`Docker stats request response for ${client.user}:`, response);
} else if (endpoint === '/search' && method === 'POST' && body) {
response = await apiRequest(endpoint, client.apiKey, method, body);
response.totalResults = response.totalResults || (response.results ? response.results.length : 0);
} else if (endpoint === '/server-properties' && method === 'GET') {
response = client.user === 'Unknown' ? { error: 'User not identified' } : await readServerProperties(docker, client.user);
} else if (endpoint === '/server-properties' && method === 'POST' && body && body.content) {
response = client.user === 'Unknown' ? { error: 'User not identified' } : await writeServerProperties(docker, client.user, body.content);
} else {
response = await apiRequest(endpoint, client.apiKey, method, body);
}
ws.send(JSON.stringify({ requestId, ...response }));
if (['my-link', 'my-geyser-link', 'my-sftp'].includes(endpoint) && !response.error) {
await fetchAndSendUpdate(ws, endpoint, client, docker);
if (endpoint === 'my-link') {
const linkData = await apiRequest('/my-link-cache', client.apiKey);
if (!linkData.error) {
client.cache['my-link-cache'] = linkData;
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running') {
console.log(`Performing status check after my-link request for ${client.user}`);
const status = await checkConnectionStatus(linkData.hostname, linkData.port);
ws.send(JSON.stringify({ type: 'connection-status', data: { isOnline: status.isOnline } }));
} else {
ws.send(JSON.stringify({ type: 'connection-status', error: `Container ${client.user} is not running` }));
}
} catch (error) {
console.error(`Error checking container status for ${client.user}:`, error.message);
ws.send(JSON.stringify({ type: 'connection-status', error: `Failed to check container status: ${error.message}` }));
}
}
} else if (endpoint === 'my-geyser-link') {
const geyserData = await apiRequest('/my-geyser-cache', client.apiKey);
if (!geyserData.error) {
client.cache['my-geyser-cache'] = geyserData;
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running') {
console.log(`Performing status check after my-geyser-link request for ${client.user}`);
const status = await checkGeyserStatus(geyserData.hostname, geyserData.port);
ws.send(JSON.stringify({ type: 'geyser-status', data: { isOnline: status.isOnline } }));
} else {
ws.send(JSON.stringify({ type: 'geyser-status', error: `Container ${client.user} is not running` }));
}
} catch (error) {
console.error(`Error checking container status for ${client.user}:`, error.message);
ws.send(JSON.stringify({ type: 'geyser-status', error: `Failed to check container status: ${error.message}` }));
}
}
} else if (endpoint === 'my-sftp') {
const sftpData = await apiRequest('/my-sftp-cache', client.apiKey);
if (!sftpData.error) {
client.cache['my-sftp-cache'] = sftpData;
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running') {
console.log(`Performing status check after my-sftp request for ${client.user}`);
const status = await checkSftpStatus(sftpData.hostname, sftpData.port);
ws.send(JSON.stringify({ type: 'sftp-status', data: { isOnline: status.isOnline } }));
} else {
ws.send(JSON.stringify({ type: 'sftp-status', error: `Container ${client.user} is not running` }));
}
} catch (error) {
console.error(`Error checking container status for ${client.user}:`, error.message);
ws.send(JSON.stringify({ type: 'sftp-status', error: `Failed to check container status: ${error.message}` }));
}
}
}
}
} else if (['kick-player', 'ban-player', 'op-player', 'deop-player'].includes(data.type)) {
const { requestId, player } = data;
if (!player) {
ws.send(JSON.stringify({ requestId, error: 'Player name is required' }));
return;
}
if (client.user === 'Unknown') {
ws.send(JSON.stringify({ requestId, error: 'User not identified' }));
return;
}
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status !== 'running') {
ws.send(JSON.stringify({ requestId, error: `Container ${client.user} is not running` }));
return;
}
const command = {
'kick-player': `kick ${player}`,
'ban-player': `ban ${player}`,
'op-player': `op ${player}`,
'deop-player': `deop ${player}`
}[data.type];
const response = await apiRequest('/console', client.apiKey, 'POST', { command });
if (!response.error) {
const playerListResponse = await apiRequest('/list-players', client.apiKey);
if (!playerListResponse.error) {
ws.send(JSON.stringify({ type: 'list-players', data: playerListResponse }));
}
}
ws.send(JSON.stringify({ requestId, ...response }));
} catch (error) {
console.error(`Error processing ${data.type} for ${player}:`, error.message);
ws.send(JSON.stringify({ requestId, error: `Failed to process command: ${error.message}` }));
}
} else if (data.type === 'tell-player') {
const { requestId, player, message } = data;
if (!player || !message) {
ws.send(JSON.stringify({ requestId, error: 'Player name and message are required' }));
return;
}
if (client.user === 'Unknown') {
ws.send(JSON.stringify({ requestId, error: 'User not identified' }));
return;
}
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status !== 'running') {
ws.send(JSON.stringify({ requestId, error: `Container ${client.user} is not running` }));
return;
}
const response = await apiRequest('/tell', client.apiKey, 'POST', { username: player, message });
ws.send(JSON.stringify({ requestId, ...response }));
} catch (error) {
console.error(`Error sending message to ${player}:`, error.message);
ws.send(JSON.stringify({ requestId, error: `Failed to send message: ${error.message}` }));
}
} else if (data.type === 'give-player') {
const { requestId, player, item, amount } = data;
if (!player || !item || !amount) {
ws.send(JSON.stringify({ requestId, error: 'Player name, item, and amount are required' }));
return;
}
if (client.user === 'Unknown') {
ws.send(JSON.stringify({ requestId, error: 'User not identified' }));
return;
}
try {
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status !== 'running') {
ws.send(JSON.stringify({ requestId, error: `Container ${client.user} is not running` }));
return;
}
const response = await apiRequest('/give', client.apiKey, 'POST', { username: player, item, amount });
ws.send(JSON.stringify({ requestId, ...response }));
} catch (error) {
console.error(`Error giving item to ${player}:`, error.message);
ws.send(JSON.stringify({ requestId, error: `Failed to give item: ${error.message}` }));
}
} else if (data.type === 'refresh') {
console.log('Processing refresh request');
delete client.cache['hello'];
delete client.cache['time'];
await Promise.all([
...staticEndpoints.filter(e => client.subscriptions.has(e)).map(e => fetchAndSendUpdate(ws, e, client, docker)),
...dynamicEndpoints.filter(e => client.subscriptions.has(e)).map(e => fetchAndSendUpdate(ws, e, client, docker)),
client.subscriptions.has('list-players') ? fetchAndSendUpdate(ws, 'list-players', client, docker) : null
].filter(Boolean));
if (client.user && client.user !== 'Unknown') {
try {
const stats = await getContainerStats(docker, client.user);
console.log(`Sending refreshed docker stats for ${client.user}:`, stats);
ws.send(JSON.stringify({ type: 'docker', data: { ...stats, user: client.user } }));
const container = docker.getContainer(client.user);
const inspect = await container.inspect();
if (inspect.State.Status === 'running') {
const linkData = client.cache['my-link-cache'];
if (linkData && linkData.hostname && linkData.port && client.subscriptions.has('my-link-cache')) {
console.log(`Performing refresh connection status check for ${client.user}`);
const status = await checkConnectionStatus(linkData.hostname, linkData.port);
ws.send(JSON.stringify({ type: 'connection-status', data: { isOnline: status.isOnline } }));
}
const geyserData = client.cache['my-geyser-cache'];
if (geyserData && geyserData.hostname && geyserData.port && client.subscriptions.has('my-geyser-cache')) {
console.log(`Performing refresh Geyser status check for ${client.user}`);
const status = await checkGeyserStatus(geyserData.hostname, geyserData.port);
ws.send(JSON.stringify({ type: 'geyser-status', data: { isOnline: status.isOnline } }));
}
const sftpData = client.cache['my-sftp-cache'];
if (sftpData && sftpData.hostname && sftpData.port && client.subscriptions.has('my-sftp-cache')) {
console.log(`Performing refresh SFTP status check for ${client.user}`);
const status = await checkSftpStatus(sftpData.hostname, sftpData.port);
ws.send(JSON.stringify({ type: 'sftp-status', data: { isOnline: status.isOnline } }));
}
} else {
if (client.subscriptions.has('my-link-cache')) {
ws.send(JSON.stringify({ type: 'connection-status', error: `Container ${client.user} is not running` }));
}
if (client.subscriptions.has('my-geyser-cache')) {
ws.send(JSON.stringify({ type: 'geyser-status', error: `Container ${client.user} is not running` }));
}
if (client.subscriptions.has('my-sftp-cache')) {
ws.send(JSON.stringify({ type: 'sftp-status', error: `Container ${client.user} is not running` }));
}
}
} catch (error) {
console.error(`Error during refresh for ${client.user}:`, error.message);
ws.send(JSON.stringify({ type: 'docker', error: `Failed to refresh stats: ${error.message}` }));
}
}
}
} catch (error) {
console.error('WebSocket message error:', error.message);
ws.send(JSON.stringify({ error: `Invalid message: ${error.message}` }));
}
});
ws.on('close', () => {
try {
const client = clients.get(ws);
client.intervals.forEach(clearInterval);
if (client.logStream) {
client.logStream.destroy();
client.logStream = null;
}
['connectionStatusInterval', 'geyserStatusInterval', 'sftpStatusInterval', 'statusCheckMonitorInterval'].forEach((key) => {
if (client[key]) clearInterval(client[key]);
});
clients.delete(ws);
console.log('WebSocket client disconnected');
} catch (error) {
console.error('Error on WebSocket close:', error.message);
}
});
ws.on('error', (error) => console.error('WebSocket error:', error.message));
}
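A minimal sketch of how a browser client might talk to this handler; the URL and subscription list are illustrative, while the message shapes (`subscribe`, `request`/`requestId`, typed updates) follow the code above:
```js
// Hypothetical frontend client (e.g., in app.js). The WebSocket URL is a placeholder.
const apiKey = localStorage.getItem('apiKey');
const ws = new WebSocket(`wss://panel.example.com/?apiKey=${apiKey}`);

ws.onopen = () => {
  // Subscriptions control which updates the server pushes back.
  ws.send(JSON.stringify({ type: 'subscribe', endpoints: ['hello', 'docker', 'docker-logs', 'my-link-cache'] }));
  // One-off request/response messages are correlated by requestId.
  ws.send(JSON.stringify({ type: 'request', requestId: 'req-1', endpoint: '/server-properties', method: 'GET' }));
};

ws.onmessage = (event) => {
  const msg = JSON.parse(event.data);
  if (msg.requestId) {
    console.log('Reply to', msg.requestId, msg);
  } else if (msg.error) {
    console.warn(`${msg.type || 'server'} error:`, msg.error);
  } else if (msg.type === 'docker') {
    console.log('Container stats:', msg.data);
  }
};
```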

package.json

@@ -3,6 +3,7 @@
"version": "1.0.0", "version": "1.0.0",
"description": "Web panel for My-MC API with Docker integration", "description": "Web panel for My-MC API with Docker integration",
"main": "server.js", "main": "server.js",
"type": "module",
"scripts": { "scripts": {
"start": "node server.js", "start": "node server.js",
"build:css": "postcss public/css/styles.css -o public/css/styles.min.css", "build:css": "postcss public/css/styles.css -o public/css/styles.min.css",

server.js

File diff suppressed because it is too large.
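Since the server.js diff is suppressed, here is a hypothetical sketch of how the entry point described in the commit message could wire the new includes/ modules together; the route paths and port are assumptions, while the imported functions and their signatures come from this commit:
```js
// index.js — hypothetical wiring of the includes/ modules (route paths and PORT are assumptions).
import 'dotenv/config';
import express from 'express';
import cors from 'cors';
import { createServer } from 'http';
import { WebSocketServer } from 'ws';
import { setupDocker } from './includes/docker.js';
import { handleWebSocket } from './includes/websocket.js';
import { generateLoginLink, handleAutoLogin } from './includes/auth.js';

const app = express();
app.use(cors());
app.use(express.json());
app.use(express.static('public'));

// Admin-generated temporary login links and the auto-login landing page.
app.post('/generate-login-link', generateLoginLink);
app.get('/login/:linkId', handleAutoLogin);

const docker = setupDocker();
const server = createServer(app);

// Hand every WebSocket connection to the handler along with the Docker client.
const wss = new WebSocketServer({ server });
wss.on('connection', (ws, req) => handleWebSocket(ws, req, docker));

const port = process.env.PORT || 3000;
server.listen(port, () => console.log(`HyperMC Panel listening on port ${port}`));
```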