const express = require('express');
const { ChartJSNodeCanvas } = require('chartjs-node-canvas');
const { createCanvas, loadImage } = require('canvas');
const cors = require('cors');
const Docker = require('dockerode');
const WebSocket = require('ws');

const app = express();
const port = 6667;
app.use(cors());

// Talks to the local Docker daemon (default socket) and hosts a WebSocket
// server that is attached manually via the HTTP upgrade handler below.
const docker = new Docker();
const wss = new WebSocket.Server({ noServer: true });

// Dimensions (in pixels) shared by all rendered graphs.
const metricWidth = 1900;
const metricHeight = 400;
const titleHeight = 100;
const graphMargin = 30;

// Disable caching so every request returns freshly rendered data.
app.use((req, res, next) => {
  res.set({
    'Cache-Control': 'no-store, no-cache, must-revalidate, proxy-revalidate',
    'Pragma': 'no-cache',
    'Expires': '0',
    'Surrogate-Control': 'no-store'
  });
  next();
});

const chartJSMetricCanvas = new ChartJSNodeCanvas({ width: metricWidth, height: metricHeight, backgroundColour: 'black' });
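
// The helpers below pull live stats from the Docker API, reshape them per
// metric, and render Chart.js line charts off-screen to PNG buffers that the
// HTTP routes further down return directly.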

// Collect stats samples for one container and resolve with
// [unix-seconds, value] pairs for the requested metric.
const fetchMetricData = async (metric, containerId, timeframe = 5) => {
  try {
    const container = docker.getContainer(containerId);
    const statsStream = await container.stats({ stream: true });

    const metrics = [];
    const endTime = Date.now();
    const startTime = endTime - (timeframe * 60 * 1000);

    return new Promise((resolve, reject) => {
      statsStream.on('data', (chunk) => {
        const stat = JSON.parse(chunk.toString());
        const timestamp = new Date(stat.read).getTime();

        // Ignore samples older than the requested timeframe.
        if (timestamp < startTime) return;

        let value;
        switch (metric) {
          case 'cpu': {
            const cpuDelta = stat.cpu_stats.cpu_usage.total_usage - (stat.precpu_stats.cpu_usage?.total_usage || 0);
            const systemDelta = stat.cpu_stats.system_cpu_usage - (stat.precpu_stats.system_cpu_usage || 0);
            // Guard against a zero system delta on the first sample.
            value = systemDelta > 0 ? (cpuDelta / systemDelta) * stat.cpu_stats.online_cpus * 100 : 0;
            break;
          }
          case 'memory': {
            // Sum the resident parts of the cgroup memory stats.
            const activeAnon = stat.memory_stats.stats?.active_anon || 0;
            const inactiveAnon = stat.memory_stats.stats?.inactive_anon || 0;
            const slab = stat.memory_stats.stats?.slab || 0;
            const kernelStack = stat.memory_stats.stats?.kernel_stack || 0;
            const residentMemory = activeAnon + inactiveAnon + slab + kernelStack;
            value = residentMemory / 1024 / 1024; // Convert to MB
            // console.log(`API Memory usage for ${containerId}: ${value.toFixed(2)} MB, Raw stats:`, stat.memory_stats);
            break;
          }
          case 'io': {
            const read = stat.blkio_stats.io_service_bytes_recursive?.find(s => s.op === 'Read')?.value || 0;
            const write = stat.blkio_stats.io_service_bytes_recursive?.find(s => s.op === 'Write')?.value || 0;
            value = { read: read / 1024 / 1024, write: write / 1024 / 1024 }; // Convert to MB
            break;
          }
          case 'pids':
            value = stat.pids_stats.current || 0;
            break;
          case 'network': {
            const rx = stat.networks?.eth0?.rx_bytes || 0;
            const tx = stat.networks?.eth0?.tx_bytes || 0;
            value = { received: rx / 1024, sent: tx / 1024 }; // Convert to KB
            break;
          }
        }

        metrics.push([Math.floor(timestamp / 1000), value]);

        // Stop once we have reached the time of the request.
        if (timestamp >= endTime) {
          statsStream.destroy();
          resolve({ data: metrics });
        }
      });

      statsStream.on('error', reject);
    });
  } catch (error) {
    console.error(`Error fetching ${metric} data for container ${containerId}:`, error);
    throw new Error(`Failed to fetch ${metric} data.`);
  }
};
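
// Shape of the resolved value (illustrative):
//   { data: [ [<unix seconds>, <value>], ... ] }
// where <value> is a number for cpu/memory/pids, { read, write } in MB for io,
// and { received, sent } in KB for network.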

// Split fetched samples into Chart.js-friendly labels (local time strings)
// and value arrays for the given metric.
const extractMetrics = (data, metric) => {
  const labels = data.data.map((entry) => new Date(entry[0] * 1000).toLocaleTimeString());
  let values;

  switch (metric) {
    case 'cpu':
    case 'memory':
    case 'pids':
      values = data.data.map(entry => entry[1]);
      break;
    case 'io':
      values = {
        read: data.data.map(entry => entry[1].read),
        write: data.data.map(entry => entry[1].write),
      };
      break;
    case 'network':
      values = {
        received: data.data.map(entry => entry[1].received),
        sent: data.data.map(entry => -entry[1].sent), // Negate sent values so they plot below zero
      };
      break;
    default:
      values = [];
  }

  return { labels, values };
};

const generateMetricGraph = async (metric, labels, label, borderColor) => {
  const configuration = {
    type: 'line',
    data: {
      labels: labels,
      datasets: [{
        label: label,
        data: metric,
        borderColor: borderColor,
        fill: false,
        tension: 0.1,
      }],
    },
    options: {
      scales: {
        x: {
          title: {
            display: true,
            text: 'Time',
            color: 'white',
          },
        },
        y: {
          title: {
            display: true,
            text: `${label} Usage`,
            color: 'white',
          },
        },
      },
      plugins: {
        legend: {
          labels: {
            color: 'white',
          },
        },
      },
    },
  };

  return chartJSMetricCanvas.renderToBuffer(configuration);
};

// Draw a centered white title at the given y position.
const drawTitle = (ctx, text, yPos) => {
  ctx.fillStyle = 'white';
  ctx.font = 'bold 40px Arial';
  const textWidth = ctx.measureText(text).width;
  ctx.fillText(text, (metricWidth - textWidth) / 2, yPos);
};
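
// Example requests (assumed local deployment on the port above; "my-container"
// is a hypothetical container name or id):
//   GET /api/graph/cpu/my-container?timeframe=10              -> PNG line chart
//   GET /api/graph/cpu/my-container?timeframe=10&format=json  -> raw samples as JSON
// The memory, io, pids, network and full-report routes below accept the same
// timeframe (minutes) and format query parameters.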

// CPU Usage
app.get('/api/graph/cpu/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('cpu', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'cpu');
    const imageBuffer = await generateMetricGraph(values, labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)');
    res.set('Content-Type', 'image/png');
    res.send(imageBuffer);
  } catch (error) {
    res.status(500).send(`Error generating CPU graph: ${error.message}`);
  }
});

// Memory Usage
app.get('/api/graph/memory/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('memory', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'memory');
    const imageBuffer = await generateMetricGraph(values, labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)');
    res.set('Content-Type', 'image/png');
    res.send(imageBuffer);
  } catch (error) {
    res.status(500).send(`Error generating memory graph: ${error.message}`);
  }
});

// Disk I/O
app.get('/api/graph/io/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('io', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'io');
    const readBuffer = await generateMetricGraph(values.read, labels, 'Disk Read (MB)', 'rgba(54, 255, 132, 1)');
    const writeBuffer = await generateMetricGraph(values.write, labels, 'Disk Write (MB)', 'rgba(255, 99, 255, 1)');

    // Stack the read and write charts vertically on one canvas, each with its own title.
    const canvas = createCanvas(metricWidth, metricHeight * 2 + 100);
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    drawTitle(ctx, `Disk Read for ${containerId}`, 40);
    let img = await loadImage(readBuffer);
    ctx.drawImage(img, 0, 50, metricWidth, metricHeight);

    drawTitle(ctx, `Disk Write for ${containerId}`, metricHeight + 100);
    img = await loadImage(writeBuffer);
    ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight);

    res.set('Content-Type', 'image/png');
    res.send(canvas.toBuffer());
  } catch (error) {
    res.status(500).send('Error generating disk I/O graphs.');
  }
});

// PIDs
app.get('/api/graph/pids/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('pids', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'pids');
    const imageBuffer = await generateMetricGraph(values, labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)');
    res.set('Content-Type', 'image/png');
    res.send(imageBuffer);
  } catch (error) {
    res.status(500).send(`Error generating PIDs graph: ${error.message}`);
  }
});

// Network Traffic
app.get('/api/graph/network/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('network', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'network');

    const receivedBuffer = await generateMetricGraph(values.received, labels, 'Network Received (KB)', 'rgba(75, 192, 192, 1)');
    const sentBuffer = await generateMetricGraph(values.sent, labels, 'Network Sent (KB)', 'rgba(255, 159, 64, 1)');

    const canvas = createCanvas(metricWidth, metricHeight * 2 + 100);
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    drawTitle(ctx, `Network Received for ${containerId}`, 40);
    let img = await loadImage(receivedBuffer);
    ctx.drawImage(img, 0, 50, metricWidth, metricHeight);

    drawTitle(ctx, `Network Sent for ${containerId}`, metricHeight + 100);
    img = await loadImage(sentBuffer);
    ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight);

    res.set('Content-Type', 'image/png');
    res.send(canvas.toBuffer());
  } catch (error) {
    res.status(500).send('Error generating network graphs.');
  }
});

// Full Report
app.get('/api/graph/full-report/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';

  try {
    const cpuData = await fetchMetricData('cpu', containerId, timeframe);
    const memoryData = await fetchMetricData('memory', containerId, timeframe);
    const ioData = await fetchMetricData('io', containerId, timeframe);
    const pidsData = await fetchMetricData('pids', containerId, timeframe);
    const networkData = await fetchMetricData('network', containerId, timeframe);

    if (format === 'json') {
      return res.json({
        cpu: cpuData,
        memory: memoryData,
        io: ioData,
        pids: pidsData,
        network: networkData,
      });
    }

    const cpuMetrics = extractMetrics(cpuData, 'cpu');
    const memoryMetrics = extractMetrics(memoryData, 'memory');
    const ioMetrics = extractMetrics(ioData, 'io');
    const pidsMetrics = extractMetrics(pidsData, 'pids');
    const networkMetrics = extractMetrics(networkData, 'network');

    const cpuBuffer = await generateMetricGraph(cpuMetrics.values, cpuMetrics.labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)');
    const memoryBuffer = await generateMetricGraph(memoryMetrics.values, memoryMetrics.labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)');
    const ioReadBuffer = await generateMetricGraph(ioMetrics.values.read, ioMetrics.labels, 'Disk Read (MB)', 'rgba(54, 255, 132, 1)');
    const ioWriteBuffer = await generateMetricGraph(ioMetrics.values.write, ioMetrics.labels, 'Disk Write (MB)', 'rgba(255, 99, 255, 1)');
    const pidsBuffer = await generateMetricGraph(pidsMetrics.values, pidsMetrics.labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)');
    const networkReceivedBuffer = await generateMetricGraph(networkMetrics.values.received, networkMetrics.labels, 'Network Received (KB)', 'rgba(75, 192, 192, 1)');
    const networkSentBuffer = await generateMetricGraph(networkMetrics.values.sent, networkMetrics.labels, 'Network Sent (KB)', 'rgba(255, 159, 64, 1)');

    // One column: a title bar followed by seven stacked graphs.
    const numGraphs = 7;
    const fullReportHeight = titleHeight + (numGraphs * (metricHeight + graphMargin));

    const canvas = createCanvas(metricWidth, fullReportHeight);
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    drawTitle(ctx, `Full Report for ${containerId} (Last ${timeframe} minutes)`, 50);

    const graphs = [cpuBuffer, memoryBuffer, ioReadBuffer, ioWriteBuffer, pidsBuffer, networkReceivedBuffer, networkSentBuffer];
    let yPosition = titleHeight + 20;

    for (const imageBuffer of graphs) {
      const img = await loadImage(imageBuffer);
      ctx.drawImage(img, 0, yPosition, metricWidth, metricHeight);
      yPosition += metricHeight + graphMargin;
    }

    res.set('Content-Type', 'image/png');
    res.send(canvas.toBuffer());
  } catch (error) {
    res.status(500).send('Error generating full report.');
  }
});

// Live Report with WebSocket
app.get('/api/graph/full-report/:containerId/live', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 2;
  const maxPoints = 30;

  const html = `
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Live Report for ${containerId}</title>
  <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha1/dist/css/bootstrap.min.css" rel="stylesheet">
  <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
  <script src="https://cdn.jsdelivr.net/npm/particles.js"></script>
  <style>
    body {
      background-color: #1c1c1c;
      color: white;
      font-family: Arial, sans-serif;
      position: relative;
    }
    .chart-container {
      position: relative;
      height: 250px;
    }
    h1, h3 {
      color: #f0f0f0;
    }
    canvas {
      background-color: #2a2a2a;
      border-radius: 8px;
    }
    .process-table {
      margin-top: 20px;
      color: white;
      width: 100%;
      border-collapse: collapse;
      font-size: 0.9rem;
      background-color: #1e1e1e;
    }
    .process-table th, .process-table td {
      padding: 10px;
      text-align: center;
      border: 1px solid #444;
    }
    .process-table th {
      background-color: #2e2e2e;
      font-weight: 600;
      color: #cfcfcf;
    }
    .process-table td {
      background-color: #262626;
      color: #bbbbbb;
    }
    .process-table tr:nth-child(even) {
      background-color: #2a2a2a;
    }
    .process-table tr:hover {
      background-color: #333;
      color: #f1f1f1;
    }
    .particle-container {
      position: fixed;
      top: 0;
      left: 0;
      width: 100%;
      height: 100%;
      z-index: -1;
      pointer-events: none;
    }
    #processSearch {
      width: 100%;
      padding: 8px;
      margin-bottom: 12px;
      background-color: #333;
      border: 1px solid #3a3a3a;
      color: #fff;
      border-radius: 4px;
      outline: none;
    }
    #processSearch:focus {
      border-color: #3a3a3a;
      box-shadow: 0 0 5px rgba(58, 58, 58, 0.6);
    }
    ::-webkit-scrollbar {
      width: 8px;
    }
    ::-webkit-scrollbar-track {
      background-color: #2a2a2a;
    }
    ::-webkit-scrollbar-thumb {
      background-color: #444;
      border-radius: 10px;
    }
    ::-webkit-scrollbar-thumb:hover {
      background-color: #555;
    }
  </style>
</head>
<body>
  <div class="container mt-4">
    <h3 class="text-center">Live Report for ${containerId}</h3>
    <div class="row">
      <div class="col-md-4">
        <h3 class="text-center">CPU Usage</h3>
        <div class="chart-container">
          <canvas id="cpuChart"></canvas>
        </div>
      </div>
      <div class="col-md-4">
        <h3 class="text-center">Memory Usage</h3>
        <div class="chart-container">
          <canvas id="memoryChart"></canvas>
        </div>
      </div>
      <div class="col-md-4">
        <h3 class="text-center">Network Speed</h3>
        <div class="chart-container">
          <canvas id="networkChart"></canvas>
        </div>
      </div>
    </div>
    <input type="text" id="processSearch" placeholder="Search processes..." onkeyup="filterProcessTable()" autofocus>
    <table class="process-table">
      <thead>
        <tr>
          <th>PID</th>
          <th>User</th>
          <th>Command</th>
        </tr>
      </thead>
      <tbody id="processTableBody"></tbody>
    </table>
  </div>
  <div id="fullPageParticles" class="particle-container"></div>
  <script>
    const cpuCtx = document.getElementById('cpuChart').getContext('2d');
    const memoryCtx = document.getElementById('memoryChart').getContext('2d');
    const networkCtx = document.getElementById('networkChart').getContext('2d');
    let lastSearch = '';

    function createParticles(particleId) {
      particlesJS(particleId, {
        particles: {
          number: { value: 200, density: { enable: true, value_area: 800 } },
          color: { value: "#ffffff" },
          shape: { type: "circle", stroke: { width: 0, color: "#000000" } },
          opacity: { value: 0.5, anim: { enable: true, speed: 1 } },
          size: { value: 3, random: true },
          line_linked: { enable: false },
          move: {
            enable: true,
            speed: 1.5,
            direction: "none",
            random: false,
            out_mode: "out",
            bounce: false
          }
        },
        retina_detect: true
      });
    }

    createParticles('fullPageParticles');

    // Append one sample to a chart and trim old points. The shared x-axis
    // label is only pushed for dataset 0 so multi-dataset charts (the network
    // chart) do not collect duplicate labels.
    function updateChartData(chart, label, dataSetIndex, newData) {
      if (dataSetIndex === 0) {
        chart.data.labels.push(label);
        if (chart.data.labels.length > ${maxPoints}) {
          chart.data.labels.shift();
        }
      }
      const seriesData = chart.data.datasets[dataSetIndex].data;
      seriesData.push(newData);
      if (seriesData.length > ${maxPoints}) {
        seriesData.shift();
      }
      chart.update();
    }

    const cpuChart = new Chart(cpuCtx, {
      type: 'line',
      data: {
        labels: [],
        datasets: [{
          label: 'CPU Usage (%)',
          data: [],
          borderColor: 'rgba(255, 99, 132, 1)',
          borderWidth: 2,
          pointRadius: 3,
          fill: false,
        }]
      },
      options: {
        animation: { duration: 500 },
        responsive: true,
        maintainAspectRatio: false,
        scales: {
          x: { title: { display: true, text: '', color: '#ffffff' }, grid: { color: 'rgba(255, 255, 255, 0.1)' }},
          y: { title: { display: true, text: 'CPU (%)', color: '#ffffff' }, grid: { color: 'rgba(255, 255, 255, 0.1)' }}
        },
        plugins: { legend: { display: false } }
      }
    });

    const memoryChart = new Chart(memoryCtx, {
      type: 'line',
      data: {
        labels: [],
        datasets: [{
          label: 'Memory Usage (MB)',
          data: [],
          borderColor: 'rgba(54, 162, 235, 1)',
          borderWidth: 2,
          pointRadius: 3,
          fill: false,
        }]
      },
      options: {
        animation: { duration: 500 },
        responsive: true,
        maintainAspectRatio: false,
        scales: {
          x: { title: { display: true, text: '', color: '#ffffff' }, grid: { color: 'rgba(255, 255, 255, 0.1)' }},
          y: { title: { display: true, text: 'Memory (MB)', color: '#ffffff' }, grid: { color: 'rgba(255, 255, 255, 0.1)' }}
        },
        plugins: { legend: { display: false } }
      }
    });

    let currentNetworkUnit = 'KB/s';

    const networkChart = new Chart(networkCtx, {
      type: 'line',
      data: {
        labels: [],
        datasets: [
          { label: 'Network Received (KB/s)', data: [], borderColor: 'rgba(75, 192, 192, 1)', borderWidth: 2, pointRadius: 3, fill: false },
          { label: 'Network Sent (KB/s)', data: [], borderColor: 'rgba(255, 159, 64, 1)', borderWidth: 2, pointRadius: 3, fill: false }
        ]
      },
      options: {
        animation: { duration: 500 },
        responsive: true,
        maintainAspectRatio: false,
        scales: {
          x: { title: { display: true, text: '', color: '#ffffff' }, grid: { color: 'rgba(255, 255, 255, 0.1)' }},
          y: { title: { display: true, text: 'Network (KB/s)', color: '#ffffff' }, grid: { color: 'rgba(255, 255, 255, 0.1)' }}
        },
        plugins: { legend: { display: true } }
      }
    });

    // Switch the y axis between KB/s and MB/s based on the latest sample.
    // Only the new sample is rescaled; previously plotted points keep their scale.
    function updateNetworkUnits(received, sent) {
      const maxValue = Math.max(Math.abs(received), Math.abs(sent));
      if (maxValue > 1024) {
        if (currentNetworkUnit === 'KB/s') {
          networkChart.options.scales.y.title.text = 'Network (MB/s)';
          networkChart.data.datasets[0].label = 'Network Received (MB/s)';
          networkChart.data.datasets[1].label = 'Network Sent (MB/s)';
          currentNetworkUnit = 'MB/s';
        }
        return { received: received / 1024, sent: sent / 1024 };
      } else {
        if (currentNetworkUnit === 'MB/s') {
          networkChart.options.scales.y.title.text = 'Network (KB/s)';
          networkChart.data.datasets[0].label = 'Network Received (KB/s)';
          networkChart.data.datasets[1].label = 'Network Sent (KB/s)';
          currentNetworkUnit = 'KB/s';
        }
        return { received, sent };
      }
    }

    // Hide table rows that do not match the search box.
    function filterProcessTable() {
      const searchInput = document.getElementById('processSearch').value.toLowerCase();
      lastSearch = searchInput;
      const table = document.getElementById('processTableBody');
      const rows = table.getElementsByTagName('tr');
      for (let i = 0; i < rows.length; i++) {
        const rowData = rows[i].innerText.toLowerCase();
        rows[i].style.display = rowData.includes(searchInput) ? '' : 'none';
      }
    }

    // Rebuild the process table, skipping infrastructure commands, then
    // re-apply the current search filter.
    function updateProcessList(processList) {
      const processTableBody = document.getElementById('processTableBody');
      processTableBody.innerHTML = '';

      processList.forEach(proc => {
        const command = proc[proc.length - 1].toLowerCase();
        if (!command.includes("holesail") && !command.includes("null") && !command.includes("/start.sh")) {
          const row = document.createElement('tr');
          row.classList.add('fadeIn');
          const pidCell = document.createElement('td');
          const userCell = document.createElement('td');
          const commandCell = document.createElement('td');

          pidCell.textContent = proc[1];
          userCell.textContent = proc[0];
          commandCell.textContent = proc[proc.length - 1];

          row.appendChild(pidCell);
          row.appendChild(userCell);
          row.appendChild(commandCell);
          processTableBody.appendChild(row);
        }
      });

      filterProcessTable();
    }

    // Stream live samples over the /ws/<containerId> WebSocket
    // (see the upgrade handler further down in this file).
    const ws = new WebSocket('wss://live.syscall.lol/ws/${containerId}');
    ws.onmessage = function(event) {
      const data = JSON.parse(event.data);
      const latestTime = new Date(data.timestamp * 1000).toLocaleTimeString();

      updateChartData(cpuChart, latestTime, 0, data.cpu);
      updateChartData(memoryChart, latestTime, 0, data.memory);
      const updatedValues = updateNetworkUnits(data.network.received, data.network.sent);
      updateChartData(networkChart, latestTime, 0, updatedValues.received);
      updateChartData(networkChart, latestTime, 1, updatedValues.sent);

      if (data.processes) {
        updateProcessList(data.processes);
      }
    };

    ws.onclose = function() {
      console.log('WebSocket connection closed');
    };

    ws.onerror = function(error) {
      console.error('WebSocket error:', error);
    };
  </script>
</body>
</html>
`;

  res.send(html);
});

// Current process list for a container (docker top).
app.get('/api/processes/:containerId', async (req, res) => {
  const { containerId } = req.params;
  let processList = [];
  try {
    const container = docker.getContainer(containerId);
    const processes = await container.top();
    processList = processes.Processes || [];
  } catch (err) {
    console.error(`Error fetching processes for container ${containerId}:`, err);
    return res.status(500).json({ error: 'Failed to fetch processes' });
  }
  res.json(processList);
});

// WebSocket handling
const server = app.listen(port, "0.0.0.0", () => {
  console.log(`Server running on http://localhost:${port}`);
});

// Accept WebSocket upgrades only on /ws/<containerId> paths.
server.on('upgrade', (request, socket, head) => {
  const pathname = request.url;
  if (pathname.startsWith('/ws/')) {
    wss.handleUpgrade(request, socket, head, (ws) => {
      wss.emit('connection', ws, request);
    });
  } else {
    socket.destroy();
  }
});
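
// Each message pushed to a connected client is a JSON object of the form
// (values as computed below):
//   { timestamp: <unix seconds>, cpu: <percent>, memory: <MB>,
//     network: { received: <KB/s>, sent: <negative KB/s> }, processes: [...] }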

wss.on('connection', (ws, request) => {
  const containerId = request.url.split('/').pop();
  let statsStream;
  let prevNetwork = { rx: 0, tx: 0, timestamp: 0 };

  const sendStats = async () => {
    try {
      const container = docker.getContainer(containerId);
      statsStream = await container.stats({ stream: true });

      statsStream.on('data', async (chunk) => {
        const stat = JSON.parse(chunk.toString());
        const currentTimestamp = new Date(stat.read).getTime() / 1000; // seconds

        // CPU usage (%), guarding against a zero system delta on the first sample.
        const cpuDelta = stat.cpu_stats.cpu_usage.total_usage - (stat.precpu_stats.cpu_usage?.total_usage || 0);
        const systemDelta = stat.cpu_stats.system_cpu_usage - (stat.precpu_stats.system_cpu_usage || 0);
        const cpu = systemDelta > 0 ? (cpuDelta / systemDelta) * stat.cpu_stats.online_cpus * 100 : 0;

        // Resident memory (MB), summed from the cgroup memory stats.
        const activeAnon = stat.memory_stats.stats?.active_anon || 0;
        const inactiveAnon = stat.memory_stats.stats?.inactive_anon || 0;
        const slab = stat.memory_stats.stats?.slab || 0;
        const kernelStack = stat.memory_stats.stats?.kernel_stack || 0;
        const residentMemory = activeAnon + inactiveAnon + slab + kernelStack;
        const memory = residentMemory / 1024 / 1024; // Convert to MB
        // console.log(`Live Memory usage for ${containerId}: ${memory.toFixed(2)} MB, Raw stats:`, stat.memory_stats);

        // Network rates (KB/s) from the byte counters of the previous sample.
        const rx = stat.networks?.eth0?.rx_bytes || 0;
        const tx = stat.networks?.eth0?.tx_bytes || 0;

        let network = { received: 0, sent: 0 };
        if (prevNetwork.timestamp !== 0) {
          const timeDiff = currentTimestamp - prevNetwork.timestamp; // seconds
          if (timeDiff > 0) {
            network.received = (rx - prevNetwork.rx) / 1024 / timeDiff; // KB/s
            network.sent = -(tx - prevNetwork.tx) / 1024 / timeDiff; // Negate sent (KB/s)
          }
        }
        prevNetwork = { rx, tx, timestamp: currentTimestamp };

        let processes = [];
        try {
          const processData = await container.top();
          processes = processData.Processes || [];
        } catch (err) {
          console.error(`Error fetching processes for ${containerId}:`, err);
          processes = [];
        }

        ws.send(JSON.stringify({
          timestamp: Math.floor(currentTimestamp),
          cpu,
          memory,
          network,
          processes
        }));
      });

      statsStream.on('error', (err) => {
        console.error(`Stats stream error for ${containerId}:`, err);
        ws.close();
      });
    } catch (err) {
      console.error(`Error setting up stats for ${containerId}:`, err);
      ws.close();
    }
  };

  sendStats();

  // Stop streaming Docker stats when the client disconnects.
  ws.on('close', () => {
    if (statsStream) statsStream.destroy();
  });
});