const express = require('express');
const { ChartJSNodeCanvas } = require('chartjs-node-canvas');
const { createCanvas, loadImage } = require('canvas');
const cors = require('cors');
const Docker = require('dockerode');
const WebSocket = require('ws');

const app = express();
const port = 6667;

app.use(cors());

const docker = new Docker();
const wss = new WebSocket.Server({ noServer: true });

// Shared graph dimensions (in pixels).
const metricWidth = 1900;
const metricHeight = 400;
const titleHeight = 100;
const graphMargin = 30;

// Disable caching so every request returns freshly rendered graphs.
app.use((req, res, next) => {
  res.set({
    'Cache-Control': 'no-store, no-cache, must-revalidate, proxy-revalidate',
    'Pragma': 'no-cache',
    'Expires': '0',
    'Surrogate-Control': 'no-store',
  });
  next();
});

const chartJSMetricCanvas = new ChartJSNodeCanvas({
  width: metricWidth,
  height: metricHeight,
  backgroundColour: 'black',
});

// Stream live stats for a container and collect [timestamp, value] pairs for the
// requested metric until the collection window ends.
const fetchMetricData = async (metric, containerId, timeframe = 5) => {
  try {
    const container = docker.getContainer(containerId);
    const statsStream = await container.stats({ stream: true });
    const metrics = [];
    const endTime = Date.now();
    const startTime = endTime - (timeframe * 60 * 1000);

    return new Promise((resolve, reject) => {
      statsStream.on('data', (chunk) => {
        const stat = JSON.parse(chunk.toString());
        const timestamp = new Date(stat.read).getTime();
        if (timestamp < startTime) return;

        let value;
        switch (metric) {
          case 'cpu': {
            const cpuDelta = stat.cpu_stats.cpu_usage.total_usage - (stat.precpu_stats.cpu_usage?.total_usage || 0);
            const systemDelta = stat.cpu_stats.system_cpu_usage - (stat.precpu_stats.system_cpu_usage || 0);
            // Guard against a zero or missing system delta to avoid NaN values.
            value = systemDelta > 0 ? (cpuDelta / systemDelta) * (stat.cpu_stats.online_cpus || 1) * 100 : 0;
            break;
          }
          case 'memory': {
            const activeAnon = stat.memory_stats.stats?.active_anon || 0;
            const inactiveAnon = stat.memory_stats.stats?.inactive_anon || 0;
            const slab = stat.memory_stats.stats?.slab || 0;
            const kernelStack = stat.memory_stats.stats?.kernel_stack || 0;
            const residentMemory = activeAnon + inactiveAnon + slab + kernelStack;
            value = residentMemory / 1024 / 1024; // Convert to MB
            // console.log(`API Memory usage for ${containerId}: ${value.toFixed(2)} MB, Raw stats:`, stat.memory_stats);
            break;
          }
          case 'io': {
            const read = stat.blkio_stats.io_service_bytes_recursive?.find(s => s.op === 'Read')?.value || 0;
            const write = stat.blkio_stats.io_service_bytes_recursive?.find(s => s.op === 'Write')?.value || 0;
            value = { read: read / 1024 / 1024, write: write / 1024 / 1024 }; // Convert to MB
            break;
          }
          case 'pids': {
            value = stat.pids_stats.current || 0;
            break;
          }
          case 'network': {
            const rx = stat.networks?.eth0?.rx_bytes || 0;
            const tx = stat.networks?.eth0?.tx_bytes || 0;
            value = { received: rx / 1024, sent: tx / 1024 }; // Convert to KB
            break;
          }
        }

        metrics.push([Math.floor(timestamp / 1000), value]);

        // Once the samples reach the end of the window, stop streaming and resolve.
        if (timestamp >= endTime) {
          statsStream.destroy();
          resolve({ data: metrics });
        }
      });
      statsStream.on('error', reject);
    });
  } catch (error) {
    console.error(`Error fetching ${metric} data for container ${containerId}:`, error);
    throw new Error(`Failed to fetch ${metric} data.`);
  }
};

// Split collected [timestamp, value] pairs into chart labels and value arrays.
const extractMetrics = (data, metric) => {
  const labels = data.data.map((entry) => new Date(entry[0] * 1000).toLocaleTimeString());
  let values;
  switch (metric) {
    case 'cpu':
    case 'memory':
    case 'pids':
      values = data.data.map(entry => entry[1]);
      break;
    case 'io':
      values = {
        read: data.data.map(entry => entry[1].read),
        write: data.data.map(entry => entry[1].write),
      };
      break;
    case 'network':
      values = {
        received: data.data.map(entry => entry[1].received),
        sent: data.data.map(entry => -entry[1].sent), // Negate sent values so they plot below the axis
      };
      break;
    default:
      values = [];
  }
  return { labels, values };
};

// Render a single line chart to a PNG buffer.
const generateMetricGraph = async (metric, labels, label, borderColor) => {
  const configuration = {
    type: 'line',
    data: {
      labels: labels,
      datasets: [{
        label: label,
        data: metric,
        borderColor: borderColor,
        fill: false,
        tension: 0.1,
      }],
    },
    options: {
      scales: {
        x: {
          title: { display: true, text: 'Time', color: 'white' },
        },
        y: {
          title: { display: true, text: `${label} Usage`, color: 'white' },
        },
      },
      plugins: {
        legend: { labels: { color: 'white' } },
      },
    },
  };
  return chartJSMetricCanvas.renderToBuffer(configuration);
};

// Draw a horizontally centered title onto a node-canvas context.
const drawTitle = (ctx, text, yPos) => {
  ctx.fillStyle = 'white';
  ctx.font = 'bold 40px Arial';
  const textWidth = ctx.measureText(text).width;
  ctx.fillText(text, (metricWidth - textWidth) / 2, yPos);
};

// CPU Usage
app.get('/api/graph/cpu/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';
  try {
    const data = await fetchMetricData('cpu', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }
    const { labels, values } = extractMetrics(data, 'cpu');
    const imageBuffer = await generateMetricGraph(values, labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)');
    res.set('Content-Type', 'image/png');
    res.send(imageBuffer);
  } catch (error) {
    res.status(500).send(`Error generating CPU graph: ${error.message}`);
  }
});

// Memory Usage
app.get('/api/graph/memory/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';
  try {
    const data = await fetchMetricData('memory', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }
    const { labels, values } = extractMetrics(data, 'memory');
    const imageBuffer = await generateMetricGraph(values, labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)');
    res.set('Content-Type', 'image/png');
    res.send(imageBuffer);
  } catch (error) {
    res.status(500).send(`Error generating memory graph: ${error.message}`);
  }
});

// Disk I/O
app.get('/api/graph/io/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';
  try {
    const data = await fetchMetricData('io', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }
    const { labels, values } = extractMetrics(data, 'io');
    const readBuffer = await generateMetricGraph(values.read, labels, 'Disk Read (MB)', 'rgba(54, 255, 132, 1)');
    const writeBuffer = await generateMetricGraph(values.write, labels, 'Disk Write (MB)', 'rgba(255, 99, 255, 1)');

    // Stack the read and write graphs on one canvas, tall enough for both graphs plus titles.
    const canvas = createCanvas(metricWidth, metricHeight * 2 + 110);
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    drawTitle(ctx, `Disk Read for ${containerId}`, 40);
    let img = await loadImage(readBuffer);
    ctx.drawImage(img, 0, 50, metricWidth, metricHeight);

    drawTitle(ctx, `Disk Write for ${containerId}`, metricHeight + 100);
    img = await loadImage(writeBuffer);
    ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight);

    res.set('Content-Type', 'image/png');
    res.send(canvas.toBuffer());
  } catch (error) {
    res.status(500).send('Error generating disk I/O graphs.');
  }
});

// PIDs
app.get('/api/graph/pids/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';
  try {
    const data = await fetchMetricData('pids', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }
    const { labels, values } = extractMetrics(data, 'pids');
    const imageBuffer = await generateMetricGraph(values, labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)');
    res.set('Content-Type', 'image/png');
    res.send(imageBuffer);
  } catch (error) {
    res.status(500).send(`Error generating PIDs graph: ${error.message}`);
  }
});

// Network Traffic
app.get('/api/graph/network/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';
  try {
    const data = await fetchMetricData('network', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }
    const { labels, values } = extractMetrics(data, 'network');
    const receivedBuffer = await generateMetricGraph(values.received, labels, 'Network Received (KB)', 'rgba(75, 192, 192, 1)');
    const sentBuffer = await generateMetricGraph(values.sent, labels, 'Network Sent (KB)', 'rgba(255, 159, 64, 1)');

    // Stack the received and sent graphs on one canvas, tall enough for both graphs plus titles.
    const canvas = createCanvas(metricWidth, metricHeight * 2 + 110);
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    drawTitle(ctx, `Network Received for ${containerId}`, 40);
    let img = await loadImage(receivedBuffer);
    ctx.drawImage(img, 0, 50, metricWidth, metricHeight);

    drawTitle(ctx, `Network Sent for ${containerId}`, metricHeight + 100);
    img = await loadImage(sentBuffer);
    ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight);

    res.set('Content-Type', 'image/png');
    res.send(canvas.toBuffer());
  } catch (error) {
    res.status(500).send('Error generating network graphs.');
  }
});

// Full Report
app.get('/api/graph/full-report/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 5;
  const format = req.query.format || 'graph';
  try {
    const cpuData = await fetchMetricData('cpu', containerId, timeframe);
    const memoryData = await fetchMetricData('memory', containerId, timeframe);
    const ioData = await fetchMetricData('io', containerId, timeframe);
    const pidsData = await fetchMetricData('pids', containerId, timeframe);
    const networkData = await fetchMetricData('network', containerId, timeframe);

    if (format === 'json') {
      return res.json({
        cpu: cpuData,
        memory: memoryData,
        io: ioData,
        pids: pidsData,
        network: networkData,
      });
    }

    const cpuMetrics = extractMetrics(cpuData, 'cpu');
    const memoryMetrics = extractMetrics(memoryData, 'memory');
    const ioMetrics = extractMetrics(ioData, 'io');
    const pidsMetrics = extractMetrics(pidsData, 'pids');
    const networkMetrics = extractMetrics(networkData, 'network');

    const cpuBuffer = await generateMetricGraph(cpuMetrics.values, cpuMetrics.labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)');
    const memoryBuffer = await generateMetricGraph(memoryMetrics.values, memoryMetrics.labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)');
    const ioReadBuffer = await generateMetricGraph(ioMetrics.values.read, ioMetrics.labels, 'Disk Read (MB)', 'rgba(54, 255, 132, 1)');
    const ioWriteBuffer = await generateMetricGraph(ioMetrics.values.write, ioMetrics.labels, 'Disk Write (MB)', 'rgba(255, 99, 255, 1)');
    const pidsBuffer = await generateMetricGraph(pidsMetrics.values, pidsMetrics.labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)');
    const networkReceivedBuffer = await generateMetricGraph(networkMetrics.values.received, networkMetrics.labels, 'Network Received (KB)', 'rgba(75, 192, 192, 1)');
    const networkSentBuffer = await generateMetricGraph(networkMetrics.values.sent, networkMetrics.labels, 'Network Sent (KB)', 'rgba(255, 159, 64, 1)');

    // Stack all seven graphs vertically beneath a single report title.
    const numGraphs = 7;
    const fullReportHeight = titleHeight + (numGraphs * (metricHeight + graphMargin));
    const canvas = createCanvas(metricWidth, fullReportHeight);
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    drawTitle(ctx, `Full Report for ${containerId} (Last ${timeframe} minutes)`, 50);

    const graphs = [cpuBuffer, memoryBuffer, ioReadBuffer, ioWriteBuffer, pidsBuffer, networkReceivedBuffer, networkSentBuffer];
    let yPosition = titleHeight + 20;
    for (const imageBuffer of graphs) {
      const img = await loadImage(imageBuffer);
      ctx.drawImage(img, 0, yPosition, metricWidth, metricHeight);
      yPosition += metricHeight + graphMargin;
    }

    res.set('Content-Type', 'image/png');
    res.send(canvas.toBuffer());
  } catch (error) {
    res.status(500).send('Error generating full report.');
  }
});

// Live Report with WebSocket
app.get('/api/graph/full-report/:containerId/live', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe) || 2;
  const maxPoints = 30;
  const html = `
PID | User | Command |
---|