From e9b4851d669d2c2a24dfc97205121f26402f36b2 Mon Sep 17 00:00:00 2001 From: dlinux-host Date: Tue, 22 Jul 2025 19:54:30 -0400 Subject: [PATCH] first commit --- .gitignore | 3 + graph.js | 779 ++++++++++++++++++++++++++++++++++++++++++++ live-view.js | 823 +++++++++++++++++++++++++++++++++++++++++++++++ package.json | 14 + public/live.html | 21 ++ public/live.js | 157 +++++++++ 6 files changed, 1797 insertions(+) create mode 100644 .gitignore create mode 100644 graph.js create mode 100644 live-view.js create mode 100644 package.json create mode 100644 public/live.html create mode 100644 public/live.js diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7545483 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +node_modules +package-lock.json +.env diff --git a/graph.js b/graph.js new file mode 100644 index 0000000..246aad7 --- /dev/null +++ b/graph.js @@ -0,0 +1,779 @@ +const express = require('express'); +const { ChartJSNodeCanvas } = require('chartjs-node-canvas'); +const { createCanvas, loadImage } = require('canvas'); +const axios = require('axios'); +const cors = require('cors'); // Import CORS middleware + +const app = express(); +const port = 6666; +app.use(cors()); // Allows all origins (wildcard *) + +const metricWidth = 1900; +const metricHeight = 400; +const titleHeight = 100; +const graphMargin = 30; + +app.use((req, res, next) => { + res.set({ + 'Cache-Control': 'no-store, no-cache, must-revalidate, proxy-revalidate', + 'Pragma': 'no-cache', + 'Expires': '0', + 'Surrogate-Control': 'no-store' + }); + next(); +}); + +const chartJSMetricCanvas = new ChartJSNodeCanvas({ width: metricWidth, height: metricHeight, backgroundColour: 'black' }); + +const getEndpoints = (containerId, timeframe) => { + const after = -(timeframe * 60); + return { + cpu: `http://192.168.133.37:19999/api/v3/data?chart=cgroup_${containerId}.cpu&format=json&after=${after}&dimensions=user,system`, + memory: `http://192.168.133.37:19999/api/v3/data?chart=cgroup_${containerId}.mem_usage&format=json&after=${after}&dimensions=used`, + io: `http://192.168.133.37:19999/api/v3/data?chart=cgroup_${containerId}.io&format=json&after=${after}&dimensions=read,write`, + pids: `http://192.168.133.37:19999/api/v3/data?chart=cgroup_${containerId}.pids_current&format=json&after=${after}&dimensions=current`, + network: `http://192.168.133.37:19999/api/v3/data?chart=cgroup_${containerId}.net_eth0&format=json&after=${after}&dimensions=received,sent`, + }; +}; + +const fetchMetricData = async (metric, containerId, timeframe = 5) => { + const endpoints = getEndpoints(containerId, timeframe); + try { + const response = await axios.get(endpoints[metric]); + return response.data; + } catch (error) { + console.error(`Error fetching ${metric} data for container ${containerId}:`, error); + throw new Error(`Failed to fetch ${metric} data.`); + } +}; + +const extractMetrics = (data, metric) => { + const labels = data.result.data.map((entry) => new Date(entry[0] * 1000).toLocaleTimeString()); + let values; + + switch (metric) { + case 'cpu': + values = data.result.data.map(entry => entry[1] + entry[2]); + break; + case 'memory': + values = data.result.data.map(entry => entry[1] / 1024); // Convert KiB to MB + break; + case 'io': + values = { + read: data.result.data.map(entry => entry[1] / 1024), // Convert KiB/s to MB/s + write: data.result.data.map(entry => -entry[2] / 1024), // Convert KiB/s to MB/s and make positive + }; + break; + case 'pids': + values = data.result.data.map(entry => entry[1]); + break; + case 
'network': + values = { + received: data.result.data.map(entry => entry[1] / 8), // Convert Kbits/s to KB/s + sent: data.result.data.map(entry => -entry[2] / 8), // Convert Kbits/s to KB/s and make positive + }; + break; + default: + values = []; + } + + return { labels, values }; +}; + +const generateMetricGraph = async (metric, labels, label, borderColor) => { + const configuration = { + type: 'line', + data: { + labels: labels, + datasets: [{ + label: label, + data: metric, + borderColor: borderColor, + fill: false, + tension: 0.1, + }], + }, + options: { + scales: { + x: { + title: { + display: true, + text: 'Time', + color: 'white', + }, + }, + y: { + title: { + display: true, + text: `${label} Usage`, + color: 'white', + }, + }, + }, + plugins: { + legend: { + labels: { + color: 'white', + }, + }, + }, + }, + }; + + return chartJSMetricCanvas.renderToBuffer(configuration); +}; + +// Draw title on the canvas +const drawTitle = (ctx, text, yPos) => { + ctx.fillStyle = 'white'; // Set text color + ctx.font = 'bold 40px Arial'; // Set font size and style + const textWidth = ctx.measureText(text).width; // Measure the width of the text + ctx.fillText(text, (metricWidth - textWidth) / 2, yPos); // Center the text horizontally +}; + + +// CPU Usage +app.get('/api/graph/cpu/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('cpu', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'cpu'); + const imageBuffer = await generateMetricGraph(values, labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)'); + res.set('Content-Type', 'image/png'); + res.send(imageBuffer); + } catch (error) { + res.status(500).send(`Error generating CPU graph: ${error.message}`); + } +}); + +// Memory Usage +app.get('/api/graph/memory/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('memory', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'memory'); + const imageBuffer = await generateMetricGraph(values, labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)'); + res.set('Content-Type', 'image/png'); + res.send(imageBuffer); + } catch (error) { + res.status(500).send(`Error generating memory graph: ${error.message}`); + } +}); + +// Disk I/O +app.get('/api/graph/io/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('io', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'io'); + const readBuffer = await generateMetricGraph(values.read, labels, 'Disk Read (MB/s)', 'rgba(54, 255, 132, 1)'); + const writeBuffer = await generateMetricGraph(values.write, labels, 'Disk Write (MB/s)', 'rgba(255, 99, 255, 1)'); + + const canvas = createCanvas(metricWidth, metricHeight * 2 + 100); + const ctx = canvas.getContext('2d'); + ctx.fillStyle = 'black'; + ctx.fillRect(0, 0, canvas.width, canvas.height); + + drawTitle(ctx, `Disk Read for ${containerId}`, 40); + let img = await 
loadImage(readBuffer); + ctx.drawImage(img, 0, 50, metricWidth, metricHeight); + + drawTitle(ctx, `Disk Write for ${containerId}`, metricHeight + 100); + img = await loadImage(writeBuffer); + ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight); + + res.set('Content-Type', 'image/png'); + res.send(canvas.toBuffer()); + } catch (error) { + res.status(500).send('Error generating disk I/O graphs.'); + } +}); + +// PIDs +app.get('/api/graph/pids/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('pids', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'pids'); + const imageBuffer = await generateMetricGraph(values, labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)'); + res.set('Content-Type', 'image/png'); + res.send(imageBuffer); + } catch (error) { + res.status(500).send(`Error generating PIDs graph: ${error.message}`); + } +}); + +// Network Traffic +app.get('/api/graph/network/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('network', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'network'); + + const receivedBuffer = await generateMetricGraph(values.received, labels, 'Network Received (KB/s)', 'rgba(75, 192, 192, 1)'); + const sentBuffer = await generateMetricGraph(values.sent, labels, 'Network Sent (KB/s)', 'rgba(255, 159, 64, 1)'); + + const canvas = createCanvas(metricWidth, metricHeight * 2 + 100); + const ctx = canvas.getContext('2d'); + ctx.fillStyle = 'black'; + ctx.fillRect(0, 0, canvas.width, canvas.height); + + drawTitle(ctx, `Network Received for ${containerId}`, 40); + let img = await loadImage(receivedBuffer); + ctx.drawImage(img, 0, 50, metricWidth, metricHeight); + + drawTitle(ctx, `Network Sent for ${containerId}`, metricHeight + 100); + img = await loadImage(sentBuffer); + ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight); + + res.set('Content-Type', 'image/png'); + res.send(canvas.toBuffer()); + } catch (error) { + res.status(500).send('Error generating network graphs.'); + } +}); + +// Full Report +app.get('/api/graph/full-report/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const cpuData = await fetchMetricData('cpu', containerId, timeframe); + const memoryData = await fetchMetricData('memory', containerId, timeframe); + const ioData = await fetchMetricData('io', containerId, timeframe); + const pidsData = await fetchMetricData('pids', containerId, timeframe); + const networkData = await fetchMetricData('network', containerId, timeframe); + + + if (format === 'json') { + return res.json({ + cpu: cpuData, + memory: memoryData, + io: ioData, + pids: pidsData, + network: networkData, + }); + } + + const cpuMetrics = extractMetrics(cpuData, 'cpu'); + const memoryMetrics = extractMetrics(memoryData, 'memory'); + const ioMetrics = extractMetrics(ioData, 'io'); + const pidsMetrics = extractMetrics(pidsData, 'pids'); + const networkMetrics = extractMetrics(networkData, 'network'); + + const 
cpuBuffer = await generateMetricGraph(cpuMetrics.values, cpuMetrics.labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)'); + const memoryBuffer = await generateMetricGraph(memoryMetrics.values, memoryMetrics.labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)'); + const ioReadBuffer = await generateMetricGraph(ioMetrics.values.read, ioMetrics.labels, 'Disk Read (MB/s)', 'rgba(54, 255, 132, 1)'); + const ioWriteBuffer = await generateMetricGraph(ioMetrics.values.write, ioMetrics.labels, 'Disk Write (MB/s)', 'rgba(255, 99, 255, 1)'); + const pidsBuffer = await generateMetricGraph(pidsMetrics.values, pidsMetrics.labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)'); + const networkReceivedBuffer = await generateMetricGraph(networkMetrics.values.received, networkMetrics.labels, 'Network Received (KB/s)', 'rgba(75, 192, 192, 1)'); + const networkSentBuffer = await generateMetricGraph(networkMetrics.values.sent, networkMetrics.labels, 'Network Sent (KB/s)', 'rgba(255, 159, 64, 1)'); + + const numGraphs = 7; + const fullReportHeight = titleHeight + (numGraphs * (metricHeight + graphMargin)); + + const canvas = createCanvas(metricWidth, fullReportHeight); + const ctx = canvas.getContext('2d'); + ctx.fillStyle = 'black'; + ctx.fillRect(0, 0, canvas.width, canvas.height); + + drawTitle(ctx, `Full Report for ${containerId} (Last ${timeframe} minutes)`, 50); + + const graphs = [cpuBuffer, memoryBuffer, ioReadBuffer, ioWriteBuffer, pidsBuffer, networkReceivedBuffer, networkSentBuffer]; + let yPosition = titleHeight + 20; + + for (const imageBuffer of graphs) { + const img = await loadImage(imageBuffer); + ctx.drawImage(img, 0, yPosition, metricWidth, metricHeight); + yPosition += metricHeight + graphMargin; + } + + res.set('Content-Type', 'image/png'); + res.send(canvas.toBuffer()); + } catch (error) { + res.status(500).send('Error generating full report.'); + } +}); + +const Docker = require('dockerode'); +const docker = new Docker(); // Make sure Docker is properly configured + +app.get('/api/graph/full-report/:containerId/live', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 2; + const maxPoints = 30; // Limit to the last 120 seconds (2 minutes) + + const html = ` + + + + + + Live Report for ${containerId} + + + + + + +
+    <!-- [template markup garbled in extraction] Live report page for ${containerId}:
+         panels titled "CPU Usage", "Memory Usage" and "Network Traffic",
+         a process table with PID / User / Command columns, and the inline
+         client-side script; the original markup could not be recovered here. -->
+ + + + + `; + + res.send(html); +}); + + + + +app.get('/api/processes/:containerId', async (req, res) => { + const { containerId } = req.params; + + // Fetch processes running in the container + let processList = []; + try { + const container = docker.getContainer(containerId); + const processes = await container.top(); // Fetch running processes in the container +// console.log(processes) + processList = processes.Processes || []; + } catch (err) { + console.error(`Error fetching processes for container ${containerId}:`, err); + return res.status(500).json({ error: 'Failed to fetch processes' }); + } + + // Send the process list as a JSON response + res.json(processList); +}); + +app.listen(port, "0.0.0.0", () => { + console.log(`Server running on http://localhost:${port}`); +}); \ No newline at end of file diff --git a/live-view.js b/live-view.js new file mode 100644 index 0000000..a8514f6 --- /dev/null +++ b/live-view.js @@ -0,0 +1,823 @@ +const express = require('express'); +const { ChartJSNodeCanvas } = require('chartjs-node-canvas'); +const { createCanvas, loadImage } = require('canvas'); +const cors = require('cors'); +const Docker = require('dockerode'); +const WebSocket = require('ws'); + +const app = express(); +const port = 6667; +app.use(cors()); + +const docker = new Docker(); +const wss = new WebSocket.Server({ noServer: true }); + +const metricWidth = 1900; +const metricHeight = 400; +const titleHeight = 100; +const graphMargin = 30; + +app.use((req, res, next) => { + res.set({ + 'Cache-Control': 'no-store, no-cache, must-revalidate, proxy-revalidate', + 'Pragma': 'no-cache', + 'Expires': '0', + 'Surrogate-Control': 'no-store' + }); + next(); +}); + +const chartJSMetricCanvas = new ChartJSNodeCanvas({ width: metricWidth, height: metricHeight, backgroundColour: 'black' }); + +const fetchMetricData = async (metric, containerId, timeframe = 5) => { + try { + const container = docker.getContainer(containerId); + const statsStream = await container.stats({ stream: true }); + + let metrics = []; + const endTime = Date.now(); + const startTime = endTime - (timeframe * 60 * 1000); + + return new Promise((resolve, reject) => { + statsStream.on('data', (chunk) => { + const stat = JSON.parse(chunk.toString()); + const timestamp = new Date(stat.read).getTime(); + + if (timestamp < startTime) return; + + let value; + switch (metric) { + case 'cpu': + const cpuDelta = stat.cpu_stats.cpu_usage.total_usage - (stat.precpu_stats.cpu_usage?.total_usage || 0); + const systemDelta = stat.cpu_stats.system_cpu_usage - (stat.precpu_stats.system_cpu_usage || 0); + value = cpuDelta / systemDelta * stat.cpu_stats.online_cpus * 100; + break; + case 'memory': + const activeAnon = stat.memory_stats.stats?.active_anon || 0; + const inactiveAnon = stat.memory_stats.stats?.inactive_anon || 0; + const slab = stat.memory_stats.stats?.slab || 0; + const kernelStack = stat.memory_stats.stats?.kernel_stack || 0; + const residentMemory = activeAnon + inactiveAnon + slab + kernelStack; + value = residentMemory / 1024 / 1024; // Convert to MB + // console.log(`API Memory usage for ${containerId}: ${value.toFixed(2)} MB, Raw stats:`, stat.memory_stats); + break; + case 'io': + const read = stat.blkio_stats.io_service_bytes_recursive?.find(s => s.op === 'Read')?.value || 0; + const write = stat.blkio_stats.io_service_bytes_recursive?.find(s => s.op === 'Write')?.value || 0; + value = { read: read / 1024 / 1024, write: write / 1024 / 1024 }; // Convert to MB + break; + case 'pids': + value = stat.pids_stats.current 
|| 0; + break; + case 'network': + const rx = stat.networks?.eth0?.rx_bytes || 0; + const tx = stat.networks?.eth0?.tx_bytes || 0; + value = { received: rx / 1024, sent: tx / 1024 }; // Convert to KB + break; + } + + metrics.push([Math.floor(timestamp / 1000), value]); + + if (timestamp >= endTime) { + statsStream.destroy(); + resolve({ data: metrics }); + } + }); + + statsStream.on('error', reject); + }); + } catch (error) { + console.error(`Error fetching ${metric} data for container ${containerId}:`, error); + throw new Error(`Failed to fetch ${metric} data.`); + } +}; + +const extractMetrics = (data, metric) => { + const labels = data.data.map((entry) => new Date(entry[0] * 1000).toLocaleTimeString()); + let values; + + switch (metric) { + case 'cpu': + values = data.data.map(entry => entry[1]); + break; + case 'memory': + values = data.data.map(entry => entry[1]); + break; + case 'io': + values = { + read: data.data.map(entry => entry[1].read), + write: data.data.map(entry => entry[1].write), + }; + break; + case 'pids': + values = data.data.map(entry => entry[1]); + break; + case 'network': + values = { + received: data.data.map(entry => entry[1].received), + sent: data.data.map(entry => -entry[1].sent), // Negate sent values + }; + break; + default: + values = []; + } + + return { labels, values }; +}; + +const generateMetricGraph = async (metric, labels, label, borderColor) => { + const configuration = { + type: 'line', + data: { + labels: labels, + datasets: [{ + label: label, + data: metric, + borderColor: borderColor, + fill: false, + tension: 0.1, + }], + }, + options: { + scales: { + x: { + title: { + display: true, + text: 'Time', + color: 'white', + }, + }, + y: { + title: { + display: true, + text: `${label} Usage`, + color: 'white', + }, + }, + }, + plugins: { + legend: { + labels: { + color: 'white', + }, + }, + }, + }, + }; + + return chartJSMetricCanvas.renderToBuffer(configuration); +}; + +const drawTitle = (ctx, text, yPos) => { + ctx.fillStyle = 'white'; + ctx.font = 'bold 40px Arial'; + const textWidth = ctx.measureText(text).width; + ctx.fillText(text, (metricWidth - textWidth) / 2, yPos); +}; + +// CPU Usage +app.get('/api/graph/cpu/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('cpu', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'cpu'); + const imageBuffer = await generateMetricGraph(values, labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)'); + res.set('Content-Type', 'image/png'); + res.send(imageBuffer); + } catch (error) { + res.status(500).send(`Error generating CPU graph: ${error.message}`); + } +}); + +// Memory Usage +app.get('/api/graph/memory/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('memory', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'memory'); + const imageBuffer = await generateMetricGraph(values, labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)'); + res.set('Content-Type', 'image/png'); + res.send(imageBuffer); + } catch (error) { + res.status(500).send(`Error generating memory graph: ${error.message}`); + } +}); + +// 
Disk I/O +app.get('/api/graph/io/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('io', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'io'); + const readBuffer = await generateMetricGraph(values.read, labels, 'Disk Read (MB)', 'rgba(54, 255, 132, 1)'); + const writeBuffer = await generateMetricGraph(values.write, labels, 'Disk Write (MB)', 'rgba(255, 99, 255, 1)'); + + const canvas = createCanvas(metricWidth, metricHeight * 2 + 100); + const ctx = canvas.getContext('2d'); + ctx.fillStyle = 'black'; + ctx.fillRect(0, 0, canvas.width, canvas.height); + + drawTitle(ctx, `Disk Read for ${containerId}`, 40); + let img = await loadImage(readBuffer); + ctx.drawImage(img, 0, 50, metricWidth, metricHeight); + + drawTitle(ctx, `Disk Write for ${containerId}`, metricHeight + 100); + img = await loadImage(writeBuffer); + ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight); + + res.set('Content-Type', 'image/png'); + res.send(canvas.toBuffer()); + } catch (error) { + res.status(500).send('Error generating disk I/O graphs.'); + } +}); + +// PIDs +app.get('/api/graph/pids/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('pids', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'pids'); + const imageBuffer = await generateMetricGraph(values, labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)'); + res.set('Content-Type', 'image/png'); + res.send(imageBuffer); + } catch (error) { + res.status(500).send(`Error generating PIDs graph: ${error.message}`); + } +}); + +// Network Traffic +app.get('/api/graph/network/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 5; + const format = req.query.format || 'graph'; + + try { + const data = await fetchMetricData('network', containerId, timeframe); + if (format === 'json') { + return res.json(data); + } + + const { labels, values } = extractMetrics(data, 'network'); + + const receivedBuffer = await generateMetricGraph(values.received, labels, 'Network Received (KB)', 'rgba(75, 192, 192, 1)'); + const sentBuffer = await generateMetricGraph(values.sent, labels, 'Network Sent (KB)', 'rgba(255, 159, 64, 1)'); + + const canvas = createCanvas(metricWidth, metricHeight * 2 + 100); + const ctx = canvas.getContext('2d'); + ctx.fillStyle = 'black'; + ctx.fillRect(0, 0, canvas.width, canvas.height); + + drawTitle(ctx, `Network Received for ${containerId}`, 40); + let img = await loadImage(receivedBuffer); + ctx.drawImage(img, 0, 50, metricWidth, metricHeight); + + drawTitle(ctx, `Network Sent for ${containerId}`, metricHeight + 100); + img = await loadImage(sentBuffer); + ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight); + + res.set('Content-Type', 'image/png'); + res.send(canvas.toBuffer()); + } catch (error) { + res.status(500).send('Error generating network graphs.'); + } +}); + +// Full Report +app.get('/api/graph/full-report/:containerId', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) 
|| 5; + const format = req.query.format || 'graph'; + + try { + const cpuData = await fetchMetricData('cpu', containerId, timeframe); + const memoryData = await fetchMetricData('memory', containerId, timeframe); + const ioData = await fetchMetricData('io', containerId, timeframe); + const pidsData = await fetchMetricData('pids', containerId, timeframe); + const networkData = await fetchMetricData('network', containerId, timeframe); + + if (format === 'json') { + return res.json({ + cpu: cpuData, + memory: memoryData, + io: ioData, + pids: pidsData, + network: networkData, + }); + } + + const cpuMetrics = extractMetrics(cpuData, 'cpu'); + const memoryMetrics = extractMetrics(memoryData, 'memory'); + const ioMetrics = extractMetrics(ioData, 'io'); + const pidsMetrics = extractMetrics(pidsData, 'pids'); + const networkMetrics = extractMetrics(networkData, 'network'); + + const cpuBuffer = await generateMetricGraph(cpuMetrics.values, cpuMetrics.labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)'); + const memoryBuffer = await generateMetricGraph(memoryMetrics.values, memoryMetrics.labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)'); + const ioReadBuffer = await generateMetricGraph(ioMetrics.values.read, ioMetrics.labels, 'Disk Read (MB)', 'rgba(54, 255, 132, 1)'); + const ioWriteBuffer = await generateMetricGraph(ioMetrics.values.write, ioMetrics.labels, 'Disk Write (MB)', 'rgba(255, 99, 255, 1)'); + const pidsBuffer = await generateMetricGraph(pidsMetrics.values, pidsMetrics.labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)'); + const networkReceivedBuffer = await generateMetricGraph(networkMetrics.values.received, networkMetrics.labels, 'Network Received (KB)', 'rgba(75, 192, 192, 1)'); + const networkSentBuffer = await generateMetricGraph(networkMetrics.values.sent, networkMetrics.labels, 'Network Sent (KB)', 'rgba(255, 159, 64, 1)'); + + const numGraphs = 7; + const fullReportHeight = titleHeight + (numGraphs * (metricHeight + graphMargin)); + + const canvas = createCanvas(metricWidth, fullReportHeight); + const ctx = canvas.getContext('2d'); + ctx.fillStyle = 'black'; + ctx.fillRect(0, 0, canvas.width, canvas.height); + + drawTitle(ctx, `Full Report for ${containerId} (Last ${timeframe} minutes)`, 50); + + const graphs = [cpuBuffer, memoryBuffer, ioReadBuffer, ioWriteBuffer, pidsBuffer, networkReceivedBuffer, networkSentBuffer]; + let yPosition = titleHeight + 20; + + for (const imageBuffer of graphs) { + const img = await loadImage(imageBuffer); + ctx.drawImage(img, 0, yPosition, metricWidth, metricHeight); + yPosition += metricHeight + graphMargin; + } + + res.set('Content-Type', 'image/png'); + res.send(canvas.toBuffer()); + } catch (error) { + res.status(500).send('Error generating full report.'); + } +}); + +// Live Report with WebSocket +app.get('/api/graph/full-report/:containerId/live', async (req, res) => { + const { containerId } = req.params; + const timeframe = parseInt(req.query.timeframe) || 2; + const maxPoints = 30; + + const html = ` + + + + + + Live Report for ${containerId} + + + + + + +
+    <!-- [template markup garbled in extraction] Live report page for ${containerId}:
+         panels titled "CPU Usage", "Memory Usage" and "Network Speed",
+         a process table with PID / User / Command columns, and the inline
+         WebSocket client script; the original markup could not be recovered here. -->
+ + + + `; + + res.send(html); +}); + +app.get('/api/processes/:containerId', async (req, res) => { + const { containerId } = req.params; + let processList = []; + try { + const container = docker.getContainer(containerId); + const processes = await container.top(); + processList = processes.Processes || []; + } catch (err) { + console.error(`Error fetching processes for container ${containerId}:`, err); + return res.status(500).json({ error: 'Failed to fetch processes' }); + } + res.json(processList); +}); + +// WebSocket handling +const server = app.listen(port, "0.0.0.0", () => { + console.log(`Server running on http://localhost:${port}`); +}); + +server.on('upgrade', (request, socket, head) => { + const pathname = request.url; + if (pathname.startsWith('/ws/')) { + wss.handleUpgrade(request, socket, head, (ws) => { + wss.emit('connection', ws, request); + }); + } else { + socket.destroy(); + } +}); + +wss.on('connection', (ws, request) => { + const containerId = request.url.split('/').pop(); + let statsStream; + let prevNetwork = { rx: 0, tx: 0, timestamp: 0 }; + + const sendStats = async () => { + try { + const container = docker.getContainer(containerId); + statsStream = await container.stats({ stream: true }); + + statsStream.on('data', async (chunk) => { + const stat = JSON.parse(chunk.toString()); + const currentTimestamp = new Date(stat.read).getTime() / 1000; // seconds + + const cpuDelta = stat.cpu_stats.cpu_usage.total_usage - (stat.precpu_stats.cpu_usage?.total_usage || 0); + const systemDelta = stat.cpu_stats.system_cpu_usage - (stat.precpu_stats.system_cpu_usage || 0); + const cpu = cpuDelta / systemDelta * stat.cpu_stats.online_cpus * 100; + + const activeAnon = stat.memory_stats.stats?.active_anon || 0; + const inactiveAnon = stat.memory_stats.stats?.inactive_anon || 0; + const slab = stat.memory_stats.stats?.slab || 0; + const kernelStack = stat.memory_stats.stats?.kernel_stack || 0; + const residentMemory = activeAnon + inactiveAnon + slab + kernelStack; + const memory = residentMemory / 1024 / 1024; // Convert to MB + // console.log(`Live Memory usage for ${containerId}: ${memory.toFixed(2)} MB, Raw stats:`, stat.memory_stats); + + const rx = stat.networks?.eth0?.rx_bytes || 0; + const tx = stat.networks?.eth0?.tx_bytes || 0; + + let network = { received: 0, sent: 0 }; + if (prevNetwork.timestamp !== 0) { + const timeDiff = currentTimestamp - prevNetwork.timestamp; // seconds + if (timeDiff > 0) { + network.received = (rx - prevNetwork.rx) / 1024 / timeDiff; // KB/s + network.sent = -(tx - prevNetwork.tx) / 1024 / timeDiff; // Negate sent (KB/s) + } + } + prevNetwork = { rx, tx, timestamp: currentTimestamp }; + + let processes = []; + try { + const processData = await container.top(); + processes = processData.Processes || []; + } catch (err) { + console.error(`Error fetching processes for ${containerId}:`, err); + processes = []; + } + + ws.send(JSON.stringify({ + timestamp: Math.floor(currentTimestamp), + cpu, + memory, + network, + processes + })); + }); + + statsStream.on('error', (err) => { + console.error(`Stats stream error for ${containerId}:`, err); + ws.close(); + }); + } catch (err) { + console.error(`Error setting up stats for ${containerId}:`, err); + ws.close(); + } + }; + + sendStats(); + + ws.on('close', () => { + if (statsStream) statsStream.destroy(); + }); +}); \ No newline at end of file diff --git a/package.json b/package.json new file mode 100644 index 0000000..e9b9e80 --- /dev/null +++ b/package.json @@ -0,0 +1,14 @@ +{ + "dependencies": { + 
"axios": "^1.7.7", + "chartjs-node-canvas": "^4.1.6", + "cors": "^2.8.5", + "dockerode": "^4.0.6", + "dotenv": "^16.5.0", + "express": "^4.21.1", + "express-rate-limit": "^7.5.0", + "winston": "^3.17.0", + "ws": "^8.18.2", + "yup": "^1.6.1" + } +} diff --git a/public/live.html b/public/live.html new file mode 100644 index 0000000..e66d649 --- /dev/null +++ b/public/live.html @@ -0,0 +1,21 @@ + + + + + + Live Docker Stats + + + + + +
+    <!-- [template markup garbled in extraction] "Live Docker Stats" page body:
+         the chart canvases and script tags referenced by public/live.js;
+         the original markup could not be recovered here. -->
+ + + + diff --git a/public/live.js b/public/live.js new file mode 100644 index 0000000..e3ed867 --- /dev/null +++ b/public/live.js @@ -0,0 +1,157 @@ +const socket = io(); +const containerName = window.location.pathname.split('/').pop(); + +// Chart.js setup for CPU +const cpuCtx = document.getElementById('cpuChart').getContext('2d'); +const cpuChart = new Chart(cpuCtx, { + type: 'line', + data: { + labels: [], + datasets: [{ + label: 'CPU Usage (%)', + borderColor: 'rgba(255, 99, 132, 1)', + data: [], + fill: false, + tension: 0.1 + }] + }, + options: { + scales: { + x: { + title: { + display: true, + text: 'Time', + color: 'white', + }, + }, + y: { + title: { + display: true, + text: 'CPU Usage (%)', + color: 'white', + }, + ticks: { + color: 'white', + } + }, + }, + plugins: { + legend: { + labels: { + color: 'white', + }, + }, + }, + } +}); + +// Chart.js setup for Memory +const memoryCtx = document.getElementById('memoryChart').getContext('2d'); +const memoryChart = new Chart(memoryCtx, { + type: 'line', + data: { + labels: [], + datasets: [{ + label: 'Memory Usage (MB)', + borderColor: 'rgba(54, 162, 235, 1)', + data: [], + fill: false, + tension: 0.1 + }] + }, + options: { + scales: { + x: { + title: { + display: true, + text: 'Time', + color: 'white', + }, + }, + y: { + title: { + display: true, + text: 'Memory Usage (MB)', + color: 'white', + }, + ticks: { + color: 'white', + } + }, + }, + plugins: { + legend: { + labels: { + color: 'white', + }, + }, + }, + } +}); + +// Chart.js setup for Network +const networkCtx = document.getElementById('networkChart').getContext('2d'); +const networkChart = new Chart(networkCtx, { + type: 'line', + data: { + labels: [], + datasets: [{ + label: 'Network Usage (KB)', + borderColor: 'rgba(75, 192, 192, 1)', + data: [], + fill: false, + tension: 0.1 + }] + }, + options: { + scales: { + x: { + title: { + display: true, + text: 'Time', + color: 'white', + }, + }, + y: { + title: { + display: true, + text: 'Network Usage (KB)', + color: 'white', + }, + ticks: { + color: 'white', + } + }, + }, + plugins: { + legend: { + labels: { + color: 'white', + }, + }, + }, + } +}); + +// Start live updates +socket.emit('startLive', containerName); + +// Listen for updates from the server +socket.on('updateStats', (stats) => { + updateChart(cpuChart, stats.time, stats.cpu); + updateChart(memoryChart, stats.time, stats.memory); + updateChart(networkChart, stats.time, stats.network); +}); + +// Helper function to update the chart +function updateChart(chart, label, data) { + chart.data.labels.push(label); + chart.data.datasets[0].data.push(data); + + if (chart.data.labels.length > 30) { + chart.data.labels.shift(); + chart.data.datasets[0].data.shift(); + } + + chart.update(); +}
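For reference (not part of the patch): a minimal client sketch showing how the two servers above might be exercised, assuming graph.js is listening on localhost:6666, live-view.js on localhost:6667, and that "my-container" is a placeholder for a real container name. Both axios and ws are already declared in package.json.

// client-example.js -- illustrative sketch only, not part of this commit.
// Assumes graph.js on localhost:6666, live-view.js on localhost:6667,
// and "my-container" as a hypothetical container name.
const fs = require('fs');
const axios = require('axios');
const WebSocket = require('ws');

const containerId = 'my-container'; // placeholder container name

// Fetch the rendered full-report PNG from graph.js and save it to disk.
async function saveFullReport() {
  const url = `http://localhost:6666/api/graph/full-report/${containerId}?timeframe=5`;
  const res = await axios.get(url, { responseType: 'arraybuffer' });
  fs.writeFileSync('full-report.png', res.data);
}

// Subscribe to the live stats that live-view.js pushes over /ws/<containerId>.
function streamLiveStats() {
  const ws = new WebSocket(`ws://localhost:6667/ws/${containerId}`);
  ws.on('message', (msg) => {
    const { timestamp, cpu, memory, network } = JSON.parse(msg);
    console.log(timestamp, `cpu=${cpu.toFixed(1)}%`, `mem=${memory.toFixed(1)}MB`,
                `rx=${network.received.toFixed(1)}KB/s`, `tx=${network.sent.toFixed(1)}KB/s`);
  });
  ws.on('error', (err) => console.error('WebSocket error:', err.message));
}

saveFullReport().catch((err) => console.error('Report fetch failed:', err.message));
streamLiveStats();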