const express = require('express');
const { ChartJSNodeCanvas } = require('chartjs-node-canvas');
const { createCanvas, loadImage } = require('canvas');
const axios = require('axios');
const cors = require('cors'); // Import CORS middleware
require('dotenv').config();

const app = express();
const port = 6666;

app.use(cors()); // Allows all origins (wildcard *)
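
// Note: API_BASE_URL (loaded from .env) must point at the Netdata instance that exposes
// the cgroup charts used below, e.g. http://localhost:19999 (Netdata's default port).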
const metricWidth = 1900;
const metricHeight = 400;
const titleHeight = 100;
const graphMargin = 30;
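
// Disable client and proxy caching so every request returns a freshly rendered graph.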
app.use((req, res, next) => {
  res.set({
    'Cache-Control': 'no-store, no-cache, must-revalidate, proxy-revalidate',
    'Pragma': 'no-cache',
    'Expires': '0',
    'Surrogate-Control': 'no-store'
  });
  next();
});
const chartJSMetricCanvas = new ChartJSNodeCanvas({ width: metricWidth, height: metricHeight, backgroundColour: 'black' });
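
// Build the Netdata data-API URL for each metric of a container's cgroup chart.
// `after` is negative, meaning "the last <timeframe> minutes relative to now".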
const getEndpoints = (containerId, timeframe) => {
  const after = -(timeframe * 60);
  return {
    cpu: `${process.env.API_BASE_URL}/api/v3/data?chart=cgroup_${containerId}.cpu&format=json&after=${after}&dimensions=user,system`,
    memory: `${process.env.API_BASE_URL}/api/v3/data?chart=cgroup_${containerId}.mem_usage&format=json&after=${after}&dimensions=used`,
    io: `${process.env.API_BASE_URL}/api/v3/data?chart=cgroup_${containerId}.io&format=json&after=${after}&dimensions=read,write`,
    pids: `${process.env.API_BASE_URL}/api/v3/data?chart=cgroup_${containerId}.pids_current&format=json&after=${after}&dimensions=current`,
    network: `${process.env.API_BASE_URL}/api/v3/data?chart=cgroup_${containerId}.net_eth0&format=json&after=${after}&dimensions=received,sent`,
  };
};
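
// Fetch the raw JSON for a single metric from Netdata (default window: last 5 minutes).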
const fetchMetricData = async (metric, containerId, timeframe = 5) => {
  const endpoints = getEndpoints(containerId, timeframe);
  try {
    const response = await axios.get(endpoints[metric]);
    return response.data;
  } catch (error) {
    console.error(`Error fetching ${metric} data for container ${containerId}:`, error);
    throw new Error(`Failed to fetch ${metric} data.`);
  }
};
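
// Convert a Netdata response into chart-ready { labels, values }.
// Each row of data.result.data is [timestamp, dimension1, dimension2, ...],
// with the timestamp in epoch seconds.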
const extractMetrics = (data, metric) => {
  const labels = data.result.data.map((entry) => new Date(entry[0] * 1000).toLocaleTimeString());
  let values;

  switch (metric) {
    case 'cpu':
      values = data.result.data.map(entry => entry[1] + entry[2]);
      break;
    case 'memory':
      values = data.result.data.map(entry => entry[1] / 1024); // Convert KiB to MB
      break;
    case 'io':
      values = {
        read: data.result.data.map(entry => entry[1] / 1024), // Convert KiB/s to MB/s
        write: data.result.data.map(entry => -entry[2] / 1024), // Convert KiB/s to MB/s and make positive
      };
      break;
    case 'pids':
      values = data.result.data.map(entry => entry[1]);
      break;
    case 'network':
      values = {
        received: data.result.data.map(entry => entry[1] / 8), // Convert Kbits/s to KB/s
        sent: data.result.data.map(entry => -entry[2] / 8), // Convert Kbits/s to KB/s and make positive
      };
      break;
    default:
      values = [];
  }

  return { labels, values };
};
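
// Render a single line chart to a PNG buffer (white labels on the black canvas background).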
const generateMetricGraph = async (values, labels, label, borderColor) => {
  const configuration = {
    type: 'line',
    data: {
      labels: labels,
      datasets: [{
        label: label,
        data: values,
        borderColor: borderColor,
        fill: false,
        tension: 0.1,
      }],
    },
    options: {
      scales: {
        x: {
          title: {
            display: true,
            text: 'Time',
            color: 'white',
          },
        },
        y: {
          title: {
            display: true,
            text: label,
            color: 'white',
          },
        },
      },
      plugins: {
        legend: {
          labels: {
            color: 'white',
          },
        },
      },
    },
  };

  return chartJSMetricCanvas.renderToBuffer(configuration);
};
// Draw title on the canvas
const drawTitle = (ctx, text, yPos) => {
  ctx.fillStyle = 'white'; // Set text color
  ctx.font = 'bold 40px Arial'; // Set font size and style
  const textWidth = ctx.measureText(text).width; // Measure the width of the text
  ctx.fillText(text, (metricWidth - textWidth) / 2, yPos); // Center the text horizontally
};
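
// Every graph route accepts ?timeframe=<minutes> (default 5) and ?format=json,
// which returns the raw Netdata data instead of a rendered PNG.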
// CPU Usage
app.get('/api/graph/cpu/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe, 10) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('cpu', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'cpu');
    const imageBuffer = await generateMetricGraph(values, labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)');
    res.set('Content-Type', 'image/png');
    res.send(imageBuffer);
  } catch (error) {
    res.status(500).send(`Error generating CPU graph: ${error.message}`);
  }
});
// Memory Usage
app.get('/api/graph/memory/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe, 10) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('memory', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'memory');
    const imageBuffer = await generateMetricGraph(values, labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)');
    res.set('Content-Type', 'image/png');
    res.send(imageBuffer);
  } catch (error) {
    res.status(500).send(`Error generating memory graph: ${error.message}`);
  }
});
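
// The disk I/O and network routes render two charts each (read/write, received/sent)
// and stack them vertically on a single canvas.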
// Disk I/O
app.get('/api/graph/io/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe, 10) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('io', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'io');
    const readBuffer = await generateMetricGraph(values.read, labels, 'Disk Read (MB/s)', 'rgba(54, 255, 132, 1)');
    const writeBuffer = await generateMetricGraph(values.write, labels, 'Disk Write (MB/s)', 'rgba(255, 99, 255, 1)');

    // Extra 120px leaves room for both titles and avoids clipping the second chart.
    const canvas = createCanvas(metricWidth, metricHeight * 2 + 120);
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    drawTitle(ctx, `Disk Read for ${containerId}`, 40);
    let img = await loadImage(readBuffer);
    ctx.drawImage(img, 0, 50, metricWidth, metricHeight);

    drawTitle(ctx, `Disk Write for ${containerId}`, metricHeight + 100);
    img = await loadImage(writeBuffer);
    ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight);

    res.set('Content-Type', 'image/png');
    res.send(canvas.toBuffer());
  } catch (error) {
    res.status(500).send(`Error generating disk I/O graphs: ${error.message}`);
  }
});
// PIDs
app.get('/api/graph/pids/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe, 10) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('pids', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'pids');
    const imageBuffer = await generateMetricGraph(values, labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)');
    res.set('Content-Type', 'image/png');
    res.send(imageBuffer);
  } catch (error) {
    res.status(500).send(`Error generating PIDs graph: ${error.message}`);
  }
});
// Network Traffic
app.get('/api/graph/network/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe, 10) || 5;
  const format = req.query.format || 'graph';

  try {
    const data = await fetchMetricData('network', containerId, timeframe);
    if (format === 'json') {
      return res.json(data);
    }

    const { labels, values } = extractMetrics(data, 'network');

    const receivedBuffer = await generateMetricGraph(values.received, labels, 'Network Received (KB/s)', 'rgba(75, 192, 192, 1)');
    const sentBuffer = await generateMetricGraph(values.sent, labels, 'Network Sent (KB/s)', 'rgba(255, 159, 64, 1)');

    // Extra 120px leaves room for both titles and avoids clipping the second chart.
    const canvas = createCanvas(metricWidth, metricHeight * 2 + 120);
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    drawTitle(ctx, `Network Received for ${containerId}`, 40);
    let img = await loadImage(receivedBuffer);
    ctx.drawImage(img, 0, 50, metricWidth, metricHeight);

    drawTitle(ctx, `Network Sent for ${containerId}`, metricHeight + 100);
    img = await loadImage(sentBuffer);
    ctx.drawImage(img, 0, metricHeight + 110, metricWidth, metricHeight);

    res.set('Content-Type', 'image/png');
    res.send(canvas.toBuffer());
  } catch (error) {
    res.status(500).send(`Error generating network graphs: ${error.message}`);
  }
});
// Full Report
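// Renders CPU, memory, disk read, disk write, PIDs, network received and network sent
// (seven charts) stacked on one tall canvas beneath a single title.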
app.get('/api/graph/full-report/:containerId', async (req, res) => {
  const { containerId } = req.params;
  const timeframe = parseInt(req.query.timeframe, 10) || 5;
  const format = req.query.format || 'graph';

  try {
    // Fetch all five metrics in parallel
    const [cpuData, memoryData, ioData, pidsData, networkData] = await Promise.all([
      fetchMetricData('cpu', containerId, timeframe),
      fetchMetricData('memory', containerId, timeframe),
      fetchMetricData('io', containerId, timeframe),
      fetchMetricData('pids', containerId, timeframe),
      fetchMetricData('network', containerId, timeframe),
    ]);

    if (format === 'json') {
      return res.json({
        cpu: cpuData,
        memory: memoryData,
        io: ioData,
        pids: pidsData,
        network: networkData,
      });
    }

    const cpuMetrics = extractMetrics(cpuData, 'cpu');
    const memoryMetrics = extractMetrics(memoryData, 'memory');
    const ioMetrics = extractMetrics(ioData, 'io');
    const pidsMetrics = extractMetrics(pidsData, 'pids');
    const networkMetrics = extractMetrics(networkData, 'network');

    const cpuBuffer = await generateMetricGraph(cpuMetrics.values, cpuMetrics.labels, 'CPU Usage (%)', 'rgba(255, 99, 132, 1)');
    const memoryBuffer = await generateMetricGraph(memoryMetrics.values, memoryMetrics.labels, 'Memory Usage (MB)', 'rgba(54, 162, 235, 1)');
    const ioReadBuffer = await generateMetricGraph(ioMetrics.values.read, ioMetrics.labels, 'Disk Read (MB/s)', 'rgba(54, 255, 132, 1)');
    const ioWriteBuffer = await generateMetricGraph(ioMetrics.values.write, ioMetrics.labels, 'Disk Write (MB/s)', 'rgba(255, 99, 255, 1)');
    const pidsBuffer = await generateMetricGraph(pidsMetrics.values, pidsMetrics.labels, 'PIDs (Processes)', 'rgba(153, 102, 255, 1)');
    const networkReceivedBuffer = await generateMetricGraph(networkMetrics.values.received, networkMetrics.labels, 'Network Received (KB/s)', 'rgba(75, 192, 192, 1)');
    const networkSentBuffer = await generateMetricGraph(networkMetrics.values.sent, networkMetrics.labels, 'Network Sent (KB/s)', 'rgba(255, 159, 64, 1)');

    const numGraphs = 7;
    const fullReportHeight = titleHeight + (numGraphs * (metricHeight + graphMargin));

    const canvas = createCanvas(metricWidth, fullReportHeight);
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = 'black';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    drawTitle(ctx, `Full Report for ${containerId} (Last ${timeframe} minutes)`, 50);

    const graphs = [cpuBuffer, memoryBuffer, ioReadBuffer, ioWriteBuffer, pidsBuffer, networkReceivedBuffer, networkSentBuffer];
    let yPosition = titleHeight + 20;

    for (const imageBuffer of graphs) {
      const img = await loadImage(imageBuffer);
      ctx.drawImage(img, 0, yPosition, metricWidth, metricHeight);
      yPosition += metricHeight + graphMargin;
    }

    res.set('Content-Type', 'image/png');
    res.send(canvas.toBuffer());
  } catch (error) {
    res.status(500).send(`Error generating full report: ${error.message}`);
  }
});
const Docker = require('dockerode');
const docker = new Docker(); // Defaults to the local Docker daemon socket
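
// List the processes running inside a container (the equivalent of `docker top <containerId>`).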
app.get('/api/processes/:containerId', async (req, res) => {
  const { containerId } = req.params;

  // Fetch processes running in the container
  let processList = [];
  try {
    const container = docker.getContainer(containerId);
    const processes = await container.top(); // Fetch running processes in the container
    processList = processes.Processes || [];
  } catch (err) {
    console.error(`Error fetching processes for container ${containerId}:`, err);
    return res.status(500).json({ error: 'Failed to fetch processes' });
  }

  // Send the process list as a JSON response
  res.json(processList);
});
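
// Example requests (assuming the server is reachable on port 6666 and <containerId>
// matches the cgroup name Netdata uses for the container):
//   curl -o cpu.png    "http://localhost:6666/api/graph/cpu/<containerId>?timeframe=15"
//   curl               "http://localhost:6666/api/graph/memory/<containerId>?format=json"
//   curl -o report.png "http://localhost:6666/api/graph/full-report/<containerId>?timeframe=30"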
app.listen(port, '0.0.0.0', () => {
  console.log(`Server running on http://localhost:${port}`);
});