From 4e693295019636debc11113317f2e24733c65918 Mon Sep 17 00:00:00 2001
From: Raven Scott
Date: Mon, 17 Apr 2023 02:30:38 +0200
Subject: [PATCH] Adding Memory usage to generation

---
 llamabot.js | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/llamabot.js b/llamabot.js
index deda2a4..7122265 100644
--- a/llamabot.js
+++ b/llamabot.js
@@ -4,7 +4,12 @@ import { emptyResponses } from './assets/emptyMessages.js';
 import { resetResponses, userResetMessages } from './assets/resetMessages.js';
 import { errorMessages, busyResponses } from './assets/errorMessages.js';
 import os from 'os';
-import { Client, GatewayIntentBits, ActivityType, Partials } from 'discord.js';
+import {
+  Client,
+  GatewayIntentBits,
+  ActivityType,
+  Partials
+} from 'discord.js';
 
 const client = new Client({
   intents: [
@@ -177,7 +182,10 @@ async function generateResponse(conversation, message) {
   // define a function that shows the system load percentage and updates the message
   const showSystemLoad = async () => {
     const systemLoad = os.loadavg()[0] / os.cpus().length * 100;
-    const messageData = `Please wait, I am thinking... System Load: ${systemLoad.toFixed(2)}%`;
+    const freeMemory = os.freemem() / 1024 / 1024;
+    const totalMemory = os.totalmem() / 1024 / 1024;
+    const usedMemory = totalMemory - freeMemory;
+    const messageData = `Please wait, I am thinking...\nSystem Load: ${systemLoad.toFixed(2)}%\nMemory Usage: ${usedMemory.toFixed(2)} MB / ${totalMemory.toFixed(2)} MB`;
 
     // if the message object doesn't exist, create it
     if (!botMessage) {
@@ -190,8 +198,8 @@ async function generateResponse(conversation, message) {
   // call the function initially
   await showSystemLoad();
 
-  // refresh the system load percentage and update the message every 5 seconds
-  const refreshInterval = setInterval(showSystemLoad, 5000);
+  // refresh the system load percentage and update the message every 7 seconds
+  const refreshInterval = setInterval(showSystemLoad, 7000);
 
   try {
     const response = await fetch(`http://${process.env.ROOT_IP}:${process.env.ROOT_PORT}/v1/chat/completions`, {
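
Note: the added metrics come straight from Node's built-in os module, which reports memory in bytes and load as a 1-minute average per core. As a minimal standalone sketch (same Node.js runtime as the bot assumed, no Discord dependencies), the new status line can be reproduced like this:

    import os from 'os';

    // 1-minute load average divided by core count, expressed as a percentage
    const systemLoad = os.loadavg()[0] / os.cpus().length * 100;

    // free/total memory converted from bytes to MB
    const freeMemory = os.freemem() / 1024 / 1024;
    const totalMemory = os.totalmem() / 1024 / 1024;
    const usedMemory = totalMemory - freeMemory;

    console.log(
      `System Load: ${systemLoad.toFixed(2)}%\n` +
      `Memory Usage: ${usedMemory.toFixed(2)} MB / ${totalMemory.toFixed(2)} MB`
    );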