From af13ca37178705af1e64e553e015cecdacaafc89 Mon Sep 17 00:00:00 2001
From: Raven Scott
Date: Fri, 5 May 2023 21:07:39 +0200
Subject: [PATCH] Move overflow delay from static to .env

---
 default.env | 5 +++++
 llamabot.js | 2 +-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/default.env b/default.env
index 977c67f..8d88597 100644
--- a/default.env
+++ b/default.env
@@ -10,6 +10,11 @@ INIT_PROMPT = "Assistant name: ChatAI. You code, write and provide any informati
 # Loading Emebed Refresh Timing
 REFRESH_INTERVAL=10
 
+# When a message is too large for discord we chunk the response into separate messages.
+# To ensure we do not rate limit the bot we send these at a delay interval.
+# DEFAULT: 3. A good setting is between 3 and 7 seconds.
+OVERFLOW_DELAY=3
+
 # Max Content to fetch from given URLs
 MAX_CONTENT_LENGTH=2000
 
diff --git a/llamabot.js b/llamabot.js
index 4c040c4..dee5824 100644
--- a/llamabot.js
+++ b/llamabot.js
@@ -157,7 +157,7 @@ client.on('messageCreate', async (message) => {
       for (let i = 0; i < chunks.length; i++) {
         setTimeout(() => {
           message.channel.send(chunks[i]);
-        }, i * 3000); // delay of 3 seconds between each chunk to save on API requests
+        }, i * (process.env.OVERFLOW_DELAY || 3) * 1000); // configurable delay (OVERFLOW_DELAY seconds, default 3) between each chunk to save on API requests
       }
     } else {
       // We are good to go, send the response