From 1d0b225438493ce552512e53108ca3d7b867597b Mon Sep 17 00:00:00 2001
From: Raven Scott
Date: Mon, 17 Apr 2023 04:39:24 +0200
Subject: [PATCH] Bug Fix: Chunk messages if response is too large

Discord rejects messages longer than 2000 characters, so split any
response over 1980 characters into chunks and send them 3 seconds
apart. The chunking regex uses [\s\S]{1,limit} rather than .{1,limit}:
"." never matches "\n", so the dot form silently drops every line
break in a multi-line response and makes String.prototype.match()
return null (then crash on chunks.length) when the response is mostly
newlines.

---
 llamabot.js | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/llamabot.js b/llamabot.js
index 3e2fe0b..54d9c99 100644
--- a/llamabot.js
+++ b/llamabot.js
@@ -149,7 +149,21 @@ client.on('messageCreate', async (message) => {
 
 
     if (response && response.trim()) {
       // Send response to user if it's not empty
-      await message.channel.send(response);
+      const limit = 1980;
+
+      // if we are over the discord char limit we need chunks...
+      if (response.length > limit) {
+        const chunks = response.match(new RegExp(`[\\s\\S]{1,${limit}}`, "g"));
+        for (let i = 0; i < chunks.length; i++) {
+          setTimeout(() => {
+            message.channel.send(chunks[i]);
+          }, i * 3000); // delay of 3 seconds between each chunk to save on API requests
+        }
+      } else {
+        // We are good to go, send the response
+        await message.channel.send(response);
+      }
+      setPresenceOnline()
       setBusy(message.author.id, false);
     } else {