From a6117c729cda22b3d655348f04936224f9bc8a5e Mon Sep 17 00:00:00 2001 From: Raven Scott Date: Tue, 11 Apr 2023 23:46:58 +0200 Subject: [PATCH] update readme --- README.md | 13 +++++++++++++ llamabot.js | 6 +++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 91c37ac..6b79173 100644 --- a/README.md +++ b/README.md @@ -32,6 +32,19 @@ Define a generateResponse function that sends a request to the GPT-3 API to gene Call the generateResponse function within the messageCreate event listener function. +# Backend REQUIRED + +The HTTP Server from https://abetlen.github.io/llama-cpp-python/ is required to use this bot. + +llama-cpp-python offers a web server which aims to act as a drop-in replacement for the OpenAI API. This allows you to use llama.cpp compatible models with any OpenAI compatible client (language libraries, services, etc). + +To install the server package and get started: + +pip install llama-cpp-python[server] +export MODEL=./models/your_model.bin +python3 -m llama_cpp.server +Navigate to http://localhost:8000/docs to see the OpenAPI documentation. + # Usage 1) Use ```npm i ``` diff --git a/llamabot.js b/llamabot.js index 153ef3f..f9d5519 100644 --- a/llamabot.js +++ b/llamabot.js @@ -111,15 +111,15 @@ client.on('messageCreate', async (message) => { if (conversation.messages.length === 0) { conversation.messages.push({ role: 'user', - content: `You are rAi, the most helpful writing AI, you code, write and provide information without any mistakes.` + content: `Your name is rAi, you code, write and provide any information without any mistakes.` }); conversation.messages.push({ role: 'user', - content: `My name is ${message.author.username}. 
\nIf you address me, tag me using @${message.author.username}` + content: `My name is ${message.author.username}.` }); conversation.messages.push({ role: 'assistant', - content: `Hello, ${message.author.username}, I am here to answer any question you have, how may I help you?` + content: `Hello, ${message.author.username}, how may I help you?` }); }