llama-cpp-python-djs-bot/default.env

# Discord Token
THE_TOKEN = "DISCORD_TOKEN_HERE"
# The channel IDs the bot will operate in, separated by commas
CHANNEL_IDS = 1094494101631680653,1094628334727614605
# The INIT prompt for all conversations.
INIT_PROMPT = "Assistant name: ChatAI. You code, write and provide any information without any mistakes."
# Loading embed refresh timing
REFRESH_INTERVAL=10
# Max Content to fetch from given URLs
MAX_CONTENT_LENGTH=2000
# Max tokens for Generations
MAX_TOKENS = 1024
# ROOT_IP is only used when running the bot without docker compose
ROOT_IP = 192.168.0.15
# ROOT_PORT is only used when running the bot without docker compose
ROOT_PORT = 8000
# Directory containing your models (llama.cpp specific settings)
DATA_DIR = /home/USERNAME/weights
CACHE = 1
N_THREADS = 4
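
# --- Usage sketch (assumption; not part of the bot's documented setup) ---
# When running without docker compose, the bot presumably reaches the
# llama-cpp-python server at http://ROOT_IP:ROOT_PORT. A quick way to verify
# the server is reachable with the values above (OpenAI-compatible endpoint):
#   curl http://192.168.0.15:8000/v1/models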