dockerize

Raven Scott 2023-04-12 16:17:18 +02:00
parent 19107543e9
commit ec7dbde761
9 changed files with 81 additions and 29 deletions

Dockerfile (new file, 10 lines)

@@ -0,0 +1,10 @@
FROM node:slim
WORKDIR /app
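# Copy only the package manifests first so the npm install layer is cached between builds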
COPY package*.json ./
RUN npm install --omit=dev
COPY . .
CMD node llamabot.js
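# This image is normally built via docker-compose; for a standalone build one could
# run something along these lines (the image tag here is only an example):
#   docker build -t llama-djs-bot .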

assets/emptyMessages.js (modified)

@@ -1,4 +1,4 @@
-const emptyResponses = [
+export const emptyResponses = [
"Oh boy, this is a tough one! Unfortunately, I don't have much insight to share on this topic.",
"Hmm, I'm scratching my head on this one. Sorry to say that I don't have much to offer.",
"Wish I had a witty remark, but alas, I don't have much to contribute to this discussion.",
@@ -28,5 +28,3 @@ const emptyResponses = [
"I'm afraid I don't have much experience with this, so I can't be of much help.",
"I wish I had some grand insight to share, but alas, I don't have much to offer in that regard."
];
-module.exports = emptyResponses;

assets/errorMessages.js (modified)

@@ -1,4 +1,4 @@
-const errorMessages = [
+export const errorMessages = [
"Uh oh, looks like something went awry! Try !reset to start fresh.",
"Oops, we hit a bump in the road! Give !reset a try to start anew.",
"We've encountered an error, but !reset can help us out! Give it a go.",
@@ -21,7 +21,7 @@ const errorMessages = [
"Oopsie daisy! Give !reset a try and we'll start over."
];
-const busyResponses = [
+export const busyResponses = [
"Sorry about that! Looks like I'm tied up at the moment. Please try again later.",
"Oops, I'm currently busy with something else. Please try again later.",
"Looks like I'm already working on something. Can you try again later?",
@@ -43,5 +43,3 @@ const errorMessages = [
"Looks like I'm currently engaged with something else. Please try again later.",
"I'm currently unavailable. Can you try again later?"
];
-module.exports = { errorMessages, busyResponses };

assets/resetMessages.js (modified)

@@ -1,4 +1,4 @@
-const resetResponses = [
+export const resetResponses = [
"Whoops, let's start fresh! What can I assist you with now?",
"Looks like we need a fresh start! What do you need help with?",
"To avoid any gremlins in the system, let's reset! How can I assist you now?",
@@ -21,7 +21,7 @@ const resetResponses = [
"Let's hit the restart button to make sure we're on the right track. What can I help you with now?"
];
-const userResetMessages = [
+export const userResetMessages = [
"All good, we're starting fresh! How can I assist you?",
"Got it, let's start over! How can I help you today?",
"Alright, starting anew! What can I help you with?",
@@ -43,5 +43,3 @@ const resetResponses = [
"Sure thing, we'll start over! What can I help you with today?",
"Conversation reset, confirmed! What do you need help with?"
];
-module.exports = { resetResponses, userResetMessages };

Environment variable template (modified)

@@ -2,3 +2,4 @@ THE_TOKEN = "DISCORD_TOKEN_HERE"
CHANNEL_IDS = 1094494101631680653,1094628334727614605
ROOT_IP = 192.168.0.15
ROOT_PORT = 8000
+DATA_DIR = /home/USERNAME/weights

docker-compose.yml (new file, 26 lines)

@@ -0,0 +1,26 @@
version: '3.9'
services:
  llama-python-server:
    container_name: llama-python-server
    restart: unless-stopped
    build:
      context: ./server
    env_file: .env
    volumes:
      - ${DATA_DIR}/weights:/usr/src/app/models
    environment:
      - HOST=llama-python-server
      - MODEL=./models/gpt4-x-alpaca-13b-native-4bit-128g.bin
  llama-python-djs-bot:
    container_name: llama-python-djs-bot
    restart: unless-stopped
    build:
      context: .
    depends_on:
      - llama-python-server
    environment:
      - THE_TOKEN
      - CHANNEL_IDS
      - ROOT_IP=llama-python-server
      - ROOT_PORT=8000
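# A possible way to bring both services up (assumes a .env file next to this
# compose file that defines THE_TOKEN, CHANNEL_IDS and DATA_DIR):
#   docker compose up -d --build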

llamabot.js (modified)

@@ -1,23 +1,16 @@
-const Discord = require('discord.js');
-const fetch = require('node-fetch');
-const emptyResponses = require('./assets/emptyMessages.js');
-const { resetResponses, userResetMessages } = require('./assets/resetMessages.js');
-const { errorMessages, busyResponses } = require('./assets/errorMessages.js');
+import "dotenv/config.js";
+import fetch from 'node-fetch';
+import { emptyResponses } from './assets/emptyMessages.js';
+import { resetResponses, userResetMessages } from './assets/resetMessages.js';
+import { errorMessages, busyResponses } from './assets/errorMessages.js';
-require('dotenv').config()
-const {
-    Client,
-    GatewayIntentBits,
-    ActivityType,
-    Partials
-} = require('discord.js');
+import { Client, GatewayIntentBits, ActivityType, Partials } from 'discord.js';
const client = new Client({
    intents: [
        GatewayIntentBits.DirectMessages,
        GatewayIntentBits.Guilds,
-        GatewayIntentBits.GuildBans,
+        GatewayIntentBits.GuildModeration,
        GatewayIntentBits.GuildMessages,
        GatewayIntentBits.MessageContent,
    ],
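The rest of llamabot.js is untouched by this hunk; the import statements above only work because the package.json added below declares "type": "module". As a rough, illustrative sketch (not part of the commit), the ROOT_IP and ROOT_PORT values injected by docker-compose might be used from the bot along these lines, assuming llama-cpp-python's OpenAI-style completions endpoint:

// Illustrative sketch only: how the compose-provided ROOT_IP/ROOT_PORT could be
// used to reach the llama-cpp-python server. The endpoint path and payload shape
// are assumptions, not taken from this commit.
import fetch from 'node-fetch';

const response = await fetch(
  `http://${process.env.ROOT_IP}:${process.env.ROOT_PORT}/v1/completions`,
  {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ prompt: 'Hello from the bot', max_tokens: 64 }),
  }
);
const completion = await response.json();
console.log(completion.choices[0].text);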

package.json (new file, 17 lines)

@@ -0,0 +1,17 @@
{
  "name": "llama-cpp-python-djs-bot",
  "version": "1.0.0",
  "description": "",
  "main": "llamabot.js",
  "type": "module",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "discord.js": "^14.9.0",
    "dotenv": "^16.0.3",
    "node-fetch": "^3.3.1"
  }
}

server/Dockerfile (new file, 11 lines)

@@ -0,0 +1,11 @@
FROM python:bullseye
RUN apt-get update; \
    apt-get install -y --no-install-recommends \
    build-essential
WORKDIR /usr/src/app
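# The [server] extra pulls in the HTTP server dependencies for llama-cpp-python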
RUN pip install --no-cache-dir llama-cpp-python[server]
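# llama_cpp.server is expected to pick up MODEL and HOST from the environment set in docker-compose.yml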
CMD python3 -m llama_cpp.server