version: '3.9'

services:
  llama-python-server:
    container_name: llama-python-server
    restart: unless-stopped
    build:
      context: ./server
    env_file: .env
    volumes:
      - ${DATA_DIR}/weights:/usr/src/app/models
    environment:
      - HOST=llama-python-server
      - MODEL=./models/30B.bin

  llama-python-djs-bot:
    container_name: llama-python-djs-bot
    restart: unless-stopped
    build:
      context: .
    depends_on:
      - llama-python-server
    environment:
      - THE_TOKEN
      - CHANNEL_IDS
      - INIT_PROMPT
      - ROOT_IP=llama-python-server
      - ROOT_PORT=8000
      - CACHE=1
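
The compose file above expects a .env file in the project root: the server service loads it via env_file, and DATA_DIR, THE_TOKEN, CHANNEL_IDS, and INIT_PROMPT are passed through from the environment. A minimal sketch of that file is shown below; the variable names come from the compose file, while every value is a placeholder and the comma-separated CHANNEL_IDS format is an assumption to adjust to whatever the bot actually parses.

# .env (placeholder values only)
DATA_DIR=/path/to/llama-data           # host directory whose weights/ subfolder is mounted into the server
THE_TOKEN=replace-with-your-discord-bot-token
CHANNEL_IDS=000000000000000000         # channel ID(s) the bot should listen on; separator format is an assumption
INIT_PROMPT=You are a helpful assistant.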