llama-cpp-python-djs-bot/huggingface-config/Dockerfile

# Base image and basic tooling
FROM ubuntu:latest
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive apt install curl sudo -y

# Node.js 18 from NodeSource plus the pm2 process manager
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | sudo -E bash -
RUN DEBIAN_FRONTEND=noninteractive apt install nodejs -y
RUN npm i pm2 -g

# pm2 home directories, world-writable so the container can run as a non-root user
RUN mkdir -p /code/.pm2
RUN mkdir -p /.pm2
RUN chmod 0777 /code/.pm2
RUN chmod 0777 /.pm2
# Python toolchain and the llama-cpp-python server (OpenAI-compatible API)
RUN DEBIAN_FRONTEND=noninteractive apt install wget python3 python3-pip -y
WORKDIR /code
RUN pip install --no-cache-dir "llama-cpp-python[server]"

# Application code and Node.js dependencies
COPY . .
RUN npm i
# Runtime configuration read by llama_cpp.server and the bot
ENV HOST=localhost
ENV PORT=7860
ENV MODEL=/code/ggml-vic7b-q4_0.bin
ENV CACHE=1
ENV PM2_HOME=/code/.pm2
# Download the quantized Vicuna-7B model into the image
RUN wget https://huggingface.co/eachadea/ggml-vicuna-7b-1.1/resolve/main/ggml-vic7b-q4_0.bin

# Hand off to start.sh at container start
CMD /bin/bash /code/start.sh
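
The CMD hands control to /code/start.sh, which lives in the repository but is not shown in this file. A minimal sketch of what such a script could look like, assuming it brings up the llama-cpp-python API server (which reads MODEL, HOST, PORT and CACHE from the environment set above) and runs the Discord bot under pm2; the index.js entry point and the process name are illustrative assumptions, not taken from the repository:

#!/bin/bash
# Illustrative sketch only; the real start.sh may differ.
# Launch the OpenAI-compatible llama.cpp API server in the background.
# llama_cpp.server picks up MODEL, HOST, PORT and CACHE from the environment.
python3 -m llama_cpp.server &
# Run the Discord bot under pm2 (entry-point name is an assumption).
pm2 start index.js --name djs-bot
# Follow pm2's log output so the container keeps a foreground process.
pm2 logs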