llama-cpp-python-djs-bot/huggingface-config/Dockerfile

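# Base image: Ubuntu with curl, Node.js 18 (via the NodeSource setup script),
# and the pm2 process manager installed globally.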
FROM ubuntu:latest
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive apt install curl sudo -y
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | sudo -E bash -
RUN DEBIAN_FRONTEND=noninteractive apt install nodejs -y
RUN npm i pm2 -g
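# pm2 keeps runtime state in a home directory; both candidate locations are
# created and made world-writable so pm2 still works when the container runs
# as a non-root user (e.g. on Hugging Face Spaces).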
RUN mkdir -p /code/.pm2
RUN mkdir -p /.pm2
RUN chmod 0777 /code/.pm2
RUN chmod 0777 /.pm2
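# Python toolchain needed to install and run the llama-cpp-python server.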
RUN DEBIAN_FRONTEND=noninteractive apt install wget python3 python3-pip -y
WORKDIR /code
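# llama-cpp-python with the [server] extra provides an OpenAI-compatible HTTP
# server (python3 -m llama_cpp.server) in front of the GGML model.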
RUN pip install --no-cache-dir "llama-cpp-python[server]"
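# Copy the bot source into the image and install its Node.js dependencies.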
COPY . .
RUN npm i
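# Configuration read at runtime by the llama.cpp server and the bot
# (bind address and port, model path, prompt cache, mlock, repeat penalty).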
ENV HOST=localhost
ENV PORT=7860
ENV MODEL=/code/ggml-vic7b-q4_0.bin
ENV CACHE=1
ENV USE_MLOCK=0
ENV REPEAT_PENALTY=1
ENV PM2_HOME=/code/.pm2
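# Fetch the 4-bit quantized Vicuna 7B GGML model that MODEL points at.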
RUN wget -q https://huggingface.co/eachadea/ggml-vicuna-7b-1.1/resolve/main/ggml-vic7b-q4_0.bin
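# start.sh comes from the source tree copied above; it is expected to launch
# the llama.cpp server and the Discord bot, presumably under pm2.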
CMD /bin/bash /code/start.sh