llama-cpp-python-djs-bot/huggingface-config/Dockerfile

FROM ubuntu:latest
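# Node.js 18 (via the NodeSource setup script) and the pm2 process manager for the Discord bot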
RUN apt update
RUN DEBIAN_FRONTEND=noninteractive apt install curl sudo -y
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | sudo -E bash -
RUN DEBIAN_FRONTEND=noninteractive apt install nodejs -y
RUN npm i pm2 -g
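# World-writable pm2 state directories so pm2 can run as a non-root user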
RUN mkdir -p /code/.pm2
RUN mkdir -p /.pm2
RUN chmod 0777 /code/.pm2
RUN chmod 0777 /.pm2
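# Python 3 toolchain for the llama-cpp-python server, plus wget to fetch the model weights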
RUN DEBIAN_FRONTEND=noninteractive apt install wget python3 python3-pip -y
WORKDIR /code
# Install llama-cpp-python with its OpenAI-compatible server extra;
# quoting keeps the [server] extras spec from being glob-expanded by the shell.
RUN pip install --no-cache-dir "llama-cpp-python[server]"
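# Copy the bot source into /code and install its Node.js dependencies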
COPY . .
RUN npm i
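# Settings the llama-cpp-python server picks up from the environment (bind address/port, model path, prompt cache, mlock)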
ENV HOST=localhost
ENV PORT=7860
ENV MODEL=/code/ggml-vic7b-q4_0.bin
ENV CACHE=1
ENV USE_MLOCK=0
ENV REPEAT_PENALTY=1
ENV PM2_HOME=/code/.pm2
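# Download the 4-bit quantized Vicuna 7B weights that MODEL points to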
RUN wget -q https://huggingface.co/eachadea/ggml-vicuna-7b-1.1/resolve/main/ggml-vic7b-q4_0.bin
CMD ["/bin/bash", "/code/start.sh"]
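
The image's entry point, /code/start.sh, comes in with the COPY step and is not shown here. As a minimal sketch only, assuming the script starts the llama-cpp-python API server and then runs the Discord bot under pm2 (the bot entry file name is a placeholder, not taken from the repository):

#!/bin/bash
# Hypothetical start.sh sketch; the repository's real script may differ.
# Start the OpenAI-compatible llama.cpp server in the background. It reads
# MODEL, HOST, PORT, CACHE and USE_MLOCK from the environment set in the Dockerfile.
python3 -m llama_cpp.server &
# Run the Discord bot in the foreground with pm2-runtime (pm2's container mode,
# using the PM2_HOME directory created above). "index.js" is a placeholder.
pm2-runtime start index.js

Because HOST is set to localhost, the API server is reachable only from inside the container; the bot would call its OpenAI-compatible endpoint with a request along these lines:

curl -s http://localhost:7860/v1/completions \
  -H "Content-Type: application/json" \
  -d '{"prompt": "Hello", "max_tokens": 16}'

To try the image outside of Hugging Face Spaces, a build-and-run invocation could look like the following (the tag is arbitrary, and whatever credentials the bot expects still have to be supplied at run time):

docker build -t llama-djs-bot .
docker run --rm llama-djs-bot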