update model var
commit 012566e93c
parent d18cb39ecd
@@ -11,7 +11,6 @@ services:
       - ${DATA_DIR}:/usr/src/app/models
     environment:
       - HOST=llama-python-server
-      - MODEL=./models/gpt4-x-alpaca-13b-native-4bit-128g.bin
   llama-python-djs-bot:
     container_name: llama-python-djs-bot
     restart: unless-stopped
@@ -7,7 +7,7 @@
     },
     {
       "name": "AI-API",
-      "script": "python3 -m llama_cpp.server",
+      "script": "python3 -m llama_cpp.server --model /code/ggml-vic7b-q4_0.bin",
       "args" : ""
     }
   ]
@@ -8,4 +8,4 @@ WORKDIR /usr/src/app
 
 RUN pip install --no-cache-dir llama-cpp-python[server]
 
-CMD python3 -m llama_cpp.server
+CMD python3 -m llama_cpp.server --model /usr/src/app/models/gpt4-x-alpaca-13b-native-4bit-128g.bin
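Net effect of the change: the model path is no longer injected through the MODEL environment variable in the compose file but passed directly to llama_cpp.server via --model, both in the PM2 ecosystem entry and in the Dockerfile CMD. Below is a minimal smoke-test sketch (not part of this diff) for confirming the model loaded; it assumes the server's OpenAI-compatible HTTP API on its default port 8000 and reuses the HOST name set in the compose file, so adjust host and port to your setup.

# smoke_test.py -- sketch, assumes llama_cpp.server is reachable at the
# hostname set via HOST in docker-compose and on its default port 8000.
import requests

resp = requests.post(
    "http://llama-python-server:8000/v1/completions",
    json={"prompt": "Hello,", "max_tokens": 16},
    timeout=300,  # first request can be slow while the model warms up
)
resp.raise_for_status()
# Print the generated text to verify the model passed via --model responds.
print(resp.json()["choices"][0]["text"])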