Αυτή η υποβολή περιλαμβάνεται σε:
2026-02-05 20:01:33 +01:00
γονέας 6ae56c9cc1
υποβολή da4fe079bd

@@ -11,7 +11,7 @@ USER root
EXPOSE 8090
RUN apt-get update \
-&& apt-get install -y curl unzip grep sed git ffmpeg nano python3-pip python3 python3-wheel \
+&& apt-get install -y curl tar grep sed git ffmpeg nano python3-pip python3 python3-wheel \
&& pip install --break-system-packages --upgrade setuptools \
&& pip install --break-system-packages -U "huggingface_hub[cli]" \
&& if [ -f requirements.txt ]; then pip install --break-system-packages -r requirements.txt; fi \
@@ -26,9 +26,9 @@ WORKDIR /app
RUN VERSION=$(curl -s https://api.github.com/repos/ggml-org/llama.cpp/releases/latest | grep '"tag_name"' | head -1 | sed 's/.*"tag_name": "\([^"]*\)".*/\1/') \
&& echo "Last llama.cpp version: $VERSION" \
-&& curl -L https://github.com/ggml-org/llama.cpp/releases/download/${VERSION}/llama-${VERSION}-bin-ubuntu-vulkan-x64.zip -o llama.zip \
-&& unzip -j llama.zip -d . \
-&& rm llama.zip
+&& curl -L https://github.com/ggml-org/llama.cpp/releases/download/${VERSION}/llama-${VERSION}-bin-ubuntu-vulkan-x64.tar.gz -o llama.tar.gz \
+&& tar -xzf llama.tar.gz -C . \
+&& rm llama.tar.gz
RUN chmod +x /app/llama-server