2
0
Ficheiros
bdi_podman_serverconf/Services/llamacpp/Scripts/startChat.sh
2025-11-01 15:38:32 +01:00

22 linhas
551 B
Bash
Ficheiro executável

#!/bin/bash
# Start llama-server for the chat model in the background and record its
# PID so the signal trap installed below can shut it down cleanly.
#
# Report descrittivi: 0.6 ok; 0.55 più stabile
# (Italian: "Descriptive reports: 0.6 ok; 0.55 is more stable")

# Sampling temperature; override with GENERAL_TEMP.
TEMP=${GENERAL_TEMP:-0.6}
# Fall back to llama-server's stock values when the env vars are unset,
# instead of emitting a flag with a missing argument as the old unquoted
# expansions did. (Defaults per llama-server docs — confirm on upgrade.)
CTX=${GENERAL_CONTEXT_SIZE:-4096}
NGL=${GENERAL_GPU_LAYERS:-0}
NPREDICT=${GENERAL_MAX_TOKENS:--1}

# NOTE: the former 'exec' was dropped — it is redundant before a
# backgrounded command (the shell has already forked). CHAT_MODEL stays
# unquoted on purpose: it may expand to several words, e.g.
# "-m /models/chat.gguf".
# shellcheck disable=SC2086
/app/llama-server $CHAT_MODEL \
  -c "$CTX" -ngl "$NGL" -n "$NPREDICT" \
  --temp "$TEMP" --top-p 0.9 --top-k 40 --repeat-penalty 1.1 \
  --flash-attn auto --threads -1 --threads-batch -1 --threads-http -1 \
  --jinja \
  --timeout 600 --host 0.0.0.0 --port 8093 &
PID=$!
# Stop the background llama-server (PID set at launch) and end the
# script with a success status.
cleanup() {
  echo "Stopping llama-server..."
  # Ask the server to terminate, then reap it; errors are silenced in
  # case it has already gone away.
  kill "$PID" 2>/dev/null
  wait "$PID" 2>/dev/null
  exit 0
}
# Run cleanup when the container runtime stops us (SIGTERM) or on Ctrl-C
# (SIGINT); the trap fires while the script blocks in 'wait' below.
trap cleanup SIGTERM SIGINT
# Block for the server's lifetime so this script stays alive as the
# container's foreground process; exit status follows the server's.
wait $PID