gradio_together_tgi / entrypoint.sh.template
#!/bin/bash
# Start the text-generation-inference process
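# QUANTIZE is expected to be either the literal string "False" (no quantization)
# or a quantization mode accepted by text-generation-launcher (e.g. bitsandbytes or gptq).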
if [[ "$QUANTIZE" != "False" ]]; then
text-generation-launcher --model-id ${MODEL_NAME} --num-shard 1 --port 8080 --trust-remote-code --quantize ${QUANTIZE} &
else
text-generation-launcher --model-id ${MODEL_NAME} --num-shard 1 --port 8080 --trust-remote-code &
# Wait for text-generation-inference to start
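# curl retries the /health endpoint up to 60 times, 10 seconds apart (also retrying
# on connection refused), giving the server roughly 10 minutes to come up.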
curl --retry 60 --retry-delay 10 --retry-connrefused http://127.0.0.1:8080/health
# Start the Gradio app
python app/main.py &
# Wait for any process to exit
wait -n
# Exit with status of process that exited first
exit $?
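
# Illustrative usage sketch: assuming MODEL_NAME and QUANTIZE are supplied as
# environment variables (or substituted when this template is rendered), the
# script might be launched as:
#   MODEL_NAME=<hf-model-id> QUANTIZE=False ./entrypoint.sh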