vlm-demo / startup.sh
#!/bin/bash
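# Start the model controller in the background and record its PID.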
python -m serve.controller --host 0.0.0.0 --port 10000 &
P1=$!
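# Start the Gradio web UI in the background, pointed at the local controller;
# --share exposes a public link, and --model-list-mode reload refreshes the model list as workers register.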
python -m serve.gradio_web_server --controller http://127.0.0.1:10000 --model-list-mode reload --share &
P2=$!
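# Give the controller and web server time to come up before continuing.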
sleep 30
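# Debug output: print the working directory, its contents, and the environment (including TEST_SECRET).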
echo "PWD:"
pwd
echo "LS:"
ls -als
echo "TEST SECRET: $TEST_SECRET"
echo "ENV: $(env)"
echo "$HF_TOKEN" > .hf_token
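# Launch both model workers in the background so they serve concurrently; the final wait keeps the script alive while the services run.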
python -m interactive_demo --port 40000 --model_id prism-dinosiglip+7b &
P3=$!
python -m interactive_demo --port 40001 --model_family llava-v15 --model_id llava-v1.5-7b --model_dir liuhaotian/llava-v1.5-7b &
P4=$!
wait $P1 $P2 $P3 $P4