to python3
startup.sh CHANGED (+4 -4)
@@ -8,20 +8,20 @@ echo "LS: $(ls -als)"
 
 while true; do nvidia-smi; sleep 600; done &
 
-python -c "import torch; \
+python3 -c "import torch; \
 print(f\"is availeble = {torch.cuda.is_available()}\"); \
 print(f\"device count = {torch.cuda.device_count()}\"); \
 print(f\"current device = {torch.cuda.current_device()}\")"
 
-python -m serve.controller --host 0.0.0.0 --port 10000 &
+python3 -m serve.controller --host 0.0.0.0 --port 10000 &
 P1=$!
 
 sleep 30
 
-python -m serve.gradio_web_server --controller http://127.0.0.1:10000 --model-list-mode reload --share &
+python3 -m serve.gradio_web_server --controller http://127.0.0.1:10000 --model-list-mode reload --share &
 P2=$!
 
-python -m interactive_demo --port 40000 --model_id prism-dinosiglip+7b &
+python3 -m interactive_demo --port 40000 --model_id prism-dinosiglip+7b &
 P3=$!
 # python -m interactive_demo --port 40001 --model_family llava-v15 --model_id llava-v1.5-7b --model_dir liuhaotian/llava-v1.5-7b &
 # P4=$!
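For reference, this is how the touched section of startup.sh reads after the change, reconstructed from the "+" side of the hunk. The comments are an added gloss, not part of the commit, and they assume the usual controller / Gradio web server / model-worker serving layout suggested by the module names.

# Log GPU status with nvidia-smi every 10 minutes in the background.
while true; do nvidia-smi; sleep 600; done &

# Sanity-check that PyTorch sees the GPUs under python3 before starting services.
python3 -c "import torch; \
print(f\"is availeble = {torch.cuda.is_available()}\"); \
print(f\"device count = {torch.cuda.device_count()}\"); \
print(f\"current device = {torch.cuda.current_device()}\")"

# Controller that the web server and workers connect to (assumed role).
python3 -m serve.controller --host 0.0.0.0 --port 10000 &
P1=$!

# Give the controller time to come up before its clients connect.
sleep 30

# Gradio front end; --share exposes a public link, --model-list-mode reload re-queries the model list.
python3 -m serve.gradio_web_server --controller http://127.0.0.1:10000 --model-list-mode reload --share &
P2=$!

# Worker serving the prism-dinosiglip+7b model (assumed role).
python3 -m interactive_demo --port 40000 --model_id prism-dinosiglip+7b &
P3=$!

# Optional second worker for llava-v1.5-7b, left commented out in this commit.
# python -m interactive_demo --port 40001 --model_family llava-v15 --model_id llava-v1.5-7b --model_dir liuhaotian/llava-v1.5-7b &
# P4=$!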