Spaces:
Running
on
Zero
Running
on
Zero
Commit
·
c920269
1
Parent(s):
71b7f64
test
Browse files
- app.py +8 -4
- requirements.txt +1 -1
app.py
CHANGED
@@ -4,13 +4,17 @@ import torch
|
|
4 |
import subprocess
|
5 |
import numpy as np
|
6 |
|
7 |
-
|
8 |
-
print(
|
9 |
-
|
10 |
@spaces.GPU
|
11 |
def start_ochat_server():
|
12 |
-
|
13 |
|
|
|
|
|
|
|
|
|
14 |
# Command to start the ochat inference server
|
15 |
command = [
|
16 |
"python", "-m", "ochat.serving.openai_api_server",
|
|
|
4 |
import subprocess
|
5 |
import numpy as np
|
6 |
|
7 |
+
device_available = print(torch.cuda.is_available()) # <-- True 🤗
|
8 |
+
device_count = print(torch.cuda.device_count()) # <-- 1 🤗
|
9 |
+
device_name = print(torch.cuda.get_device_name()) # <-- 'A10G' 🤗
|
10 |
@spaces.GPU
|
11 |
def start_ochat_server():
|
12 |
+
global device_available, device_count, device_name
|
13 |
|
14 |
+
print(device_available)
|
15 |
+
print(device_name) # <-- 'cuda:0' 🤗
|
16 |
+
print(device_count) # <-- 1 🤗
|
17 |
+
|
18 |
# Command to start the ochat inference server
|
19 |
command = [
|
20 |
"python", "-m", "ochat.serving.openai_api_server",
|
requirements.txt
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
git+https://github.com/huggingface/transformers.git
|
2 |
-
git+https://github.com/vllm-project/vllm.git
|
3 |
--extra-index-url https://download.pytorch.org/whl/cu113
|
4 |
torch
|
|
|
5 |
datasets
|
6 |
accelerate
|
7 |
numpy
|
|
|
1 |
git+https://github.com/huggingface/transformers.git
|
|
|
2 |
--extra-index-url https://download.pytorch.org/whl/cu113
|
3 |
torch
|
4 |
+
git+https://github.com/vllm-project/vllm.git
|
5 |
datasets
|
6 |
accelerate
|
7 |
numpy
|