macadeliccc committed
Commit c920269 · 1 Parent(s): 71b7f64
Files changed (2)
  1. app.py +8 -4
  2. requirements.txt +1 -1
app.py CHANGED
@@ -4,13 +4,17 @@ import torch
 import subprocess
 import numpy as np

-zero = torch.Tensor([0]).cuda()
-print(zero.device) # <-- 'cpu' 🤔
-
+device_available = print(torch.cuda.is_available()) # <-- True 🤗
+device_count = print(torch.cuda.device_count()) # <-- 1 🤗
+device_name = print(torch.cuda.get_device_name()) # <-- 'A10G' 🤗
 @spaces.GPU
 def start_ochat_server():
-    print(zero.device) # <-- 'cuda:0' 🤗
+    global device_available, device_count, device_name

+    print(device_available)
+    print(device_name) # <-- 'cuda:0' 🤗
+    print(device_count) # <-- 1 🤗
+
     # Command to start the ochat inference server
     command = [
         "python", "-m", "ochat.serving.openai_api_server",
requirements.txt CHANGED
@@ -1,7 +1,7 @@
 git+https://github.com/huggingface/transformers.git
-git+https://github.com/vllm-project/vllm.git
 --extra-index-url https://download.pytorch.org/whl/cu113
 torch
+git+https://github.com/vllm-project/vllm.git
 datasets
 accelerate
 numpy
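
The reordering presumably ensures that torch, pulled from the cu113 extra index, is installed before vllm, which expects torch to be present when it builds and imports. A hedged runtime sanity check along these lines (not part of the commit) could confirm the environment:

import torch

# Confirm the torch build pulled from the cu113 extra index (assumption:
# the version tag should reflect the CUDA version the wheel was built against).
print(torch.__version__)    # e.g. a '+cu113' build if the extra index was used
print(torch.version.cuda)   # CUDA toolkit version of the installed wheel

try:
    import vllm  # vllm expects an existing torch installation
    print("vllm imported successfully")
except ImportError as err:
    print("vllm not available:", err)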