Spaces: Running on Zero
0.17 changed requirements
- app.py +2 -2
- requirements.txt +4 -2
app.py CHANGED
@@ -115,14 +115,14 @@ def generate_both(system_prompt, input_text, chatbot_a, chatbot_b, max_new_token
         add_generation_prompt=True,
         dtype=torch.float16,
         return_tensors="pt"
-    ).to(
+    ).to(device)
 
     input_ids_b = tokenizer_b.apply_chat_template(
         new_messages_b,
         add_generation_prompt=True,
         dtype=torch.float16,
         return_tensors="pt"
-    ).to(
+    ).to(device)
 
     logging.debug(f'model_a.device: {model_a.device}, model_b.device: {model_b.device}')
 
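For context, a minimal sketch of the ZeroGPU layout that a .to(device) call like the one above usually fits into. This is not the Space's actual code: the model id is a placeholder, and the surrounding setup (where device is defined, how the models and the @spaces.GPU-decorated function are arranged) is assumed from the diff and the "Running on Zero" setting, not confirmed by this commit.

# Hypothetical sketch, not taken from app.py. Only the names `device`,
# `tokenizer_a`, and `model_a` come from the diff; everything else here
# (model id, function name, defaults) is a placeholder.
import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

device = "cuda"  # ZeroGPU Spaces target CUDA; the spaces package manages allocation

tokenizer_a = AutoTokenizer.from_pretrained("org/model-a")  # placeholder id
model_a = AutoModelForCausalLM.from_pretrained(
    "org/model-a", torch_dtype=torch.float16
).to(device)

@spaces.GPU  # borrows a GPU from the Zero pool for the duration of the call
def generate(messages, max_new_tokens=256):
    # Build the prompt and move the input ids to the same device as the model,
    # mirroring the ").to(device)" change in the hunk above.
    input_ids = tokenizer_a.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(device)
    output_ids = model_a.generate(input_ids, max_new_tokens=max_new_tokens)
    return tokenizer_a.decode(output_ids[0], skip_special_tokens=True)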
requirements.txt CHANGED
@@ -1,6 +1,8 @@
 huggingface_hub==0.23.2
 transformers==4.44.1
-torch
+torch==2.2.0
 accelerate==0.33.0
 sentencepiece==0.2.0
-spaces
+spaces==0.29.2
+gradio==4.39.0
+bitsandbytes==0.43.2
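The new bitsandbytes pin is the kind of dependency that typically enables quantized model loading through transformers. A minimal sketch of that usage follows, with a placeholder model id; whether app.py actually loads its models in 4-bit is not shown in this commit.

# Hypothetical sketch of what the bitsandbytes dependency usually enables:
# 4-bit loading via transformers' BitsAndBytesConfig. The model id is a
# placeholder, not one used by this Space.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.float16,
)
model = AutoModelForCausalLM.from_pretrained(
    "org/model-a",                   # placeholder id
    quantization_config=bnb_config,
    device_map="auto",               # uses accelerate, also pinned above
)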