Spaces: Running on Zero
layerdiffusion committed
Commit • 6c86f59
Parent(s): 68b62c1

app.py CHANGED
@@ -20,6 +20,7 @@ os.environ['HF_HOME'] = os.path.join(os.path.dirname(__file__), 'hf_download')
 HF_TOKEN = os.environ['hf_token'] if 'hf_token' in os.environ else None
 
 import uuid
+import time
 import torch
 import numpy as np
 import gradio as gr
@@ -124,6 +125,7 @@ def resize_without_crop(image, target_width, target_height):
 @torch.inference_mode()
 def chat_fn(message: str, history: list, seed:int, temperature: float, top_p: float, max_new_tokens: int) -> str:
     print('Chat begin:', message)
+    time_stamp = time.time()
 
     np.random.seed(int(seed))
     torch.manual_seed(int(seed))
@@ -162,7 +164,7 @@ def chat_fn(message: str, history: list, seed:int, temperature: float, top_p: float, max_new_tokens: int) -> str:
     # print(outputs)
     yield "".join(outputs)
 
-    print('Chat end:', message)
+    print(f'Chat end at {time.time() - time_stamp:.2f} seconds:', message)
     return
 
 
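The change is a small piece of timing instrumentation: record a timestamp when the chat handler starts and log the elapsed time once the last chunk has been streamed. Below is a minimal, self-contained sketch of that pattern; the hard-coded token list is a stand-in for the Space's actual model call, which is not shown here.

import time
from typing import Iterator


def chat_fn(message: str) -> Iterator[str]:
    # Record when the request starts being handled.
    print('Chat begin:', message)
    time_stamp = time.time()

    # Stand-in for the real token stream produced by the model.
    outputs = []
    for token in ['Hello', ', ', 'world', '!']:
        outputs.append(token)
        yield ''.join(outputs)

    # This runs only after the final chunk has been yielded, so the logged
    # duration covers the whole streamed response.
    print(f'Chat end at {time.time() - time_stamp:.2f} seconds:', message)

Because chat_fn is a generator, the final print executes only after the consumer of the stream (the Space's chat UI) has drained it, so the logged figure is the end-to-end generation time rather than the time to the first token.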