Spaces:
Hazzzardous
committed on
Commit • 29972a6
1 Parent(s): 657f6af
Update app.py
app.py CHANGED
@@ -31,7 +31,7 @@ DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
 desc = '''<p>RNN with Transformer-level LLM Performance (<a href='https://github.com/BlinkDL/RWKV-LM'>github</a>).
 According to the author: "It combines the best of RNN and transformers - great performance, fast inference, saves VRAM, fast training, "infinite" ctx_len, and free sentence embedding."'''
 
-thanks = '''<p>Thanks to <a href='https://
+thanks = '''<p>Thanks to <a href='https://github.com/gururise/rwkv_gradio'>Gururise</a> for this template</p>'''
 
 
 def to_md(text):
@@ -95,7 +95,7 @@ def infer(
     done = False
     with torch.no_grad():
         for _ in range(max_new_tokens):
-            char = model.forward(stopStrings=stop, temp=temperature, top_p_usual=top_p)[
+            char = model.forward(stopStrings=stop, temp=temperature, top_p_usual=top_p, end_adj=end_adj)[
                 "output"]
             print(char, end='', flush=True)
             generated_text += char
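The substantive change in this commit is the new `end_adj` keyword passed to `model.forward` inside the Space's `infer()` generation loop (the other hunk completes the `thanks` credit string). Below is a minimal, self-contained sketch of that call pattern, assuming `end_adj` is an adjustment applied to the end-of-text token by the RWKV wrapper; the diff does not show its implementation, and the `StubRWKV` class and `generate` helper are hypothetical stand-ins so the loop can run on its own.

```python
import torch

# StubRWKV is a hypothetical stand-in for the RWKV wrapper used by app.py: its
# forward() is assumed to return one chunk of generated text per call and to
# honour stopStrings internally. Parameter names mirror the diff above; the
# stub's behaviour is invented purely so the example can run.
class StubRWKV:
    def __init__(self, script="RWKV streams one character per call."):
        self._chars = list(script)

    def forward(self, stopStrings=None, temp=1.0, top_p_usual=0.9, end_adj=0.0):
        # The real end_adj presumably biases the end-of-text logit (e.g. a
        # negative value discouraging early termination); the stub ignores it.
        char = self._chars.pop(0) if self._chars else ""
        return {"output": char}


def generate(model, max_new_tokens=40, stop=None,
             temperature=1.0, top_p=0.9, end_adj=0.0):
    stop = ["<|endoftext|>"] if stop is None else stop
    generated_text = ""
    with torch.no_grad():  # inference only, no gradients needed
        for _ in range(max_new_tokens):
            char = model.forward(stopStrings=stop, temp=temperature,
                                 top_p_usual=top_p, end_adj=end_adj)["output"]
            print(char, end='', flush=True)  # stream to stdout, as the Space does
            generated_text += char
    return generated_text


generate(StubRWKV())
```

If the Space's Gradio interface exposes `end_adj` as a control, its value would simply be forwarded from the UI callback into this call; that wiring is not visible in this diff.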