Update app.py
app.py CHANGED
@@ -25,7 +25,7 @@ def generate(
     if model_name == "Felladrin/Pythia-31M-Chat-v1":
         outputs = pipe(prompt, max_length=1024, use_cache=True, penalty_alpha=0.5, top_k=2, repetition_penalty=1.0016)
     elif model_name == "Felladrin/Llama-68M-Chat-v1":
-        outputs = pipe(prompt, max_length=1024, use_cache=True, penalty_alpha=0.5, top_k=4, repetition_penalty=1.
+        outputs = pipe(prompt, max_length=1024, use_cache=True, penalty_alpha=0.5, top_k=4, repetition_penalty=1.043)
     elif model_name == "Felladrin/Smol-Llama-101M-Chat-v1":
         outputs = pipe(prompt, max_length=1024, use_cache=True, penalty_alpha=0.5, top_k=4, repetition_penalty=1.105)
     elif model_name == "Felladrin/Llama-160M-Chat-v1":
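For context, here is a minimal, self-contained sketch of how a text-generation `pipe` like the one called in this diff can be built and invoked with the Hugging Face transformers library. The pipeline construction, chat-template call, and example prompt are illustrative assumptions and are not part of this commit; only the generation parameters mirror the values shown above.

# Illustrative sketch (assumption, not from the commit): building the
# text-generation pipeline that the diff's `pipe(...)` call relies on.
from transformers import pipeline

# Model name taken from the branch changed in this commit.
pipe = pipeline("text-generation", model="Felladrin/Llama-68M-Chat-v1")

# Hypothetical chat prompt; the Space's real prompt construction is not shown here.
prompt = pipe.tokenizer.apply_chat_template(
    [{"role": "user", "content": "Hello! Who are you?"}],
    tokenize=False,
    add_generation_prompt=True,
)

# Contrastive search: penalty_alpha > 0 with a small top_k, plus the mild
# repetition penalty introduced by this change (1.043).
outputs = pipe(
    prompt,
    max_length=1024,
    use_cache=True,
    penalty_alpha=0.5,
    top_k=4,
    repetition_penalty=1.043,
)
print(outputs[0]["generated_text"])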