Spaces:
Runtime error
Runtime error
gorkemgoknar
committed on
Commit
•
8328647
1
Parent(s):
c477ff9
Update app.py
Browse files
app.py
CHANGED
@@ -24,7 +24,7 @@ MAX_NEW_TOKENS = 25
|
|
24 |
GPU_LAYERS = 0
|
25 |
STOP_LIST=["###","##"]
|
26 |
|
27 |
-
stopping_criteria = StoppingCriteriaList([MaxLengthCriteria(max_length=64)])
|
28 |
|
29 |
from huggingface_hub import hf_hub_download
|
30 |
hf_hub_download(repo_id="gorkemgoknar/llama2-7f-moviechatbot-ggml-q4", local_dir=".", filename="llama2-7f-fp16-ggml-q4.bin")
|
@@ -87,7 +87,7 @@ def get_audio_url(text,character):
|
|
87 |
|
88 |
def get_response_cpp(prompt):
|
89 |
|
90 |
-
output = llm(prompt, max_tokens=32, stop=["#","sierpeda"], echo=True
|
91 |
#print(output)
|
92 |
response_text= output["choices"][0]["text"]
|
93 |
|
|
|
24 |
GPU_LAYERS = 0
|
25 |
STOP_LIST=["###","##"]
|
26 |
|
27 |
+
#stopping_criteria = StoppingCriteriaList([MaxLengthCriteria(max_length=64)])
|
28 |
|
29 |
from huggingface_hub import hf_hub_download
|
30 |
hf_hub_download(repo_id="gorkemgoknar/llama2-7f-moviechatbot-ggml-q4", local_dir=".", filename="llama2-7f-fp16-ggml-q4.bin")
|
|
|
87 |
|
88 |
def get_response_cpp(prompt):
|
89 |
|
90 |
+
output = llm(prompt, max_tokens=32, stop=["#","sierpeda"], echo=True)
|
91 |
#print(output)
|
92 |
response_text= output["choices"][0]["text"]
|
93 |
|