Spaces:
Runtime error
artificialguybr committed
Commit ac31486
Parent(s): 8312b78
Update app.py
app.py CHANGED
@@ -43,8 +43,11 @@ def chat(history, system_message, max_tokens, temperature, top_p, top_k, repetit
     # Apply the chat template
     gen_input = tokenizer.apply_chat_template(messages, return_tensors="pt", add_generation_prompt=True)
 
+    # Extract input_ids
+    input_ids = gen_input['input_ids']
+
     # Generate the output
-    output = model.generate(input_ids=
+    output = model.generate(input_ids=input_ids, temperature=temperature, do_sample=True, top_p=top_p, top_k=top_k, max_length=max_tokens)
 
     # Decode the output
     decoded_output = tokenizer.decode(output[0], skip_special_tokens=True)
@@ -54,6 +57,7 @@ def chat(history, system_message, max_tokens, temperature, top_p, top_k, repetit
 
     return history, history, ""
 
+
 start_message = ""
 
 CSS ="""
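For reference, below is a minimal, self-contained sketch of the generation path this commit settles on, pulled out of the Gradio callback. The model id, messages, and sampling values are placeholders rather than the Space's actual ones, and the sketch passes return_dict=True to apply_chat_template so that the gen_input['input_ids'] lookup used in the commit operates on a mapping; whether the Space relies on that flag or on a transformers version with different return behavior is an assumption.

```python
# Hypothetical, standalone sketch of the updated generation step (not the Space's full app.py).
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "placeholder/model-id"  # assumption: the real model id is defined elsewhere in app.py
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},  # placeholder system message
    {"role": "user", "content": "Hello!"},                          # placeholder user turn
]

# Apply the chat template; return_dict=True makes gen_input a mapping that
# supports the 'input_ids' lookup introduced by this commit.
gen_input = tokenizer.apply_chat_template(
    messages, return_tensors="pt", add_generation_prompt=True, return_dict=True
)

# Extract input_ids, as in the commit.
input_ids = gen_input["input_ids"].to(model.device)

# Generate the output with the same sampling arguments the commit forwards.
output = model.generate(
    input_ids=input_ids,
    temperature=0.7,   # placeholder for the UI's temperature value
    do_sample=True,
    top_p=0.9,         # placeholder for top_p
    top_k=40,          # placeholder for top_k
    max_length=1024,   # placeholder for max_tokens
)

# Decode the output.
decoded_output = tokenizer.decode(output[0], skip_special_tokens=True)
print(decoded_output)
```

Note that apply_chat_template called with only return_tensors="pt" returns the token-id tensor directly, so passing that tensor straight to model.generate is an alternative to the dict-style access shown here.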