Update app.py
app.py CHANGED
@@ -3,9 +3,11 @@ from huggingface_hub import InferenceClient
 
 client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
 
-def format_alpaca_prompt(user_input, system_prompt):
+def format_alpaca_prompt(user_input, system_prompt, history):
     """Formats input in Alpaca/LLaMA style"""
+    history_str = "\n".join([f"### Instruction:\n{h[0]}\n### Response:\n{h[1]}" for h in history])
     prompt = f"""{system_prompt}
+{history_str}
 
 ### Instruction:
 {user_input}
@@ -15,7 +17,7 @@ def format_alpaca_prompt(user_input, system_prompt):
     return prompt
 
 def respond(message, history, system_message, max_tokens, temperature, top_p):
-    formatted_prompt = format_alpaca_prompt(message, system_message)
+    formatted_prompt = format_alpaca_prompt(message, system_message, history)
 
     response = client.text_generation(
         formatted_prompt,
@@ -27,6 +29,8 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
     # ✅ Extract only the response
     cleaned_response = response.split("### Response:")[-1].strip()
 
+    history.append((message, cleaned_response))  # ✅ Update history with the new message and response
+
     yield cleaned_response  # ✅ Output only the answer
 
 demo = gr.ChatInterface(
@@ -39,6 +43,5 @@ demo = gr.ChatInterface(
     ],
 )
 
-if
-__name__ == "__main__":
+if __name__ == "__main__":
     demo.launch()
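For reference, a minimal sketch of how the history_str expression added in this commit flattens Gradio's (user, assistant) tuple history into Alpaca-style turns. The two-turn history below is made-up sample data for illustration, not taken from the Space:

# Made-up two-turn history in the (user_message, assistant_reply) tuple
# format that gr.ChatInterface passes into respond().
history = [("What is 2 + 2?", "4"), ("And times 3?", "12")]

# The same join expression the commit adds to format_alpaca_prompt:
history_str = "\n".join([f"### Instruction:\n{h[0]}\n### Response:\n{h[1]}" for h in history])

print(history_str)
# ### Instruction:
# What is 2 + 2?
# ### Response:
# 4
# ### Instruction:
# And times 3?
# ### Response:
# 12

Because respond() now also appends each new (message, cleaned_response) pair to history, every subsequent call sees the full conversation so far in the prompt.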