Update app.py
app.py  CHANGED
@@ -38,6 +38,17 @@ def load_models(inp):
         out_box[z]=(gr.update(label=models[inp[z]]))
     return out_box[0],out_box[1],out_box[2],out_box[3]
 
+def format_prompt_default(message, history):
+    prompt = ""
+    if history:
+        #<start_of_turn>userHow does the brain work?<end_of_turn><start_of_turn>model
+        for user_prompt, bot_response in history:
+            prompt += f"{user_prompt}\n"
+            print(prompt)
+            prompt += f"{bot_response}\n"
+            print(prompt)
+    prompt += f"{message}\n"
+    return prompt
 
 def format_prompt_gemma(message, history):
     prompt = ""
@@ -60,13 +71,14 @@ def format_prompt_mixtral(message, history):
         prompt += f" {bot_response}</s> "
     prompt += f"[INST] {message} [/INST]"
     return prompt
+
 def format_prompt_choose(message, history, model_name):
-    if "gemma" in models[model_name].lower():
+    if "gemma" in models[model_name].lower() and "it" in models[model_name].lower():
         return format_prompt_gemma(message,history)
     if "mixtral" in models[model_name].lower():
         return format_prompt_mixtral(message,history)
     else:
-        return
+        return format_prompt_default(message,history)
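With this change, a Gemma checkpoint only gets the Gemma chat template when its repo id also contains "it" (instruction-tuned), Mixtral keeps its [INST] template, and every other model falls through to the new plain-text format_prompt_default instead of returning None. Below is a minimal standalone sketch of that dispatch and of the default prompt it produces; the models list is a placeholder for illustration, not the Space's actual model list, and the Gemma/Mixtral branches are stubbed because their templates live elsewhere in app.py.

# Sketch of the dispatch introduced by this commit (placeholder model list).
models = ["google/gemma-7b-it",
          "mistralai/Mixtral-8x7B-Instruct-v0.1",
          "tiiuae/falcon-7b"]          # hypothetical entries, not from the Space

def format_prompt_default(message, history):
    # Plain concatenation: one turn per line, no model-specific tokens.
    prompt = ""
    if history:
        for user_prompt, bot_response in history:
            prompt += f"{user_prompt}\n"
            prompt += f"{bot_response}\n"
    prompt += f"{message}\n"
    return prompt

def format_prompt_choose(message, history, model_name):
    # Same branching as the updated app.py; Gemma/Mixtral branches are stubs here.
    name = models[model_name].lower()
    if "gemma" in name and "it" in name:
        return "<gemma chat template>"
    if "mixtral" in name:
        return "<mixtral [INST] template>"
    else:
        return format_prompt_default(message, history)

history = [("Hi", "Hello!")]
print(format_prompt_choose("How does the brain work?", history, 2))
# Hi
# Hello!
# How does the brain work?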