NCTCMumbai
committed on
Commit
•
7dba37b
1
Parent(s):
62dc511
Update backend/query_llm.py
Browse files- backend/query_llm.py +2 -1
backend/query_llm.py
CHANGED
@@ -58,7 +58,7 @@ def format_prompt(message: str, api_kind: str):
|
|
58 |
if api_kind == "openai":
|
59 |
return messages1
|
60 |
elif api_kind == "hf":
|
61 |
-
return tokenizer.apply_chat_template(
|
62 |
elif api_kind=="gemini":
|
63 |
print(messages2)
|
64 |
return messages2
|
@@ -97,6 +97,7 @@ def generate_hf(prompt: str, history: str, temperature: float = 0.9, max_new_tok
|
|
97 |
}
|
98 |
|
99 |
formatted_prompt = format_prompt(prompt, "hf")
|
|
|
100 |
|
101 |
try:
|
102 |
stream = hf_client.text_generation(formatted_prompt, **generate_kwargs,
|
|
|
58 |
if api_kind == "openai":
|
59 |
return messages1
|
60 |
elif api_kind == "hf":
|
61 |
+
return tokenizer.apply_chat_template(messages1, tokenize=False)
|
62 |
elif api_kind=="gemini":
|
63 |
print(messages2)
|
64 |
return messages2
|
|
|
97 |
}
|
98 |
|
99 |
formatted_prompt = format_prompt(prompt, "hf")
|
100 |
+
print('formatted_prompt ', formatted_prompt )
|
101 |
|
102 |
try:
|
103 |
stream = hf_client.text_generation(formatted_prompt, **generate_kwargs,
|