NCTCMumbai committed
Update backend/query_llm.py

backend/query_llm.py CHANGED (+6 -6)
@@ -31,15 +31,15 @@ genai.configure(api_key=GOOGLE_API_KEY)
 OPENAI_KEY = getenv("OPENAI_API_KEY")
 HF_TOKEN = getenv("HUGGING_FACE_HUB_TOKEN")
 
-# hf_client = InferenceClient(
-#     "mistralai/Mistral-7B-Instruct-v0.1",
-#     token=HF_TOKEN
-# )
-
 hf_client = InferenceClient(
-    "mistralai/Mixtral-8x7B-Instruct-v0.1",
+    "mistralai/Mistral-7B-Instruct-v0.1",
     token=HF_TOKEN
 )
+
+# hf_client = InferenceClient(
+#     "mistralai/Mixtral-8x7B-Instruct-v0.1",
+#     token=HF_TOKEN
+# )
 def format_prompt(message: str, api_kind: str):
     """
     Formats the given message using a chat template.
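For context, below is a minimal, self-contained sketch of how an InferenceClient configured this way is typically queried from a module like query_llm.py. The helper name generate_hf, the [INST] prompt wrapping, and the generation parameters are illustrative assumptions rather than code from this repository; only the client construction mirrors the lines added in the diff above.

# Illustrative sketch, not repository code: the helper name and parameters
# below are assumptions; only the client construction mirrors the diff above.
from os import getenv

from huggingface_hub import InferenceClient

HF_TOKEN = getenv("HUGGING_FACE_HUB_TOKEN")

hf_client = InferenceClient(
    "mistralai/Mistral-7B-Instruct-v0.1",  # model selected by this commit
    token=HF_TOKEN
)

def generate_hf(message: str, max_new_tokens: int = 256) -> str:
    # Wrap the user message in the Mistral instruct template and call the
    # serverless Inference API; text_generation returns the generated string.
    prompt = f"<s>[INST] {message} [/INST]"
    return hf_client.text_generation(
        prompt,
        max_new_tokens=max_new_tokens,
        temperature=0.7,
    )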