Update custom_utils.py
custom_utils.py CHANGED (+17 -3)
@@ -126,12 +126,26 @@ def rag_retrieval_advanced(openai_api_key,
     return retrieval_result
 
 def inference(openai_api_key, prompt):
-    content =
+    content = (
+        "Answer the question.\n"
+        "If you don't know the answer, just say that you don't know, don't try to make up an answer.\n"
+        "Keep the answer as concise as possible.\n\n"
+        f"Question: {prompt}\n"
+        "Helpful Answer: "
+    )
+
     return invoke_llm(openai_api_key, content)
 
 def rag_inference(openai_api_key, prompt, retrieval_result):
-    content =
-
+    content = (
+        "Use the following pieces of context to answer the question at the end.\n"
+        "If you don't know the answer, just say that you don't know, don't try to make up an answer.\n"
+        "Keep the answer as concise as possible.\n\n"
+        f"{retrieval_result}\n\n"
+        f"Question: {prompt}\n"
+        "Helpful Answer: "
+    )
+
     return invoke_llm(openai_api_key, content)
 
 def invoke_llm(openai_api_key, content):
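For orientation, the two updated functions differ only in whether the retrieved context is spliced in ahead of the question; the refusal instruction, the conciseness instruction, and the "Helpful Answer:" suffix are identical in both prompts. Below is a minimal usage sketch, not code from this Space: the question string and the hard-coded retrieval_result are placeholder values, and invoke_llm, whose body lies outside this hunk, is assumed to send the composed prompt to the model and return its text reply.

# Minimal usage sketch (assumptions: OPENAI_API_KEY is set in the environment,
# and invoke_llm(openai_api_key, content), defined outside this hunk,
# returns the model's text reply for the composed prompt).
import os

from custom_utils import inference, rag_inference

openai_api_key = os.environ["OPENAI_API_KEY"]
question = "What is retrieval-augmented generation?"

# Plain inference: the prompt carries only the instructions and the question.
print(inference(openai_api_key, question))

# RAG inference: the retrieved context is placed ahead of the question.
# A hard-coded string stands in here for the output of rag_retrieval_advanced.
retrieval_result = (
    "Retrieval-augmented generation (RAG) prepends retrieved documents "
    "to the prompt so the model can ground its answer."
)
print(rag_inference(openai_api_key, question, retrieval_result))

If this were run inside the Space itself, the import line would be unnecessary, since both functions already live in custom_utils.py.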