Update main.py
main.py CHANGED
@@ -103,17 +103,17 @@ async def generate_report(query: QueryModel):
             urls, optimized_search_query = search_brave(search_query, num_results=8)
             all_text_with_urls = fetch_and_extract_content(data_format, urls, query_str)
             reference = limit_tokens(str(all_text_with_urls))
-
+            user_prompt_final = user_prompt_final.format(description,reference)
         except Exception as e:
             print(e)
             query.internet = False
             print("failed to search/scrape results, falling back to LLM response")
 
     if not query.internet:
-
+        user_prompt_final = prompt_user["offline"][query.output_format].format(description)
         system_prompt_final = prompt_system["offline"]
 
-    md_report = together_response(
+    md_report = together_response(user_prompt_final, model=llm_default_medium, SysPrompt=system_prompt_final)
 
     if user_id != "test":
         insert_data(user_id, query_str, description, str(all_text_with_urls), md_report)
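For context, a minimal sketch of the prompt-selection flow this commit produces. The repo's helpers (search_brave, fetch_and_extract_content, limit_tokens, together_response) are replaced with stubs, and the prompt templates, dictionary keys, and model id are illustrative placeholders, not the Space's actual values.

```python
# Hypothetical prompt templates and model id; the Space defines its own.
prompt_user = {
    "online": {"md_report": "Write a markdown report on: {}\n\nUse these sources:\n{}"},
    "offline": {"md_report": "Write a markdown report on: {}"},
}
prompt_system = {
    "online": "Answer strictly from the provided sources.",
    "offline": "Answer from your own knowledge.",
}
llm_default_medium = "placeholder/model-id"


def together_response(user_prompt, model, SysPrompt):
    # Stub standing in for the repo's Together AI call.
    return f"[{model}] system='{SysPrompt}' user='{user_prompt[:60]}...'"


def build_report(description, output_format="md_report", internet=True):
    # Start from the online template; the commit formats it with both the
    # report description and the scraped reference text.
    user_prompt_final = prompt_user["online"][output_format]
    system_prompt_final = prompt_system["online"]
    if internet:
        try:
            # Stands in for search_brave + fetch_and_extract_content + limit_tokens.
            reference = "scraped text..."
            user_prompt_final = user_prompt_final.format(description, reference)
        except Exception as e:
            print(e)
            internet = False
            print("failed to search/scrape results, falling back to LLM response")
    if not internet:
        # Offline fallback: the template only needs the description.
        user_prompt_final = prompt_user["offline"][output_format].format(description)
        system_prompt_final = prompt_system["offline"]
    return together_response(user_prompt_final, model=llm_default_medium,
                             SysPrompt=system_prompt_final)


print(build_report("the state of open-source LLMs"))
print(build_report("the state of open-source LLMs", internet=False))
```

The shape mirrors the diff: the online branch fills the user prompt with both the description and the scraped reference, the except/offline branch swaps in the offline template and system prompt, and together_response is then called once with whichever user prompt survived.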