pvanand committed on
Commit
e7a043e
1 Parent(s): 6599c5f

Update main.py

Files changed (1)
  1. main.py +24 -32
main.py CHANGED
@@ -54,63 +54,55 @@ sys_prompts = {
     },
 }
 
-class QueryModel(BaseModel):
-    topic: str = Query(default="market research", description="input query to generate Report")
-    description: str = Query(default="", description="additional context for report")
-    user_id: str = Query(default="", description="unique user id")
-    user_name: str = Query(default="", description="user name")
-    internet: bool = Query(default=True, description="Enable Internet search")
-    output_format: str = Query(default="Tabular Report", description="Output format for the report", enum=["Chat", "Full Text Report", "Tabular Report", "Tables only"])
-    data_format: str = Query(default="Structured data", description="Type of data to extract from the internet", enum=["No presets", "Structured data", "Quantitative data"])
-
-
-
-
 @app.post("/generate_report")
-@cache(expire=604800)
-async def generate_report(query: QueryModel):
-    query_str = query.topic
-    description = query.description
-    user_id = query.user_id
-    internet = "online" if query.internet else "offline"
-    sys_prompt_output_format = sys_prompts[internet][query.output_format]
-    data_format = query.data_format
+@cache(expire=604800) # Set cache expiration to 7 days (7 * 24 * 60 * 60 seconds)
+async def generate_report(
+    topic: str = Query(default="market research", description="input query to generate Report"),
+    description: str = Query(default="", description="additional context for report"),
+    user_id: str = Query(default="", description="unique user id"),
+    user_name: str = Query(default="", description="user name"),
+    internet: bool = Query(default=True, description="Enable Internet search"),
+    output_format: str = Query(default="Tabular Report", description="Output format for the report", enum=["Chat", "Full Text Report", "Tabular Report", "Tables only"]),
+    data_format: str = Query(default="Structured data", description="Type of data to extract from the internet", enum=["No presets", "Structured data", "Quantitative data"])
+):
+    query_str = topic
+    internet_status = "online" if internet else "offline"
+    sys_prompt_output_format = sys_prompts[internet_status][output_format]
     optimized_search_query = ""
     all_text_with_urls = [("","")]
-
+
     # Combine query with user keywords
-    if query.internet:
+    if internet:
         search_query = description
         # Search for relevant URLs
         try:
-            urls,optimized_search_query = search_brave(search_query, num_results=4)
+            urls, optimized_search_query = search_brave(search_query, num_results=4)
             # Fetch and extract content from the URLs
             all_text_with_urls = fetch_and_extract_content(data_format, urls, query_str)
             # Prepare the prompt for generating the report
             additional_context = limit_tokens(str(all_text_with_urls))
             prompt = f"#### COMPLETE THE TASK: {description} #### IN THE CONTEXT OF ### CONTEXT: {query_str} USING THE #### SCRAPED DATA:{additional_context}"
-
         except Exception as e:
-            query.internet = False
+            internet = False
             print("failed to search/scrape results, falling back to LLM response")
-
-    if not query.internet:
+
+    if not internet:
         prompt = f"#### COMPLETE THE TASK: {description} #### IN THE CONTEXT OF ### CONTEXT: {query_str}"
 
     md_report = together_response(prompt, model=llm_default_medium, SysPrompt=sys_prompt_output_format)
-
-    if user_id != "test":
-        insert_data(user_id, query_str, description, str(all_text_with_urls), md_report)
+
+    if user_id != "test":
+        insert_data(user_id, query_str, description, str(all_text_with_urls), md_report)
+
     references_html = dict()
     for text, url in all_text_with_urls:
         references_html[url] = str(md_to_html(text))
 
-
     # Return the generated report
     return {
         "report": md_to_html(md_report),
         "references": references_html,
-        "search_qury":optimized_search_query
+        "search_query": optimized_search_query
     }
 
 app.add_middleware(
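
Usage note (illustrative, not part of this commit): because the inputs are now declared with Query(...) directly in the function signature instead of the removed QueryModel, FastAPI reads them from the query string of the POST request rather than from a JSON body. A minimal client sketch, assuming a locally running instance; the base URL and parameter values below are placeholders:

import requests

BASE_URL = "http://localhost:7860"  # assumption: replace with the deployed host

response = requests.post(
    f"{BASE_URL}/generate_report",
    params={
        "topic": "market research",                     # placeholder values
        "description": "EV charging market in Europe",
        "user_id": "test",                              # "test" skips insert_data() in the handler
        "user_name": "demo",
        "internet": True,
        "output_format": "Tabular Report",
        "data_format": "Structured data",
    },
    timeout=300,
)
response.raise_for_status()
payload = response.json()
# Keys returned by the handler: "report", "references", "search_query"
print(payload["search_query"])

Clients that previously posted a JSON body matching QueryModel would need to switch to query parameters as above; the @cache(expire=604800) decorator continues to serve repeated identical requests from cache for 7 days, assuming the caching backend (e.g. fastapi-cache) is initialized elsewhere in main.py.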