vtiyyal1 committed
Commit cde1f49
1 Parent(s): 12cca3e

Upload 2 files

Files changed (2)
  1. app.py +19 -66
  2. full_chain.py +8 -21
app.py CHANGED
@@ -1,80 +1,33 @@
+
 import openai
 import gradio as gr
 from full_chain import get_response
 import os
-import logging
 
-# Configure logging
-logging.basicConfig(
-    level=logging.INFO,
-    format='%(asctime)s - %(levelname)s - %(message)s',
-    handlers=[
-        logging.FileHandler('app.log'),
-        logging.StreamHandler()
-    ]
-)
-logger = logging.getLogger(__name__)
-
-# Initialize OpenAI client
-try:
-    api_key = os.getenv("OPENAI_API_KEY")
-    if not api_key:
-        raise ValueError("OPENAI_API_KEY environment variable not set")
-    client = openai.OpenAI(api_key=api_key)
-    logger.info("OpenAI client initialized successfully")
-except Exception as e:
-    logger.error(f"Failed to initialize OpenAI client: {str(e)}")
-    raise
+api_key = os.getenv("OPENAI_API_KEY")
+client = openai.OpenAI(api_key=api_key)
+
 
 def create_hyperlink(url, title, domain):
-    """Create HTML hyperlink with domain information."""
-    return f"<a href='{url}'>{title}</a> ({domain})"
+    return f"<a href='{url}'>{title}</a>" + " (" + domain + ")"
+
 
 def predict(message, history):
-    """Process user message and return response with source links."""
-    try:
-        logger.info(f"Processing new query: {message}")
-
-        # Get response from the chain
-        responder, links, titles, domains = get_response(message, rerank_type="crossencoder")
-        logger.info(f"Received response with {len(links)} sources")
-
-        # Create hyperlinks for sources
-        formatted_links = [create_hyperlink(link, title, domain)
-                           for link, title, domain in zip(links, titles, domains)]
-
-        # Combine response with sources
-        out = responder + "\n" + "\n".join(formatted_links)
-
-        logger.info("Response generated successfully")
-        return out
-
-    except Exception as e:
-        error_msg = f"Error processing query: {str(e)}"
-        logger.error(error_msg)
-        return f"An error occurred while processing your request: {str(e)}"
-
-# Define example queries
-EXAMPLE_QUERIES = [
-    "How many Americans Smoke?",
-    "What are some measures taken by the Indian Government to reduce the smoking population?",
-    "Does smoking negatively affect my health?"
-]
-
-# Initialize and launch Gradio interface
-def main():
-    try:
-        interface = gr.ChatInterface(
-            predict,
-            examples=EXAMPLE_QUERIES,
-            title="Tobacco Information Assistant",
-            description="Ask questions about tobacco-related topics and get answers with reliable sources."
-        )
-        logger.info("Starting Gradio interface")
-        interface.launch()
-    except Exception as e:
-        logger.error(f"Failed to launch Gradio interface: {str(e)}")
-        raise
-
-if __name__ == "__main__":
-    main()
+    print("get_responses: ")
+    # print(get_response(message, rerank_type="crossencoder"))
+    responder, links, titles, domains = get_response(message, rerank_type="crossencoder")
+    for i in range(len(links)):
+        links[i] = create_hyperlink(links[i], titles[i], domains[i])
+
+    out = responder + "\n" + "\n".join(links)
+
+    return out
+
+
+gr.ChatInterface(predict,
+                 examples = [
+                     "How many Americans Smoke?",
+                     "What are some measures taken by the Indian Government to reduce the smoking population?",
+                     "Does smoking negatively affect my health?"
+                 ]
+                 ).launch()
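
For reference, the rewritten predict() builds the chat reply by turning each source into an HTML anchor and appending the list to the answer text. A minimal, self-contained sketch of that formatting step, using made-up example values in place of real get_response output:

def create_hyperlink(url, title, domain):
    # Same formatting as the new app.py helper above
    return f"<a href='{url}'>{title}</a>" + " (" + domain + ")"

# Made-up example data, only to show the shape of the final reply
responder = "Smoking harms nearly every organ of the body."
links = ["https://example.org/a", "https://example.org/b"]
titles = ["Example source A", "Example source B"]
domains = ["example.org", "example.org"]

links = [create_hyperlink(u, t, d) for u, t, d in zip(links, titles, domains)]
print(responder + "\n" + "\n".join(links))
# Smoking harms nearly every organ of the body.
# <a href='https://example.org/a'>Example source A</a> (example.org)
# <a href='https://example.org/b'>Example source B</a> (example.org)
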
full_chain.py CHANGED
@@ -9,29 +9,16 @@ from feed_to_llm_v2 import feed_articles_to_gpt_with_links
 
 
 def get_response(question, rerank_type="crossencoder", llm_type="chat"):
-
-    try:
-
-        csv_path = save_solr_articles_full(question, keyword_type="rake")
-
-        reranked_out = crossencoder_rerank_answer(csv_path, question)
-
-
-        # Prepare source metadata for citations
-        citations = [
-            {"title": article["title"], "url": article["url"], "source": article["source"]}
-            for article in reranked_out
-        ]
-
-
-        result = feed_articles_to_gpt_with_links(reranked_out, question, citations)
-
-
-        return result
-    except Exception as e:
-        return "", [], [], []
+    csv_path = save_solr_articles_full(question, keyword_type="rake")
+    reranked_out = crossencoder_rerank_answer(csv_path, question)
+    return feed_articles_to_gpt_with_links(reranked_out, question)
 
 
+    # save_path = save_solr_articles_full(question)
+    # information = crossencoder_rerank_answer(save_path, question)
+    # response, links, titles = feed_articles_to_gpt_with_links(information, question)
+    #
+    # return response, links, titles
 
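
Note that the new get_response() simply forwards whatever feed_articles_to_gpt_with_links() returns, and app.py unpacks that value as responder, links, titles, domains. A sketch of the return contract this implies; the body below is an assumption for illustration, the real implementation lives in feed_to_llm_v2:

def feed_articles_to_gpt_with_links(information, question):
    # Hypothetical stand-in: the actual helper queries the LLM with the
    # reranked articles and returns the answer plus per-source metadata.
    response = "..."   # answer text shown in the chat
    links = []         # article URLs
    titles = []        # article titles
    domains = []       # source domains used in the hyperlink suffix
    return response, links, titles, domains
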