Geinji committed on
Commit
c8113fa
1 Parent(s): 7276c3d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -1
app.py CHANGED
@@ -1,5 +1,60 @@
1
  import streamlit as st
2
- import streamlit as st
3
  from transformers import pipeline
4
  import PyPDF2
5
  import requests
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import streamlit as st
 
2
  from transformers import pipeline
3
  import PyPDF2
4
  import requests
5
# --- Configuration -----------------------------------------------------------
import os

# Groq REST endpoint used by groq_inference(). Replace with your real endpoint.
GROQ_API_URL = "https://api.groq.com/your_endpoint"  # Replace with your Groq endpoint

# NOTE(security): never commit a real API key to source control. Prefer the
# GROQ_SECRET_KEY environment variable; the placeholder default only keeps the
# original local-experiment behavior when the variable is unset.
GROQ_SECRET_KEY = os.environ.get("GROQ_SECRET_KEY", "your_secret_key")

# Extractive question-answering model run locally via transformers.
HUGGINGFACE_MODEL = "deepset/bert-base-cased-squad2"  # Choose your model


@st.cache_resource
def _load_qa_pipeline():
    """Build the Hugging Face QA pipeline once.

    Streamlit re-executes the whole script on every user interaction;
    st.cache_resource keeps the (expensive) model load from repeating.
    """
    return pipeline("question-answering", model=HUGGINGFACE_MODEL)


# Module-level handle kept so existing callers of ``qa_pipeline`` still work.
qa_pipeline = _load_qa_pipeline()
13
+
14
# Function to extract text from PDF
def extract_text_from_pdf(pdf_file):
    """Return the concatenated text of every page in *pdf_file*.

    Parameters
    ----------
    pdf_file : file-like object
        An open binary stream containing a PDF (e.g. a Streamlit upload).

    Returns
    -------
    str
        Each page's text followed by a newline; empty string for an
        empty document.
    """
    reader = PyPDF2.PdfReader(pdf_file)
    # extract_text() may return None for pages with no text layer (e.g.
    # scanned images); treat those as empty instead of letting the original
    # ``text += None + '\n'`` raise a TypeError.
    return ''.join((page.extract_text() or '') + '\n' for page in reader.pages)
21
+
22
# Function to run inference using Groq
def groq_inference(question, context):
    """Ask the Groq endpoint to answer *question* given *context*.

    Parameters
    ----------
    question : str
        The user's question.
    context : str
        Document text supplying the answer.

    Returns
    -------
    str
        The remote answer on success, otherwise a human-readable error
        string — callers display the result directly, so this never raises.
    """
    headers = {
        "Authorization": f"Bearer {GROQ_SECRET_KEY}",
        "Content-Type": "application/json",
    }
    payload = {
        "question": question,
        "context": context,
    }
    try:
        # The timeout stops an unreachable endpoint from hanging the
        # Streamlit script forever; RequestException covers DNS/connection
        # failures that would otherwise crash the whole app.
        response = requests.post(GROQ_API_URL, headers=headers, json=payload,
                                 timeout=30)
    except requests.RequestException:
        return "Error: Unable to get response from Groq."
    if response.status_code == 200:
        # .get() guards against a 200 body that lacks an 'answer' field.
        answer = response.json().get('answer')
        if answer is not None:
            return answer
    return "Error: Unable to get response from Groq."
37
+
38
# --- Streamlit UI ------------------------------------------------------------
st.title("Document Chatbot")
st.write("Upload a PDF document to interact with it!")

# Let the user pick a PDF to chat with.
pdf_upload = st.file_uploader("Choose a PDF file", type="pdf")

if pdf_upload:
    # Pull the raw text out of the upload so both backends share one context.
    doc_text = extract_text_from_pdf(pdf_upload)
    st.write("Document successfully uploaded and processed.")

    # Simple single-turn chat box.
    question = st.text_input("Ask a question about the document:")

    if question:
        # Local extractive model first...
        local_answer = qa_pipeline(question=question, context=doc_text)['answer']
        st.write("Answer from Hugging Face Model:", local_answer)

        # ...then the remote Groq endpoint for comparison.
        remote_answer = groq_inference(question, doc_text)
        st.write("Answer from Groq:", remote_answer)