Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,22 +1,24 @@
|
|
1 |
import gradio as gr
|
|
|
2 |
import os
|
3 |
-
from langchain import PromptTemplate
|
|
|
4 |
from langchain_together import Together
|
|
|
5 |
import pdfplumber
|
|
|
6 |
|
7 |
-
# Set the API key
|
8 |
os.environ['TOGETHER_API_KEY'] = "c2f52626b97118b71c0c36f66eda4f5957c8fc475e760c3d72f98ba07d3ed3b5"
|
9 |
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
for i, page in enumerate(pdf.pages):
|
14 |
if i >= max_pages:
|
15 |
break
|
16 |
text += page.extract_text() + "\n"
|
17 |
-
return text
|
18 |
|
19 |
-
def Bot(
|
20 |
chat_template = """
|
21 |
Based on the provided context: {text}
|
22 |
Please answer the following question: {Questions}
|
@@ -32,7 +34,7 @@ def Bot(text, question):
|
|
32 |
try:
|
33 |
response = Generated_chat.invoke({
|
34 |
"text": text,
|
35 |
-
"Questions":
|
36 |
})
|
37 |
|
38 |
response_text = response['text']
|
@@ -52,33 +54,34 @@ def Bot(text, question):
|
|
52 |
except Exception as e:
|
53 |
return f"Error in generating response: {e}"
|
54 |
|
55 |
-
def ChatBot(
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
60 |
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
# Generate the bot response based on the question and extracted text
|
65 |
-
response = Bot(text, question)
|
66 |
-
|
67 |
-
# Update chat history with the user's question and bot's response
|
68 |
-
history.append(("User", question))
|
69 |
-
history.append(("Bot", response))
|
70 |
-
|
71 |
-
return history
|
72 |
|
73 |
-
#
|
74 |
-
|
75 |
-
chatbot = gr.Chatbot()
|
76 |
-
document = gr.File(label="Upload PDF Document", type="filepath")
|
77 |
-
question = gr.Textbox(label="Ask a Question", placeholder="Type your question here...")
|
78 |
|
79 |
-
|
80 |
-
|
81 |
|
82 |
-
|
|
|
|
|
|
|
|
|
|
|
83 |
|
84 |
-
iface.
|
|
|
|
1 |
import gradio as gr
|
2 |
+
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
|
3 |
import os
|
4 |
+
from langchain import PromptTemplate
|
5 |
+
from langchain import LLMChain
|
6 |
from langchain_together import Together
|
7 |
+
import re
|
8 |
import pdfplumber
|
9 |
+
# Set the Together API key (WARNING: secret is hard-coded below — move it to an environment variable / HF Space secret)
|
10 |
|
|
|
11 |
os.environ['TOGETHER_API_KEY'] = "c2f52626b97118b71c0c36f66eda4f5957c8fc475e760c3d72f98ba07d3ed3b5"
|
12 |
|
13 |
+
# Extract text from the first `max_pages` pages of the source PDF into `text`.
# NOTE(review): the PDF path is hard-coded — consider making it configurable.
text = ""
max_pages = 7
with pdfplumber.open("Diabetes Type 1.pdf") as pdf:
    for i, page in enumerate(pdf.pages):
        if i >= max_pages:
            break
        # extract_text() returns None for pages with no text layer; fall back
        # to "" so the concatenation below cannot raise TypeError.
        text += (page.extract_text() or "") + "\n"
|
|
|
20 |
|
21 |
+
def Bot(Questions):
|
22 |
chat_template = """
|
23 |
Based on the provided context: {text}
|
24 |
Please answer the following question: {Questions}
|
|
|
34 |
try:
|
35 |
response = Generated_chat.invoke({
|
36 |
"text": text,
|
37 |
+
"Questions": Questions
|
38 |
})
|
39 |
|
40 |
response_text = response['text']
|
|
|
54 |
except Exception as e:
|
55 |
return f"Error in generating response: {e}"
|
56 |
|
57 |
+
def ChatBot(Questions):
    """Route a user message: reply to greetings directly, otherwise ask Bot.

    Parameters
    ----------
    Questions : str
        The raw user input from the Gradio textbox.

    Returns
    -------
    str
        A canned greeting reply, or Bot's answer with newlines removed.
    """
    greetings = ["hi", "hello", "hey", "greetings", "what's up", "howdy"]
    question_lower = Questions.lower().strip()
    # Treat the input as a greeting only on an exact match, or when a greeting
    # is followed by a non-alphanumeric word boundary ("hello there!").
    # A plain startswith() check misfires on real questions such as
    # "history of diabetes" ("hi") or "heyday" ("hey").
    is_greeting = question_lower in greetings or any(
        question_lower.startswith(g) and not question_lower[len(g):len(g) + 1].isalnum()
        for g in greetings
    )
    if is_greeting:
        return "Hello! How can I assist you with the document today?"
    response = Bot(Questions)
    print(response)
    # Strip newlines so the single-line Gradio output reads as one paragraph.
    return response.translate(str.maketrans('', '', '\n'))
|
67 |
+
# text_embedding = model.encode(text, convert_to_tensor=True)
|
68 |
+
# statement_embedding = model.encode(statement, convert_to_tensor=True)
|
69 |
|
70 |
+
# # Compute the cosine similarity between the embeddings
|
71 |
+
# similarity = util.pytorch_cos_sim(text_embedding, statement_embedding)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
72 |
|
73 |
+
# # Print the similarity score
|
74 |
+
# print(f"Cosine similarity: {similarity.item()}")
|
|
|
|
|
|
|
75 |
|
76 |
+
# # Define a threshold for considering the statement as related
|
77 |
+
# threshold = 0.7
|
78 |
|
79 |
+
# if similarity.item() > threshold:
|
80 |
+
# response=Bot(Questions)
|
81 |
+
# return response
|
82 |
+
# else:
|
83 |
+
# response="The statement is not related to the text."
|
84 |
+
# return response
|
85 |
|
86 |
+
# Build the Gradio UI — a single text input wired to ChatBot — and serve it.
iface = gr.Interface(
    fn=ChatBot,
    inputs="text",
    outputs="text",
    title="Chatbot",
)
iface.launch(debug=True)
|