adi-123 committed
Commit
e39f02b
1 Parent(s): 0106e5c

Update app.py

Files changed (1)
app.py +5 -1
app.py CHANGED
@@ -8,6 +8,10 @@ from langchain.prompts import PromptTemplate
 from dotenv import load_dotenv
 from langchain_community.embeddings import HuggingFaceBgeEmbeddings
 from langchain import HuggingFaceHub
+from dotenv import load_dotenv
+import os
+
+load_dotenv()
 
 def get_pdf_text(pdf_docs):
     """Extracts text from all pages of provided PDF documents"""
@@ -34,7 +38,7 @@ def get_conversational_chain():
     prompt_template = """Answer the question concisely, focusing on the most relevant and important details from the PDF context. Refrain from mentioning any mathematical equations, even if they are present in provided context. Focus on the textual information available. Please provide direct quotations or references from PDF to back up your response. If the answer is not found within the PDF, please state "answer is not available in the context."\n\nContext:\n {context}?\nQuestion: \n{question}\nExample response format:Overview: (brief summary or introduction)Key points: (point 1: paragraph for key details)(point 2: paragraph for key details)...Use a mix of paragraphs and points to effectively convey the information."""
 
     # Adjust temperature parameter to lower value to reduce model creativity & focus on factual accuracy
-    model = HuggingFaceHub(repo_id="google/flan-t5-xl", model_kwargs={"temperature": 0.2, "max_length": 100})
+    model = HuggingFaceHub(repo_id="google/flan-t5-xl", model_kwargs={"temperature": 0.2, "max_length": 100}, token=os.environ['HUGGINGFACEHUB_API_TOKEN'])
     prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
     chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
     return chain
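
For context, the change wires a Hugging Face API token from a local .env file into the model call: python-dotenv loads the file into the process environment, and the token is then read via os.environ and handed to the HuggingFaceHub wrapper. Below is a minimal, self-contained sketch of that pattern, not the repository's code: the .env contents and the final test prompt are illustrative, and the sketch passes the token via the huggingfacehub_api_token keyword that langchain's HuggingFaceHub accepts, rather than the token= keyword used in the commit.

import os
from dotenv import load_dotenv
from langchain import HuggingFaceHub

# .env (kept out of version control) is assumed to contain a line like:
# HUGGINGFACEHUB_API_TOKEN=hf_xxxxxxxxxxxxxxxx
load_dotenv()  # copies the variables from .env into os.environ

token = os.environ["HUGGINGFACEHUB_API_TOKEN"]  # raises KeyError if the variable is missing

llm = HuggingFaceHub(
    repo_id="google/flan-t5-xl",
    model_kwargs={"temperature": 0.2, "max_length": 100},  # low temperature keeps answers factual
    huggingfacehub_api_token=token,
)

print(llm("What is retrieval-augmented generation?"))  # simple smoke test of the authenticated model

Keeping the token in .env rather than hard-coding it keeps the secret out of the repository and lets the hosting environment inject it as an environment variable instead.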