Spaces:
Sleeping
Sleeping
Roberta2024
committed on
Commit
•
58c8e3b
1
Parent(s):
14f5340
Update app.py
Browse files
app.py
CHANGED
@@ -28,7 +28,7 @@ def process_pdf(file_path, question):
|
|
28 |
|
29 |
pdf_loader = PyPDFLoader(file_path)
|
30 |
pages = pdf_loader.load_and_split()
|
31 |
-
context = "\n".join(str(page.page_content) for page in pages[:
|
32 |
stuff_chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
|
33 |
stuff_answer = stuff_chain({"input_documents": pages, "question": question, "context": context}, return_only_outputs=True)
|
34 |
return stuff_answer['output_text']
|
@@ -42,7 +42,7 @@ def generate_mistral_followup(answer):
|
|
42 |
mistral_prompt = f"Based on this answer: {answer}\nGenerate a follow-up question:"
|
43 |
mistral_inputs = mistral_tokenizer.encode(mistral_prompt, return_tensors='pt').to(device)
|
44 |
with torch.no_grad():
|
45 |
-
mistral_outputs = mistral_model.generate(mistral_inputs, max_length=
|
46 |
mistral_output = mistral_tokenizer.decode(mistral_outputs[0], skip_special_tokens=True)
|
47 |
return mistral_output
|
48 |
|
|
|
28 |
|
29 |
pdf_loader = PyPDFLoader(file_path)
|
30 |
pages = pdf_loader.load_and_split()
|
31 |
+
context = "\n".join(str(page.page_content) for page in pages[:200])
|
32 |
stuff_chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
|
33 |
stuff_answer = stuff_chain({"input_documents": pages, "question": question, "context": context}, return_only_outputs=True)
|
34 |
return stuff_answer['output_text']
|
|
|
42 |
mistral_prompt = f"Based on this answer: {answer}\nGenerate a follow-up question:"
|
43 |
mistral_inputs = mistral_tokenizer.encode(mistral_prompt, return_tensors='pt').to(device)
|
44 |
with torch.no_grad():
|
45 |
+
mistral_outputs = mistral_model.generate(mistral_inputs, max_length=200)
|
46 |
mistral_output = mistral_tokenizer.decode(mistral_outputs[0], skip_special_tokens=True)
|
47 |
return mistral_output
|
48 |
|