pratikshahp committed on
Commit
9858cab
1 Parent(s): 08f60d8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +41 -32
app.py CHANGED
@@ -29,38 +29,47 @@ if uploaded_file:
29
  api_key = st.text_input("Enter your MistralAI API Key", type="password")
30
 
31
  if api_key:
32
- # Define the embedding model
33
- embeddings = MistralAIEmbeddings(model="mistral-embed", mistral_api_key=api_key)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
34
 
35
- # Create the vector store
36
- vector = FAISS.from_documents(documents, embeddings)
37
-
38
- # Define a retriever interface
39
- retriever = vector.as_retriever()
40
-
41
- # Define LLM
42
- model = ChatMistralAI(mistral_api_key=api_key)
43
-
44
- # Define prompt template
45
- prompt = ChatPromptTemplate.from_template("""Answer the following question based only on the provided context:
46
-
47
- <context>
48
- {context}
49
- </context>
50
-
51
- Question: {input}""")
52
-
53
- # Create a retrieval chain to answer questions
54
- document_chain = create_stuff_documents_chain(model, prompt)
55
- retrieval_chain = create_retrieval_chain(retriever, document_chain)
56
-
57
- # User prompt input
58
- user_prompt = st.text_input("Enter your question")
59
-
60
- if user_prompt:
61
- with st.spinner("Processing..."):
62
- response = retrieval_chain.invoke({"input": user_prompt})
63
- st.write(response["answer"])
64
 
65
  else:
66
- st.write("Please upload a PDF file to get started...")
 
29
  api_key = st.text_input("Enter your MistralAI API Key", type="password")
30
 
31
  if api_key:
32
+ try:
33
+ # Define the embedding model
34
+ embeddings = MistralAIEmbeddings(model="mistral-embed", mistral_api_key=api_key)
35
+
36
+ # Create the vector store
37
+ vector = FAISS.from_documents(documents, embeddings)
38
+
39
+ # Define a retriever interface
40
+ retriever = vector.as_retriever()
41
+
42
+ # Define LLM
43
+ model = ChatMistralAI(mistral_api_key=api_key)
44
+
45
+ # Define prompt template
46
+ prompt = ChatPromptTemplate.from_template("""Answer the following question based only on the provided context:
47
+
48
+ <context>
49
+ {context}
50
+ </context>
51
+
52
+ Question: {input}""")
53
+
54
+ # Create a retrieval chain to answer questions
55
+ document_chain = create_stuff_documents_chain(model, prompt)
56
+ retrieval_chain = create_retrieval_chain(retriever, document_chain)
57
+
58
+ # User prompt input
59
+ user_prompt = st.text_input("Enter your question")
60
+
61
+ if user_prompt:
62
+ with st.spinner("Processing..."):
63
+ response = retrieval_chain.invoke({"input": user_prompt})
64
+ if "answer" in response:
65
+ st.write(response["answer"])
66
+ else:
67
+ st.write("No answer found.")
68
 
69
+ except Exception as e:
70
+ st.error(f"Error: {e}")
71
+ # Print or log detailed error information for debugging
72
+ st.exception(e)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73
 
74
  else:
75
+ st.write("Please upload a PDF file to get started.")