import streamlit as st
import os
import pickle
import time
import g4f
import tempfile
import PyPDF2
from pdf2image import convert_from_path
import pytesseract
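# Note: the OCR fallback further below assumes the system tools wrapped by
# pdf2image and pytesseract are installed (poppler for convert_from_path, the
# tesseract binary for image_to_string); PyPDF2 alone only needs a text layer.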
st.set_page_config(page_title="EDUCATIONAL ASSISTANT")
st.markdown(
    "📚 EDUCATIONAL ASSISTANT 📚",
    unsafe_allow_html=True
)
# Load and Save Conversations
conversations_file = "conversations.pkl"
@st.cache_data
def load_conversations():
    try:
        with open(conversations_file, "rb") as f:
            return pickle.load(f)
    except (FileNotFoundError, EOFError):
        return []
def save_conversations(conversations):
    # Write to a separate temporary file first, then atomically swap it into
    # place so a crash mid-write cannot corrupt the saved history.
    temp_conversations_file = conversations_file + ".tmp"
    with open(temp_conversations_file, "wb") as f:
        pickle.dump(conversations, f)
    os.replace(temp_conversations_file, conversations_file)
if 'conversations' not in st.session_state:
    st.session_state.conversations = load_conversations()
if 'current_conversation' not in st.session_state:
    st.session_state.current_conversation = [{"role": "assistant", "content": "How may I assist you today?"}]
def truncate_string(s, length=30):
    return s[:length].rstrip() + "..." if len(s) > length else s
def display_chats_sidebar():
    with st.sidebar.container():
        st.header('Settings')
        col1, col2 = st.columns([1, 1])
        with col1:
            if col1.button('Start New Chat', key="new_chat"):
                st.session_state.current_conversation = []
                st.session_state.conversations.append(st.session_state.current_conversation)
        with col2:
            if col2.button('Clear All Chats', key="clear_all"):
                st.session_state.conversations = []
                st.session_state.current_conversation = []
    if st.sidebar.button('Solve Assignment', key="summarize_bills", use_container_width=True):
        st.session_state.page = "summarize_bills"
    with st.sidebar.container():
        st.header('Conversations')
        for idx, conversation in enumerate(st.session_state.conversations):
            if conversation:
                chat_title_raw = next((msg["content"] for msg in conversation if msg["role"] == "user"), "New Chat")
                chat_title = truncate_string(chat_title_raw)
                if st.sidebar.button(f"{chat_title}", key=f"chat_button_{idx}"):
                    st.session_state.current_conversation = st.session_state.conversations[idx]
def summarize_bill():
    st.header("📚 Solve PDF Assignments 📜")
    if st.button("Back to Chat"):
        st.session_state.page = "chat"
    uploaded_file = st.file_uploader("Upload an Assignment", type=['pdf'])
    if uploaded_file is not None:
        # Persist the upload to a temporary file so the PDF/OCR readers can open
        # it by path; the file is flushed and closed before it is read back.
        with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
            tmp_file.write(uploaded_file.read())
        extracted_text = extract_text_from_pdf(tmp_file.name)
        if st.button('Solve'):
            # g4f.ChatCompletion is used here to answer the extracted assignment
            # text; swap in different solving/summarization logic if needed.
            summary = g4f.ChatCompletion.create(
                model="gpt-3.5-turbo",
                messages=[{"role": "user", "content": "Please solve this assignment:\n" + extracted_text}],
                temperature=0.5,  # adjust parameters as needed
                max_tokens=150    # adjust the token limit as needed
            )
            st.text_area("Summary", summary, height=400)
def extract_text_from_pdf(file_path: str) -> str:
    # First try the PDF's embedded text layer; if that fails or yields nothing
    # (e.g. a scanned document), render each page to an image and run OCR on it.
    try:
        with open(file_path, 'rb') as file:
            reader = PyPDF2.PdfReader(file)
            text = ''
            for page in reader.pages:
                text += page.extract_text() or ''
            if not text.strip():
                raise ValueError("No extractable text layer found")
            return text
    except Exception:
        try:
            images = convert_from_path(file_path)
            extracted_texts = [pytesseract.image_to_string(image) for image in images]
            return "\n".join(extracted_texts)
        except Exception as e:
            raise ValueError(f"Failed to process {file_path} using PDF reader and OCR. Error: {e}")
def main_app():
    for message in st.session_state.current_conversation:
        with st.chat_message(message["role"]):
            st.write(message["content"])
    def generate_response(prompt_input):
        string_dialogue = '''
        You are an educational assistant chatbot, designed to provide insightful and accurate answers in the educational domain. Your responses should be engaging and emulate a human educator to create a comfortable learning environment. Instead of simply presenting facts, aim to inspire curiosity and deeper understanding.
        Context:
        Understand the essence of the user's educational query.
        Consider the academic level and subject matter of the question.
        Access a broad knowledge base to provide well-informed responses.
        Organize the response clearly and logically.
        Deliver the answer in a manner that is both educational and relatable to human interaction.
        Human:
        '''
        # The current conversation already contains the latest user message, so
        # prompt_input is folded in via the history rather than appended twice.
        for dict_message in st.session_state.current_conversation:
            string_dialogue += dict_message["role"].capitalize() + ": " + dict_message["content"] + "\n\n"
        prompt = f"{string_dialogue}Assistant: "
        response_generator = g4f.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": prompt}],
            stream=True,
        )
        return response_generator
    if prompt := st.chat_input('Send a Message'):
        st.session_state.current_conversation.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.write(prompt)
        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                response = generate_response(prompt)
                placeholder = st.empty()
                full_response = ''
                for item in response:
                    full_response += item
                    time.sleep(0.003)
                    placeholder.markdown(full_response)
                placeholder.markdown(full_response)
        st.session_state.current_conversation.append({"role": "assistant", "content": full_response})
        save_conversations(st.session_state.conversations)
display_chats_sidebar()
if st.session_state.get('page') == "summarize_bills":
    summarize_bill()
elif st.session_state.get('page') == "chat":
    main_app()
else:
    # Default page when the app starts or when the state is not set
    main_app()
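# A minimal way to launch this locally (assuming the script is saved as app.py
# and streamlit, g4f, PyPDF2, pdf2image and pytesseract are installed):
#   streamlit run app.py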