import os
import json
import csv
from dotenv import load_dotenv
import streamlit as st
from langchain_huggingface import HuggingFaceEndpoint
from langchain_core.output_parsers import StrOutputParser
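# Expected dependencies (assumed from the imports): streamlit, python-dotenv,
# langchain-core, langchain-huggingface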
# Load environment variables
load_dotenv()
hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
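# Note: meta-llama repos are gated on the Hugging Face Hub, so the token must
# belong to an account that has been granted access to the model.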
# Initialize session state variables
if "past" not in st.session_state:
st.session_state["past"] = []
if "generated" not in st.session_state:
st.session_state["generated"] = []
# Initialize parser and model
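# StrOutputParser is a pass-through that returns the model's string output as-is.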
parser = StrOutputParser()
st.title("Llama-3.2-3B-Instruct")
# Sidebar for LLM parameters
st.sidebar.header("LLM Parameters")
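# Positional slider args: label, min_value, max_value, default value, step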
temperature = st.sidebar.slider("Temperature", 0.0, 1.0, 0.8, 0.1)
max_new_tokens = st.sidebar.slider("Max New Tokens", 100, 500, 100, 50)
llm = HuggingFaceEndpoint(
    repo_id="meta-llama/Llama-3.2-3B-Instruct",
    max_new_tokens=max_new_tokens,
    temperature=temperature,
    huggingfacehub_api_token=hf_token,
)
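# Streamlit reruns the whole script on every interaction; if re-creating the
# endpoint client becomes a concern, wrapping its construction in a function
# decorated with @st.cache_resource is one option.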
# Input section
col1, col2 = st.columns(2)
with col1:
    user_input = st.text_area("Enter your query:")
with col2:
    uploaded_file = st.file_uploader("Upload a file", type=["txt", "csv", "json"])
submit = st.button("Submit")
def parse_file(file):
    """Decode an uploaded file: JSON -> dict/list, CSV -> list of rows, else plain text."""
    try:
        if file.type == "application/json":
            return json.loads(file.read().decode("utf-8"))
        elif file.type == "text/csv":
            return list(csv.reader(file.read().decode("utf-8").splitlines()))
        else:
            return file.read().decode("utf-8")
    except Exception as e:
        st.error(f"Error reading file: {e}")
        return None
if submit:
    if user_input:
        with st.spinner("Thinking..."):
            try:
                result = llm.invoke(user_input)
                output = parser.parse(result)
                st.session_state["past"].append(user_input)
                st.session_state["generated"].append(output)
                st.success("Response generated.")
            except Exception as e:
                st.error(f"Error invoking LLM: {e}")
    elif uploaded_file:
        file_content = parse_file(uploaded_file)
        if file_content:
            with st.spinner("Thinking..."):
                try:
                    result = llm.invoke(str(file_content))
                    output = parser.parse(result)
                    st.session_state["past"].append(file_content)
                    st.session_state["generated"].append(output)
                    st.success("Response generated.")
                except Exception as e:
                    st.error(f"Error invoking LLM: {e}")
    else:
        st.warning("Please enter a query or upload a file.")
# Display chat history
if st.session_state["generated"]:
    st.markdown("### Chat History")
    # Show the most recent exchange first
    for i in range(len(st.session_state["generated"]) - 1, -1, -1):
        st.markdown(f"**You:** {st.session_state['past'][i]}")
        st.markdown(f"**Llama:** {st.session_state['generated'][i]}")