AI_Chatbot / app.py
Abs6187's picture
Update app.py
e5d7180 verified
raw
history blame
1.03 kB
import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load the tokenizer and model from local files
@st.cache_resource
def load_model():
    """Load the tokenizer and causal-LM weights shipped alongside this app.

    Cached with st.cache_resource so the (expensive) load happens once per
    server process, not on every Streamlit rerun.

    Returns:
        tuple: (tokenizer, model) loaded from the app's working directory.
    """
    # from_pretrained("./") already discovers config.json / tokenizer files
    # in the directory; no extra config argument is needed (the original
    # config="config.json" string was ignored/invalid usage).
    tokenizer = AutoTokenizer.from_pretrained("./")
    model = AutoModelForCausalLM.from_pretrained("./")
    return tokenizer, model
# Initialize the model and tokenizer
# (runs at import time; cached by @st.cache_resource so repeat reruns are cheap)
tokenizer, model = load_model()
# Set up Streamlit page configuration
# NOTE: set_page_config must be the first Streamlit UI call on the page.
st.set_page_config(page_title="Legal AI Chatbot", layout="centered")
st.title("Legal AI Chatbot")
st.write("This chatbot provides responses based on a legal language model.")
# User input
user_input = st.text_input("Enter your query:")
if user_input:
    # Tokenize the query; tokenizer(...) returns input_ids plus an
    # attention_mask, which generate() needs for reliable output
    # (plain encode() drops the mask).
    inputs = tokenizer(user_input, return_tensors="pt")
    # max_length=150 caps the TOTAL sequence length (prompt + generation),
    # matching the original behavior.
    outputs = model.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_length=150,
        num_return_sequences=1,
    )
    # Decode and display the output
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    st.text_area("Response:", response, height=200)