import streamlit as st
from langchain_community.llms import Ollama

# Initialize the language model
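# Assumes a local Ollama server is running and the model has already been
# pulled (e.g. with `ollama pull tinyllama`).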
llm = Ollama(model="tinyllama")

# Streamlit UI elements
st.title("Language Model Invocation")
st.write("Enter a prompt to get a response from the language model.")

# Text input for the prompt
prompt = st.text_input("Enter a prompt:")

# Button to invoke the model
if st.button("Submit"):
    if prompt:
        # Generate the response
        response = llm.invoke(prompt)
        st.write("Response:")
        st.write(response)
    else:
        st.write("Please enter a prompt.")
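
# To launch the app (assuming this file is saved as app.py):
#   streamlit run app.py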