"""Streamlit app that generates Shakespeare-style text with a Llama chat model."""

import streamlit as st
import torch
from transformers import pipeline

MODEL_ID = "meta-llama/Llama-3.2-3B-Instruct"


# Cache the pipeline so Streamlit reruns don't reload the multi-GB model
# on every widget interaction.
@st.cache_resource
def load_pipeline():
    """Build the text-generation pipeline once per server process."""
    return pipeline(
        "text-generation",
        model=MODEL_ID,
        torch_dtype=torch.bfloat16,
        device_map="auto",
    )


SYSTEM_PROMPT = {
    "role": "system",
    "content": (
        "You are a chatbot that writes Shakespeare given a prompt, "
        "the text you write should be 25 lines long."
    ),
}


def poet(text):
    """Return Shakespeare-style generated text for the user prompt *text*.

    A fresh message list is built per call so earlier prompts don't leak
    into later generations (the original appended to a shared global list).
    """
    messages = [SYSTEM_PROMPT, {"role": "user", "content": text}]
    outputs = load_pipeline()(
        messages,
        max_new_tokens=256,
    )
    # generated_text holds the full chat transcript; the last entry is the
    # assistant's reply dict. Return its text content instead of printing,
    # so the caller (st.write) actually has something to display.
    return outputs[0]["generated_text"][-1]["content"]


st.title("Shakespeare Ai")
st.write("A space made to allow people to create shakespeare like text with images!")
prompt = st.text_input("Enter your prompt: ")
# Guard on the button's return value: the original discarded it, so
# generation ran on every Streamlit rerun regardless of the click.
if st.button("Generate Shakespeare"):
    st.write(poet(prompt))