import streamlit as st
from transformers import pipeline

# Load a text-generation pipeline. generate_prompts() below reads
# [0]["generated_text"] from the result, which is the pipeline's output
# format, so the model is wrapped in a pipeline here.
model = pipeline("text-generation", model="google/gemma-7b")

# Alternative: load the model weights directly instead of via pipeline().
# Note: this assumes a standard Transformers checkpoint; a GGUF repository
# such as "TheBloke/WhiteRabbitNeo-13B-GGUF" cannot be loaded through
# AutoModel.from_config(), and a bare AutoModel cannot be called with the
# pipeline-style arguments used in generate_prompts().
# import torch
# from transformers import AutoConfig, AutoModel
# config = AutoConfig.from_pretrained("TheBloke/WhiteRabbitNeo-13B-GGUF")
# model = AutoModel.from_config(config)
# model.load_state_dict(torch.load("path/to/model/weights.bin"))
st.title("Project Prompt Generator") | |
# User input fields | |
topic = st.text_input("Enter a project topic:") | |
keywords = st.multiselect("Choose relevant keywords:", ["sustainability", "data analysis", "education", "technology"], default=[]) | |
# Generate prompts button | |
if st.button("Generate Prompts"): | |
prompts = generate_prompts(topic, keywords) | |
# Display generated prompts | |
st.subheader("Generated Prompts:") | |
for prompt in prompts: | |
st.write(f"* {prompt}") | |
# Function to generate project prompts | |
def generate_prompts(topic, keywords): | |
""" | |
Generates project prompts based on user input. | |
Args: | |
topic: The main theme or area of the project. | |
keywords: A list of relevant keywords chosen by the user. | |
Returns: | |
A list of generated project prompts. | |
""" | |
prompts = [] | |
for _ in range(3): # Generate 3 prompts | |
prompt = model( | |
prompt=f"Generate a project prompt related to {topic} using the keywords {', '.join(keywords)}.", | |
max_length=150, | |
num_return_sequences=1 | |
)[0]["generated_text"] | |
prompts.append(prompt) | |
return prompts | |
# Run the app | |
if __name__ == "__main__": | |
st.run() | |