import os
from dotenv import load_dotenv
import gradio as gr
from langchain_huggingface import HuggingFaceEndpoint

# Load environment variables
load_dotenv()
HF_TOKEN = os.getenv("HF_TOKEN")

# Initialize the Hugging Face endpoint
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.3",  # Replace with the desired Hugging Face model
    huggingfacehub_api_token=HF_TOKEN.strip(),
    temperature=0.7,
    max_new_tokens=300,
)
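
# Optional sanity check (an illustrative addition, not part of the original script):
# confirm the token and endpoint respond before building the UI. HuggingFaceEndpoint
# is a completion-style LLM, so invoke() returns a plain string. DEBUG_LLM_CHECK is a
# hypothetical opt-in flag used only for this sketch.
if os.getenv("DEBUG_LLM_CHECK"):
    print(llm.invoke("Reply with the single word OK."))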

# Recipe generation function
def suggest_recipes(ingredients):
    prompt = (
        f"You are an expert chef. Please suggest 3 unique recipes using the following "
        f"ingredients: {ingredients}. Provide a title for each recipe, include "
        f"preparation time, and list step-by-step directions."
    )
    try:
        # HuggingFaceEndpoint is a completion-style LLM, so invoke() returns a plain string
        generated_text = llm.invoke(prompt)

        # Split the response into individual recipes and renumber the non-empty parts
        recipes = [part.strip() for part in generated_text.split("Recipe") if part.strip()]
        structured_recipes = []
        for i, recipe in enumerate(recipes):
            structured_recipes.append(f"Recipe {i + 1}:\n{recipe}")
        return "\n\n".join(structured_recipes)
    except Exception as e:
        return f"Error: {e}"

# Gradio interface
with gr.Blocks() as app:
    gr.Markdown("# AI Recipe Generator")
    gr.Markdown("Enter the ingredients you have, and this app will generate 3 unique recipes along with preparation times!")
    with gr.Row():
        ingredients_input = gr.Textbox(
            label="Enter Ingredients (comma-separated):",
            placeholder="e.g., eggs, milk, flour"
        )
    recipe_output = gr.Textbox(label="Suggested Recipes:", lines=15, interactive=False)
    generate_button = gr.Button("Get Recipes")
    generate_button.click(suggest_recipes, inputs=ingredients_input, outputs=recipe_output)

# Launch the app
app.launch()
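
# Environment assumptions for running this script locally (a sketch inferred from the
# imports above, not taken from the original listing):
#   - a .env file next to this script with a line of the form HF_TOKEN=<your token>,
#     which load_dotenv() reads into the environment
#   - installed packages: gradio, python-dotenv, langchain-huggingface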