Diffusion / app.py
import gradio as gr
import concurrent.futures
# Load the Mann-E Dreams model from the Hugging Face Hub as a callable Gradio interface
model = gr.load("models/mann-e/Mann-E_Dreams")
# Define the function for a single user interaction: pass the text prompt
# to the loaded model and return the generated image
def interact(prompt):
    response = model(prompt)
    return response
# Open a thread pool; note that the executor is never given any tasks here,
# since Gradio serves requests with its own workers (see the note below)
with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
    # Build a Gradio interface around the loaded model: text prompt in, image out
    interface = gr.Interface(fn=interact, inputs="text", outputs="image")
    # Start the web app and serve user interactions
    interface.launch()
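
# A hedged sketch of an alternative, not taken from the original file: Gradio
# ships its own request queue, so the thread pool above is not needed for
# handling concurrent users. The same app could be written, roughly, as:
#
#     demo = gr.Interface(fn=interact, inputs="text", outputs="image")
#     demo.queue()   # optional: queue and limit concurrent image generations
#     demo.launch()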