Spaces: Running on Zero
File size: 2,835 Bytes
#!/usr/bin/env python

import gradio as gr
import spaces

from model import Model
from settings import CACHE_EXAMPLES, MAX_SEED
from utils import randomize_seed_fn
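

# create_demo() builds the Gradio Blocks UI for this text-to-3D demo.
# Model, CACHE_EXAMPLES, MAX_SEED, and randomize_seed_fn come from the Space's
# sibling modules (model.py, settings.py, utils.py).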
def create_demo(model: Model) -> gr.Blocks:
    examples = [
        "A chair that looks like an avocado",
        "An airplane that looks like a banana",
        "A spaceship",
        "A birthday cupcake",
        "A chair that looks like a tree",
        "A green boot",
        "A penguin",
        "Ube ice cream cone",
        "A bowl of vegetables",
    ]
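
    # @spaces.GPU requests a ZeroGPU device for the duration of each call, so
    # the Space only holds a GPU while a generation request is actually running.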
    @spaces.GPU
    def process_example_fn(prompt: str) -> str:
        return model.run_text(prompt)

    @spaces.GPU
    def run(prompt: str, seed: int, guidance_scale: float, num_inference_steps: int) -> str:
        return model.run_text(prompt, seed, guidance_scale, num_inference_steps)
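
    # UI layout: a one-line prompt box with a Run button, a Model3D viewer for
    # the generated mesh, and an accordion of advanced sampling options.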
    with gr.Blocks() as demo:
        # gr.Box was removed in Gradio 4, so gr.Group frames the prompt row here.
        with gr.Group():
            with gr.Row(elem_id="prompt-container"):
                prompt = gr.Text(
                    label="Prompt",
                    show_label=False,
                    max_lines=1,
                    placeholder="Enter your prompt",
                    container=False,
                )
                run_button = gr.Button("Run", scale=0)
        result = gr.Model3D(label="Result", show_label=False)
        with gr.Accordion("Advanced options", open=False):
            seed = gr.Slider(
                label="Seed",
                minimum=0,
                maximum=MAX_SEED,
                step=1,
                value=0,
            )
            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
            guidance_scale = gr.Slider(
                label="Guidance scale",
                minimum=1,
                maximum=20,
                step=0.1,
                value=15.0,
            )
            num_inference_steps = gr.Slider(
                label="Number of inference steps",
                minimum=2,
                maximum=100,
                step=1,
                value=64,
            )
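
        # Example prompts; with CACHE_EXAMPLES enabled, their outputs are
        # precomputed and reused instead of re-running the model on each click.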
        gr.Examples(
            examples=examples,
            inputs=prompt,
            outputs=result,
            fn=process_example_fn,
            cache_examples=CACHE_EXAMPLES,
        )

        inputs = [
            prompt,
            seed,
            guidance_scale,
            num_inference_steps,
        ]
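
        # Submitting the prompt or clicking Run first (optionally) randomizes
        # the seed, then .then() chains the actual generation. The seed step
        # stays out of the queue and off the API (queue=False, api_name=False);
        # generation is exposed as the "text-to-3d" API endpoint.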
        gr.on(
            triggers=[prompt.submit, run_button.click],
            fn=randomize_seed_fn,
            inputs=[seed, randomize_seed],
            outputs=seed,
            queue=False,
            api_name=False,
        ).then(
            fn=run,
            inputs=inputs,
            outputs=result,
            api_name="text-to-3d",
        )
    return demo
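

# Illustrative entry-point sketch (an assumption; the Space's real entry point,
# typically app.py, is not shown here, and Model() may take arguments).
if __name__ == "__main__":
    demo = create_demo(Model())
    demo.queue(max_size=20).launch()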