#!/usr/bin/env python
"""Gradio demo: DALL•E 3 XL v2 LoRA on top of Fluently-XL-v4 (SDXL).

Generates images from text prompts, saves each result as a timestamped PNG
in the working directory, shows a rolling gallery of past generations, and
offers a ZIP download of every image produced so far.
"""
import os
import random
import uuid
import base64
import gradio as gr
import numpy as np
from PIL import Image
import spaces
import torch
import glob
from datetime import datetime
import zipfile
import io
from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler

DESCRIPTION = """# DALL•E 3 XL v2 High Fi"""


def create_download_link(filename):
    """Return an HTML anchor that embeds *filename* as a base64 data-URI download.

    BUG FIX: the original built ``encoded_string`` but never used it — the
    returned value was the bare text 'Download Image' with no markup, so the
    link did nothing.
    """
    with open(filename, "rb") as file:
        encoded_string = base64.b64encode(file.read()).decode('utf-8')
    download_link = (
        f'<a href="data:image/png;base64,{encoded_string}" '
        f'download="{os.path.basename(filename)}">Download Image</a>'
    )
    return download_link


def save_image(img, prompt):
    """Save *img* as a PNG named from a timestamp plus a sanitized prompt.

    BUG FIX: the raw prompt was used verbatim in the filename; prompts
    containing '/' (or other characters reserved on the host filesystem)
    made ``img.save()`` fail.
    """
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    # Keep only filesystem-safe characters, then limit filename length.
    safe_prompt = "".join(
        c if c.isalnum() or c in " _-" else "_" for c in prompt
    )[:50]
    filename = f"{timestamp}_{safe_prompt}.png"
    img.save(filename)
    return filename


def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
    """Return a fresh random seed in [0, MAX_SEED] when *randomize_seed* is set."""
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    return seed


def get_image_gallery():
    """Return every generated PNG in the working directory, newest first."""
    image_files = glob.glob("*.png")
    image_files.sort(key=os.path.getmtime, reverse=True)
    return image_files


def create_zip_archive():
    """Zip every generated image and return the archive's file path.

    BUG FIX: the original returned raw zip *bytes*, but the ``gr.File``
    output component this feeds expects a file path — the download
    silently failed. The archive is now written to disk and its path
    returned.
    """
    image_files = get_image_gallery()
    zip_path = "generated_images.zip"
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zip_file:
        for image_file in image_files:
            zip_file.write(image_file)
    return zip_path


MAX_SEED = np.iinfo(np.int32).max

if not torch.cuda.is_available():
    # Warn Spaces visitors that CPU inference is impractically slow for SDXL.
    DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"

USE_TORCH_COMPILE = 0
ENABLE_CPU_OFFLOAD = 0

if torch.cuda.is_available():
    pipe = StableDiffusionXLPipeline.from_pretrained(
        "fluently/Fluently-XL-v4",
        torch_dtype=torch.float16,
        use_safetensors=True,
    )
    pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
    pipe.load_lora_weights(
        "ehristoforu/dalle-3-xl-v2",
        weight_name="dalle-3-xl-lora-v2.safetensors",
        adapter_name="dalle",
    )
    pipe.set_adapters("dalle")
    pipe.to("cuda")


@spaces.GPU(enable_queue=True)
def generate(
    prompt: str,
    negative_prompt: str = "",
    use_negative_prompt: bool = False,
    seed: int = 0,
    width: int = 1024,
    height: int = 1024,
    guidance_scale: float = 3,
    randomize_seed: bool = False,
    progress=gr.Progress(track_tqdm=True),
):
    """Run the SDXL pipeline for one prompt.

    Returns a 4-tuple consumed by the UI wiring:
    (saved image paths, seed actually used, HTML download links,
    refreshed gallery file list).
    """
    seed = int(randomize_seed_fn(seed, randomize_seed))
    if not use_negative_prompt:
        negative_prompt = ""
    images = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        width=width,
        height=height,
        guidance_scale=guidance_scale,
        num_inference_steps=20,
        num_images_per_prompt=1,
        cross_attention_kwargs={"scale": 0.65},  # LoRA blend strength
        output_type="pil",
    ).images
    image_paths = [save_image(img, prompt) for img in images]
    download_links = [create_download_link(path) for path in image_paths]
    return image_paths, seed, download_links, get_image_gallery()


examples = [
    "A bustling cityscape at sunset, where sleek, translucent skyscrapers shimmer with holographic displays. Gentle AI-powered drones weave through the air, delivering packages and tending to floating gardens. In the foreground, a diverse group of humans and lifelike androids share a laugh at a streetside café, their animated conversation bringing warmth to the scene.",
    "An awe-inspiring orbital habitat, a massive ring structure gleaming against the starry backdrop of space. Its curved interior surface is a lush, verdant landscape dotted with futuristic buildings. AI-managed ecosystems flourish, and robotic caretakers tend to exotic alien plants in crystal-domed conservatories. Earth hangs like a jewel in the distance, visible through the habitat's transparent sections.",
    "A cozy, high-tech living room bathed in soft, ambient light. An elderly woman sits in a hover-chair, her eyes twinkling with joy as she interacts with a holographic display. Surrounding her are attentive, sleek robotic assistants, their designs seamlessly blending form and function. The room's smart surfaces ripple with gentle, soothing patterns, responding to the occupants' emotions.",
    "A breathtaking underwater research station, its transparent domes revealing a vibrant coral reef teeming with bioluminescent life. Robotic submersibles with graceful, fish-like designs glide through the water, collecting data and nurturing endangered species. Inside, scientists work alongside AI counterparts, their excited gestures visible as they make a groundbreaking discovery.",
    "A serene forest glade where ancient trees intertwine with bioluminescent technological enhancements. Tiny robotic pollinators flit from flower to flower, their delicate wings shimmering in the dappled sunlight. A group of children, some human and some android, gather around a wise AI embodied in a gnarled tree trunk, listening intently to its stories of ages past and visions of the future.",
    "An artist's studio aboard a space station, its large windows offering a stunning view of a distant nebula. The artist, a highly advanced android with expressive eyes, manipulates a floating, three-dimensional canvas of light and color. Robotic arms and AI-driven nanobots assist in bringing the artist's vision to life, creating a mesmerizing, ever-changing sculpture that seems to dance with the cosmos.",
    "A vast, underground hydroponic farm, its tiered levels stretching as far as the eye can see. Advanced AI systems control the perfect growing conditions for a myriad of crops. Robotic tenders move efficiently between the rows, their gentle touch nurturing the plants. In a central hub, a diverse team of human and AI agronomists collaborate, developing new strains of nutrient-rich foods to sustain the growing population.",
    "A futuristic classroom where students of various species – human, android, and alien – gather around a shimmering holographic display. The AI teacher, appearing as a swirling vortex of light and knowledge, adapts its form and teaching style to each student's needs. The walls of the room shift and change, transforming into immersive historical scenes or complex scientific models as the lesson progresses.",
]

css = '''
.gradio-container{max-width: 1024px !important}
h1{text-align:center}
footer {
    visibility: hidden
}
'''

with gr.Blocks(css=css, theme="pseudolab/huggingface-korea-theme") as demo:
    gr.Markdown(DESCRIPTION)
    with gr.Group():
        with gr.Row():
            prompt = gr.Text(
                label="Prompt",
                show_label=False,
                max_lines=1,
                placeholder="Enter your prompt",
                container=False,
            )
            run_button = gr.Button("Run", scale=0)
        result = gr.Gallery(label="Result", columns=1, preview=True, show_label=False)
    with gr.Accordion("Advanced options", open=False):
        use_negative_prompt = gr.Checkbox(label="Use negative prompt", value=True)
        negative_prompt = gr.Text(
            label="Negative prompt",
            lines=4,
            max_lines=6,
            value="""(deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, (NSFW:1.25)""",
            placeholder="Enter a negative prompt",
            visible=True,
        )
        seed = gr.Slider(
            label="Seed",
            minimum=0,
            maximum=MAX_SEED,
            step=1,
            value=0,
            visible=True,
        )
        randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
        with gr.Row(visible=True):
            width = gr.Slider(
                label="Width",
                minimum=512,
                maximum=2048,
                step=8,
                value=1920,
            )
            height = gr.Slider(
                label="Height",
                minimum=512,
                maximum=2048,
                step=8,
                value=1080,
            )
        with gr.Row():
            guidance_scale = gr.Slider(
                label="Guidance Scale",
                minimum=0.1,
                maximum=20.0,
                step=0.1,
                value=20.0,
            )

    image_gallery = gr.Gallery(label="Generated Images", show_label=True, columns=4, height="auto")
    download_all_button = gr.Button("Download All Images")
    # Hidden sink for the per-image download links generate() returns.
    download_links_html = gr.HTML(visible=False)

    gr.Examples(
        examples=examples,
        inputs=prompt,
        # BUG FIX: generate() returns four values; the original declared only
        # [result, seed], which would crash if the examples were executed.
        outputs=[result, seed, download_links_html, image_gallery],
        fn=generate,
        cache_examples=False,
    )

    use_negative_prompt.change(
        fn=lambda x: gr.update(visible=x),
        inputs=use_negative_prompt,
        outputs=negative_prompt,
        api_name=False,
    )

    def update_gallery():
        """Refresh the gallery component from the PNGs currently on disk."""
        return gr.update(value=get_image_gallery())

    gr.on(
        triggers=[
            prompt.submit,
            negative_prompt.submit,
            run_button.click,
        ],
        fn=generate,
        inputs=[
            prompt,
            negative_prompt,
            use_negative_prompt,
            seed,
            width,
            height,
            guidance_scale,
            randomize_seed,
        ],
        outputs=[result, seed, download_links_html, image_gallery],
        api_name="run",
    )

    download_all_button.click(
        fn=create_zip_archive,
        inputs=[],
        outputs=gr.File(label="Download ZIP"),
    )

    demo.load(fn=update_gallery, outputs=image_gallery)

if __name__ == "__main__":
    demo.queue(max_size=20).launch(show_api=False, debug=False)