File size: 1,768 Bytes
21bcc9a
0a31b84
21bcc9a
 
 
 
 
 
 
 
 
 
6b0847f
 
21bcc9a
 
 
 
 
 
 
6b0847f
21bcc9a
 
c8e2317
21bcc9a
b5ecf2f
 
21bcc9a
b5ecf2f
21bcc9a
 
4206de3
21bcc9a
 
 
 
 
 
4716bf2
21bcc9a
b5ecf2f
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import gradio as gr
import torch
#from torch import autocast // only for GPU

from PIL import Image

import os
# Hugging Face access token read from the environment; used below to
# authenticate `from_pretrained`. May be None if the variable is unset.
MY_SECRET_TOKEN=os.environ.get('HF_TOKEN_SD')

from diffusers import StableDiffusionPipeline
#from diffusers import StableDiffusionImg2ImgPipeline

def empty_checker(images, **kwargs):return images, False

print("start generating")

# Alias for the env-provided HF token read above.
YOUR_TOKEN=MY_SECRET_TOKEN

# CPU-only inference (no CUDA assumed in this deployment).
device="cpu"

# Download/load the model weights from the Hub (network side effect at import
# time); requires the auth token if the repo is gated.
pipe = StableDiffusionPipeline.from_pretrained("AkiKagura/mkgen-diffusion", use_auth_token=YOUR_TOKEN)
# Replace the NSFW filter with the pass-through checker defined above.
pipe.safety_checker = empty_checker
pipe.to(device)

# Output gallery component for the Interface below.
# NOTE(review): `.style(...)` on components is an old Gradio API, removed in
# Gradio 4.x — confirm the pinned gradio version before upgrading.
gallery = gr.Gallery(label="Generated images", show_label=False, elem_id="gallery").style(grid=[1], height="auto")

def infer(prompt, guide, steps, seed, img_w, img_h): 
    """Run the Stable Diffusion pipeline for one prompt.

    Parameters
    ----------
    prompt : str
        Text prompt for generation.
    guide : float
        Classifier-free guidance scale.
    steps : int
        Number of denoising inference steps.
    seed : int
        RNG seed for reproducible output.
    img_w, img_h : int
        Output image width and height in pixels.

    Returns
    -------
    list
        The generated PIL images (a single image here, since one prompt is run).
    """
    generator = torch.Generator('cpu').manual_seed(seed)
    # BUG FIX: the seeded generator was previously created but never passed to
    # the pipeline, so the "Seed" slider had no effect. Pass it explicitly so
    # identical seeds reproduce identical images.
    output = pipe([prompt] * 1, guidance_scale=guide, num_inference_steps=steps,
                  width=img_w, height=img_h, generator=generator)
    # Copy the images out of the pipeline output container.
    return list(output["images"])

print("okay")

# UI strings for the Gradio Interface.
title="Marco Generation"
description="Use 'mkmk woman' to get Marco pics. <br />Warning: Slow process... about 10 min inference time." 

# Build and launch the web UI: a text prompt plus sliders matching infer()'s
# (guide, steps, seed, img_w, img_h) parameters, in order. Width/height step
# of 256 restricts sizes to 512 or 768. `.launch()` blocks the process.
# NOTE(review): `enable_queue=` on launch() is redundant with `.queue()` and
# removed in newer Gradio — confirm the pinned gradio version.
gr.Interface(fn=infer, inputs=["text",
    gr.Slider(2, 15, value = 7, label = 'Guidence Scale'),
    gr.Slider(10, 50, value = 25, step = 1, label = 'Number of Iterations'),
    gr.Slider(label = "Seed", minimum = 0, maximum = 2147483647, step = 1, randomize = True),
    gr.Slider(label='Width', minimum = 512, maximum = 768, step = 256, value = 512),
    gr.Slider(label='Height', minimum = 512, maximum = 768, step = 256, value = 512)], outputs=gallery,title=title,description=description).queue(max_size=100).launch(enable_queue=True)