Yntec committed
Commit 0de48e4 · verified · 1 Parent(s): 679619d

Update app.py

Files changed (1): app.py (+131 -138)
app.py CHANGED
@@ -1,143 +1,136 @@
 import gradio as gr
-# import os
-# import sys
-# from pathlib import Path
-import time
-
-models =[
-    "Yntec/InsaneSurreality",
-    "Yntec/WinningBlunder",
-    "Yntec/beLIEve",
-    "Yntec/Hyperlink",
-    "Yntec/HyperRemix",
-    "Yntec/HyperPhotoGASM",
-    "Yntec/Roleplay",
-    "Yntec/Chip_n_DallE",
-    "Yntec/ArtisticVision",
-]
-
-model_functions = {}
-model_idx = 1
-for model_path in models:
     try:
-        model_functions[model_idx] = gr.Interface.load(f"models/{model_path}", live=False, preprocess=True, postprocess=False)
-    except Exception as error:
-        def the_fn(txt):
-            return None
-        model_functions[model_idx] = gr.Interface(fn=the_fn, inputs=["text"], outputs=["image"])
-    model_idx+=1
-
-
-def send_it_idx(idx):
-    def send_it_fn(prompt):
-        output = (model_functions.get(str(idx)) or model_functions.get(str(1)))(prompt)
-        return output
-    return send_it_fn
-
-def get_prompts(prompt_text):
-    return prompt_text
-
-def clear_it(val):
-    if int(val) != 0:
-        val = 0
-    else:
-        val = 0
-        pass
-    return val
-
-def all_task_end(cnt,t_stamp):
-    to = t_stamp + 360
-    et = time.time()
-    if et > to and t_stamp != 0:
-        d = gr.update(value=0)
-        tog = gr.update(value=1)
-        #print(f'to: {to} et: {et}')
-    else:
-        if cnt != 0:
-            d = gr.update(value=et)
-        else:
-            d = gr.update(value=0)
-        tog = gr.update(value=0)
-        #print (f'passing: to: {to} et: {et}')
-        pass
-    return d, tog
-
-def all_task_start():
-    print("\n\n\n\n\n\n\n")
-    t = time.gmtime()
-    t_stamp = time.time()
-    current_time = time.strftime("%H:%M:%S", t)
-    return gr.update(value=t_stamp), gr.update(value=t_stamp), gr.update(value=0)
-
-def clear_fn():
-    nn = len(models)
-    return tuple([None, *[None for _ in range(nn)]])
-
-
-
-with gr.Blocks(title="SD Models") as my_interface:
-    with gr.Column(scale=12):
-        # with gr.Row():
-        #     gr.Markdown("""- Primary prompt: what you want to draw (English words, e.g. a cat; separating words with English commas works better; click the Improve button to refine it)\n- Real prompt: the refined prompt; once it appears, click the Run button on the right to start generating""")
         with gr.Row():
-            with gr.Row(scale=6):
-                primary_prompt=gr.Textbox(label="Prompt", value="")
-                # real_prompt=gr.Textbox(label="Real prompt")
-            with gr.Row(scale=6):
-                # improve_prompts_btn=gr.Button("Improve")
-                with gr.Row():
-                    run=gr.Button("Run",variant="primary")
-                    clear_btn=gr.Button("Clear")
         with gr.Row():
-            sd_outputs = {}
-            model_idx = 1
-            for model_path in models:
-                with gr.Column(scale=3, min_width=320):
-                    with gr.Box():
-                        sd_outputs[model_idx] = gr.Image(label=model_path)
-                        pass
-                model_idx += 1
-                pass
-            pass
-
-        with gr.Row(visible=False):
-            start_box=gr.Number(interactive=False)
-            end_box=gr.Number(interactive=False)
-            tog_box=gr.Textbox(value=0,interactive=False)
-
-        start_box.change(
-            all_task_end,
-            [start_box, end_box],
-            [start_box, tog_box],
-            every=1,
-            show_progress=True)
-
-        primary_prompt.submit(all_task_start, None, [start_box, end_box, tog_box])
-        run.click(all_task_start, None, [start_box, end_box, tog_box])
-        runs_dict = {}
-        model_idx = 1
-        for model_path in models:
-            runs_dict[model_idx] = run.click(model_functions[model_idx], inputs=[primary_prompt], outputs=[sd_outputs[model_idx]])
-            model_idx += 1
-            pass
-        pass
-
-        # improve_prompts_btn_clicked=improve_prompts_btn.click(
-        #     get_prompts,
-        #     inputs=[primary_prompt],
-        #     outputs=[primary_prompt],
-        #     cancels=list(runs_dict.values()))
-        clear_btn.click(
-            clear_fn,
-            None,
-            [primary_prompt, *list(sd_outputs.values())],
-            cancels=[*list(runs_dict.values())])
-        tog_box.change(
-            clear_it,
-            tog_box,
-            tog_box,
-            cancels=[*list(runs_dict.values())])
-
-my_interface.queue(concurrency_count=600, status_update_rate=1)
-my_interface.launch(inline=True, show_api=False)
 
 
 import gradio as gr
+from random import randint
+from all_models import models
+
+from externalmod import gr_Interface_load, randomize_seed
+
+import asyncio
+import os
+from threading import RLock
+lock = RLock()
+HF_TOKEN = os.environ.get("HF_TOKEN") if os.environ.get("HF_TOKEN") else None # If private or gated models aren't used, ENV setting is unnecessary.
+
+
+def load_fn(models):
+    global models_load
+    models_load = {}
+
+    for model in models:
+        if model not in models_load.keys():
+            try:
+                m = gr_Interface_load(f'models/{model}', hf_token=HF_TOKEN)
+            except Exception as error:
+                print(error)
+                m = gr.Interface(lambda: None, ['text'], ['image'])
+            models_load.update({model: m})
+
+
+load_fn(models)
+
+
+num_models = 9
+
+default_models = models[:num_models]
+inference_timeout = 600
+MAX_SEED=666666666
+starting_seed = randint(666666000, 666666666)
+
+def extend_choices(choices):
+    return choices[:num_models] + (num_models - len(choices[:num_models])) * ['NA']
+
+
+def update_imgbox(choices):
+    choices_plus = extend_choices(choices[:num_models])
+    return [gr.Image(None, label=m, visible=(m!='NA')) for m in choices_plus]
+
+async def infer(model_str, prompt, seed=1, timeout=inference_timeout):
+    from pathlib import Path
+    kwargs = {}
+    noise = ""
+    kwargs["seed"] = seed
+    task = asyncio.create_task(asyncio.to_thread(models_load[model_str].fn,
+                               prompt=f'{prompt} {noise}', **kwargs, token=HF_TOKEN))
+    await asyncio.sleep(0)
+    try:
+        result = await asyncio.wait_for(task, timeout=timeout)
+    except (Exception, asyncio.TimeoutError) as e:
+        print(e)
+        print(f"Task timed out: {model_str}")
+        if not task.done(): task.cancel()
+        result = None
+    if task.done() and result is not None:
+        with lock:
+            png_path = "image.png"
+            result.save(png_path)
+            image = str(Path(png_path).resolve())
+        return image
+    return None
+
+
+
+
+def gen_fnseed(model_str, prompt, seed=1):
+    if model_str == 'NA':
+        return None
     try:
+        loop = asyncio.new_event_loop()
+        result = loop.run_until_complete(infer(model_str, prompt, seed, inference_timeout))
+    except (Exception, asyncio.CancelledError) as e:
+        print(e)
+        print(f"Task aborted: {model_str}")
+        result = None
+        with lock:
+            image = "https://huggingface.co/spaces/Yntec/ToyWorld/resolve/main/error.png"
+            result = image
+    finally:
+        loop.close()
+    return result
+
+with gr.Blocks(theme='Yntec/HaleyCH_Theme_Orange_Green') as demo:
+    with gr.Tab('🤗 October 2024 is the date this space was launched (they were brand new back then!) 🤗'):
+        txt_input = gr.Textbox(label='Your prompt:', lines=4)
+        gen_button = gr.Button('Generate up to 9 images in up to 3 minutes total')
+        with gr.Row():
+            seed = gr.Slider(label="Use a seed to replicate the same image later (maximum 666666666)", minimum=0, maximum=MAX_SEED, step=1, value=starting_seed, scale=3)
+            seed_rand = gr.Button("Randomize Seed 🎲", size="sm", variant="secondary", scale=1)
+            seed_rand.click(randomize_seed, None, [seed], queue=False)
+        #stop_button = gr.Button('Stop', variant = 'secondary', interactive = False)
+
+        gen_button.click(lambda s: gr.update(interactive = True), None)
+        gr.HTML(
+            """
+            <div style="text-align: center; max-width: 1200px; margin: 0 auto;">
+              <div>
+                <body>
+                  <div class="center"><p style="margin-bottom: 10px; color: #000000;">Scroll down to see more images and select models.</p>
+                  </div>
+                </body>
+              </div>
+            </div>
+            """
+        )
         with gr.Row():
+            output = [gr.Image(label = m, min_width=480) for m in default_models]
+            current_models = [gr.Textbox(m, visible = False) for m in default_models]
+
+            for m, o in zip(current_models, output):
+                gen_event = gr.on(triggers=[gen_button.click, txt_input.submit], fn=gen_fnseed,
+                                  inputs=[m, txt_input, seed], outputs=[o], concurrency_limit=None, queue=False)
+            #stop_button.click(lambda s: gr.update(interactive = False), None, stop_button, cancels = [gen_event])
+        with gr.Accordion('Model selection'):
+            model_choice = gr.CheckboxGroup(models, label = 'Untick the models you will not be using', value=default_models, interactive=True)
+            #model_choice = gr.CheckboxGroup(models, label = f'Choose up to {num_models} different models from the 2 available! Untick them to only use one!', value = default_models, multiselect = True, max_choices = num_models, interactive = True, filterable = False)
+            model_choice.change(update_imgbox, model_choice, output)
+            model_choice.change(extend_choices, model_choice, current_models)
         with gr.Row():
+            gr.HTML(
+                """
+                <div class="footer">
+                    <p> For more than a hundred times more models (that's not a typo) check out <a href="https://huggingface.co/spaces/Yntec/ToyWorld">Toy World</a>!</a>
+                    </p>
+                """
+            )
+
+demo.queue(default_concurrency_limit=200, max_size=200)
+demo.launch(show_api=False, max_threads=400)
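The commit's central change is how each model call runs: gen_fnseed() opens a private event loop, infer() pushes the blocking models_load[model_str].fn call onto a worker thread with asyncio.to_thread, and asyncio.wait_for abandons it once inference_timeout elapses. A minimal, self-contained sketch of that timeout pattern (slow_call, bounded_infer, bounded_infer_sync and the 10-second limit are illustrative stand-ins, not code from the commit):

import asyncio
import time

def slow_call(prompt: str) -> str:
    # Stand-in for the blocking per-model inference call the Space makes.
    time.sleep(2)
    return f"image for: {prompt}"

async def bounded_infer(prompt: str, timeout: float = 10.0):
    # Run the blocking call in a worker thread, as infer() does with asyncio.to_thread.
    task = asyncio.create_task(asyncio.to_thread(slow_call, prompt))
    try:
        # Bound the wait, mirroring asyncio.wait_for(task, timeout=timeout) in infer().
        return await asyncio.wait_for(task, timeout=timeout)
    except asyncio.TimeoutError:
        if not task.done():
            task.cancel()  # give up on this model, as the Space does on timeout
        return None

def bounded_infer_sync(prompt: str):
    # Drive the coroutine from synchronous code, roughly how gen_fnseed() uses
    # asyncio.new_event_loop() / run_until_complete() inside a Gradio callback.
    loop = asyncio.new_event_loop()
    try:
        return loop.run_until_complete(bounded_infer(prompt))
    finally:
        loop.close()

print(bounded_infer_sync("a cat"))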
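The new app.py also leans on two sibling modules that this diff does not touch: all_models, which must expose the models list, and externalmod, which must provide gr_Interface_load and randomize_seed. A hypothetical sketch of the shape all_models.py is assumed to have, reusing a few repo ids from the old hard-coded list purely as examples:

# all_models.py (assumed shape; the real file in the Space defines the actual list)
models = [
    "Yntec/HyperPhotoGASM",   # Hugging Face model repo ids, "user/model"
    "Yntec/ArtisticVision",
    "Yntec/Roleplay",
    # ... more ids appended over time
]

With num_models = 9, only the first nine entries get an image box by default; every entry stays selectable in the 'Model selection' checkbox group, and extend_choices() pads any shorter selection with 'NA' placeholders so the nine output slots stay aligned (update_imgbox() then hides the 'NA' boxes).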