Sergidev committed on
Commit
7cee559
1 Parent(s): 55cafd9
Files changed (1) hide show
  1. app.py +24 -3
app.py CHANGED
@@ -12,11 +12,11 @@ from PIL import Image, PngImagePlugin
12
  from datetime import datetime
13
  from diffusers.models import AutoencoderKL
14
  from diffusers import StableDiffusionXLPipeline, StableDiffusionXLImg2ImgPipeline
 
15
 
16
  logging.basicConfig(level=logging.INFO)
17
  logger = logging.getLogger(__name__)
18
 
19
-
20
  if not torch.cuda.is_available():
21
  DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU. </p>"
22
  IS_COLAB = utils.is_google_colab() or os.getenv("IS_COLAB") == "1"
@@ -247,6 +247,14 @@ def generate_and_update_history(*args, **kwargs):
247
  generation_history.pop()
248
  return images[0], json.dumps(metadata, indent=2), update_history_list()
249
 
 
 
 
 
 
 
 
 
250
  if torch.cuda.is_available():
251
  pipe = load_pipeline(MODEL)
252
  logger.info("Loaded on Device!")
@@ -254,8 +262,6 @@ else:
254
  pipe = None
255
 
256
  with gr.Blocks(css="style.css") as demo:
257
-
258
-
259
  gr.Markdown(DESCRIPTION)
260
 
261
  gr.DuplicateButton(
@@ -360,6 +366,9 @@ with gr.Blocks(css="style.css") as demo:
360
  json_input = gr.TextArea(label="Edit/Paste JSON Parameters", placeholder="Paste or edit JSON parameters here")
361
  generate_from_json = gr.Button("Generate from JSON")
362
 
 
 
 
363
  with gr.Accordion("Generation History", open=False) as history_accordion:
364
  history_gallery = gr.Gallery(
365
  label="History",
@@ -454,5 +463,17 @@ with gr.Blocks(css="style.css") as demo:
454
  inputs=inputs,
455
  outputs=[result, gr_metadata, history_gallery],
456
  )
 
 
 
 
 
 
 
 
 
 
 
 
457
 
458
  demo.queue(max_size=20).launch(debug=IS_COLAB, share=IS_COLAB)
 
12
  from datetime import datetime
13
  from diffusers.models import AutoencoderKL
14
  from diffusers import StableDiffusionXLPipeline, StableDiffusionXLImg2ImgPipeline
15
+ import random
16
 
17
  logging.basicConfig(level=logging.INFO)
18
  logger = logging.getLogger(__name__)
19
 
 
20
  if not torch.cuda.is_available():
21
  DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU. </p>"
22
  IS_COLAB = utils.is_google_colab() or os.getenv("IS_COLAB") == "1"
 
247
  generation_history.pop()
248
  return images[0], json.dumps(metadata, indent=2), update_history_list()
249
 
250
+ # Load the character list
251
+ with open('characterfull.txt', 'r') as f:
252
+ characters = [line.strip() for line in f.readlines()]
253
+
254
+ # Function to get a random character
255
+ def get_random_character():
256
+ return random.choice(characters)
257
+
258
  if torch.cuda.is_available():
259
  pipe = load_pipeline(MODEL)
260
  logger.info("Loaded on Device!")
 
262
  pipe = None
263
 
264
  with gr.Blocks(css="style.css") as demo:
 
 
265
  gr.Markdown(DESCRIPTION)
266
 
267
  gr.DuplicateButton(
 
366
  json_input = gr.TextArea(label="Edit/Paste JSON Parameters", placeholder="Paste or edit JSON parameters here")
367
  generate_from_json = gr.Button("Generate from JSON")
368
 
369
+ with gr.Accordion("Randomize", open=False):
370
+ random_character_button = gr.Button("Random Character")
371
+
372
  with gr.Accordion("Generation History", open=False) as history_accordion:
373
  history_gallery = gr.Gallery(
374
  label="History",
 
463
  inputs=inputs,
464
  outputs=[result, gr_metadata, history_gallery],
465
  )
466
+
467
+ random_character_button.click(
468
+ fn=get_random_character,
469
+ inputs=[],
470
+ outputs=[prompt]
471
+ )
472
+
473
+ history_gallery.select(
474
+ fn=handle_image_click,
475
+ inputs=[],
476
+ outputs=[selected_image, selected_metadata]
477
+ )
478
 
479
  demo.queue(max_size=20).launch(debug=IS_COLAB, share=IS_COLAB)