
LAOS-Y committed
Commit 9aa2be5
1 parent: 3343f7b

add dataset previewer

Files changed (1)
  1. edit_app.py +33 -0
edit_app.py CHANGED
@@ -7,6 +7,7 @@ from glob import glob
 import gradio as gr
 import torch
 from PIL import Image, ImageOps
+from datasets import load_dataset
 from diffusers import StableDiffusionInstructPix2PixPipeline, EulerAncestralDiscreteScheduler
 
 
@@ -97,6 +98,18 @@ def main():
         # Retrieve the image file path from the dictionary based on the selected name
         return image_options[image_name]
 
+    dataset = load_dataset("UCSC-VLAA/HQ-Edit-data-demo")
+
+    def sample():
+        sample_id = random.choice(list(range(len(dataset["train"]))))
+        sample = dataset["train"][sample_id]
+        return [sample["input_image"], sample["output_image"], sample["edit"], sample["inverse_edit"]]
+
+    def show_large_image(image_info):
+        # Returns the PIL image and caption for larger display
+        # return image_info['image'], image_info['caption']
+        return image_info
+
     with gr.Blocks() as demo:
         gr.HTML("""<h1 style="font-weight: 900; margin-bottom: 7px;">
             HQ-Edit: A High-Quality and High-Coverage Dataset for General Image Editing
@@ -145,6 +158,20 @@ def main():
 
         gr.Markdown(help_text)
 
+        with gr.Row():
+            gr.Markdown("## Dataset Preview")
+            sample_button = gr.Button("See Another Sample")
+
+        with gr.Row():
+            # Set up the Gallery component with a specific number of columns
+            # gallery = gr.Gallery(value=image_data, label="Image Gallery", type="pil", columns=2)
+            # Display for larger image
+            input_image_preview = gr.Image(label="Input Image", type="pil", height=512, width=512)
+            output_image_preview = gr.Image(label="Output Image", type="pil", height=512, width=512)
+
+        edit_text = gr.Textbox(label="Edit Instruction")
+        inv_edit_text = gr.Textbox(label="Inverse Edit Instruction")
+
         dropdown.change(show_image, inputs=dropdown, outputs=input_image)
 
         generate_button.click(
@@ -167,6 +194,12 @@ def main():
             outputs=[steps, randomize_seed, seed, randomize_cfg, text_cfg_scale, image_cfg_scale, edited_image],
         )
 
+        sample_button.click(
+            fn=sample,
+            inputs=[],
+            outputs=[input_image_preview, output_image_preview, edit_text, inv_edit_text]
+        )
+
     demo.queue()
     demo.launch(share=True, max_threads=1)
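
For reference, a minimal, standalone sketch of the previewer pattern this commit adds: load the HQ-Edit demo split, sample a random row, and wire its fields to Gradio components. This is a sketch, not the app itself; it assumes the "train" split exposes input_image, output_image, edit, and inverse_edit columns (as the diff uses), drops the height/width hints, and imports random explicitly, which the diff relies on but does not add.

# Minimal sketch of the dataset previewer (assumptions noted above).
import random

import gradio as gr
from datasets import load_dataset

dataset = load_dataset("UCSC-VLAA/HQ-Edit-data-demo")


def sample():
    # Pick a random row; returned values map positionally to `outputs` below.
    row = dataset["train"][random.randrange(len(dataset["train"]))]
    return row["input_image"], row["output_image"], row["edit"], row["inverse_edit"]


with gr.Blocks() as demo:
    with gr.Row():
        gr.Markdown("## Dataset Preview")
        sample_button = gr.Button("See Another Sample")
    with gr.Row():
        input_image_preview = gr.Image(label="Input Image", type="pil")
        output_image_preview = gr.Image(label="Output Image", type="pil")
    edit_text = gr.Textbox(label="Edit Instruction")
    inv_edit_text = gr.Textbox(label="Inverse Edit Instruction")

    # Clicking the button re-samples and refreshes all four components.
    sample_button.click(
        fn=sample,
        inputs=[],
        outputs=[input_image_preview, output_image_preview, edit_text, inv_edit_text],
    )

demo.launch()

Note that the values returned by sample() are assigned to the components in outputs by position, which is why the return order must match the component order in the click() call.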