fantaxy committed (verified)
Commit 8ea8af0 · 1 Parent(s): 797e81e

Update app.py

Files changed (1)
app.py  +19 -21
app.py CHANGED
@@ -6,10 +6,9 @@ import torch
 from diffusers import DiffusionPipeline
 from transformers import pipeline
 
-# 번역 파이프라인 및 하드웨어 설정
+# Translation pipeline and hardware settings
 device = "cuda" if torch.cuda.is_available() else "cpu"
 translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en", device=device)
-
 dtype = torch.bfloat16
 pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=dtype).to(device)
 
@@ -21,14 +20,14 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_in
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
     generator = torch.Generator().manual_seed(seed)
-
-    # 한글 입력 감지 및 번역
+
+    # Korean input detection and translation
     if any('\uAC00' <= char <= '\uD7A3' for char in prompt):
-        print("한국어 프롬프트 번역 중...")
+        print("Translating Korean prompt...")
         translated_prompt = translator(prompt, max_length=512)[0]['translation_text']
-        print("번역된 프롬프트:", translated_prompt)
+        print("Translated prompt:", translated_prompt)
         prompt = translated_prompt
-
+
     image = pipe(
         prompt = prompt,
         width = width,
@@ -37,10 +36,9 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_in
         generator = generator,
         guidance_scale=0.0
     ).images[0]
+
     return image, seed
 
-# (이전 import 구문 및 파이프라인 설정 유지)
-
 examples = [
     ["[한글] [스타일: 모던] [색상: 빨강과 검정] [컨셉: 식당] [텍스트: '맛있는집'] [배경: 심플]"],
     ["[Style: Corporate] [Color: Navy and Silver] [Concept: Finance] [Text: 'TRUST'] [Background: Professional]"],
@@ -63,25 +61,25 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
     with gr.Column(elem_id="container"):
         with gr.Group():
             prompt = gr.Text(
-                label="로고 설명",
-                placeholder="로고 디자인을 상세히 설명해주세요 (한글 입력 가능)",
+                label="PROMPT",
+                placeholder="Text input Prompt (Korean input supported)",
                 lines=2
             )
-            run_button = gr.Button("로고 생성", variant="primary")
+            run_button = gr.Button("Generate Logo", variant="primary")
 
         with gr.Row():
-            result = gr.Image(label="생성된 로고", show_label=True)
+            result = gr.Image(label="Generated Logo", show_label=True)
 
-        with gr.Accordion("고급 설정", open=False):
+        with gr.Accordion("Advanced Settings", open=False):
             with gr.Row():
-                seed = gr.Slider(label="시드", minimum=0, maximum=MAX_SEED, step=1, value=0)
-                randomize_seed = gr.Checkbox(label="랜덤 시드", value=True)
+                seed = gr.Slider(label="Seed", minimum=0, maximum=MAX_SEED, step=1, value=0)
+                randomize_seed = gr.Checkbox(label="Random Seed", value=True)
 
             with gr.Row():
-                width = gr.Slider(label="너비", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=512)
-                height = gr.Slider(label="높이", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=512)
-                num_inference_steps = gr.Slider(label="품질", minimum=1, maximum=50, step=1, value=4)
-
+                width = gr.Slider(label="Width", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=512)
+                height = gr.Slider(label="Height", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=512)
+                num_inference_steps = gr.Slider(label="Quality", minimum=1, maximum=50, step=1, value=4)
+
         gr.Examples(
            examples=examples,
            fn=infer,
@@ -89,7 +87,7 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
            outputs=[result, seed],
            cache_examples="lazy"
        )
-
+
        gr.on(
            triggers=[run_button.click, prompt.submit],
            fn=infer,
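
For reference, the translate-then-generate path that app.py wires into Gradio can be exercised on its own. The sketch below mirrors the logic visible in the diff — the U+AC00..U+D7A3 Hangul check, the Helsinki-NLP/opus-mt-ko-en translation pipeline, and the FLUX.1-schnell call with guidance_scale=0.0. The MAX_SEED value, the generate() name, and the __main__ block are illustrative assumptions, not part of the commit shown above.

# Minimal sketch of the core path in app.py: detect Hangul, translate Korean
# prompts to English, then generate an image with FLUX.1-schnell.
import random

import torch
from diffusers import DiffusionPipeline
from transformers import pipeline

MAX_SEED = 2**32 - 1  # illustrative bound; app.py defines its own MAX_SEED

device = "cuda" if torch.cuda.is_available() else "cpu"
translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en", device=device)
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16
).to(device)


def generate(prompt, seed=42, randomize_seed=False, width=512, height=512, steps=4):
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(seed)

    # Hangul syllables occupy U+AC00..U+D7A3; any hit means the prompt is Korean.
    if any('\uAC00' <= char <= '\uD7A3' for char in prompt):
        prompt = translator(prompt, max_length=512)[0]['translation_text']

    # FLUX.1-schnell is distilled for few-step sampling and runs without
    # classifier-free guidance, hence guidance_scale=0.0 and the low step count.
    image = pipe(
        prompt=prompt,
        width=width,
        height=height,
        num_inference_steps=steps,
        generator=generator,
        guidance_scale=0.0,
    ).images[0]
    return image, seed


if __name__ == "__main__":
    # Hypothetical usage: a Korean example prompt in the same bracketed style as
    # the examples list above.
    image, used_seed = generate("[한글] [스타일: 모던] [컨셉: 식당]", randomize_seed=True)
    image.save("logo.png")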