salomonsky committed on
Commit
ba7b66b
verified
1 Parent(s): 87bf9bd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -83
app.py CHANGED
@@ -8,35 +8,24 @@ import os
8
  import random
9
  import numpy as np
10
  import yaml
11
- import traceback
12
- import asyncio
13
 
14
- TEMP_PATH = Path("./temp")
15
- TEMP_PATH.mkdir(parents=True, exist_ok=True)
16
- st.set_page_config(layout="wide")
17
- llm_client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=os.getenv("HF_TOKEN_UPSCALER"))
 
 
18
 
19
def login_form():
    """Render the login form and mark the session authenticated on success.

    Side effects: sets ``st.session_state['authenticated'] = True`` when the
    credentials match, and shows a success/error message in the UI.

    NOTE(review): credentials are hard-coded here even though the app loads a
    config.yaml with a username/password — consider validating against that
    instead. Mojibake in the Spanish UI strings (a UTF-8 → GBK round-trip,
    e.g. "Sesi贸n") has been repaired below.
    """
    st.title("Iniciar Sesión")
    username = st.text_input("Usuario")
    password = st.text_input("Contraseña", type="password")

    if st.button("Iniciar Sesión"):
        if username == "admin" and password == "flux3x":
            st.session_state['authenticated'] = True
            st.success("Autenticación exitosa.")
        else:
            st.error("Credenciales incorrectas.")
30
 
31
  if not HF_TOKEN_UPSCALER:
32
  st.warning("HF_TOKEN_UPSCALER no está configurado. Algunas funcionalidades pueden no funcionar.")
33
 
34
def handle_file(file_path):
    """Return the raw bytes of a file given by path; pass other inputs through.

    A ``str`` or ``pathlib.Path`` argument is read in binary mode and its
    contents returned. Anything else (e.g. bytes already in memory) is
    returned unchanged.
    """
    if not isinstance(file_path, (str, Path)):
        return file_path
    return Path(file_path).read_bytes()
39
-
40
  def get_upscale_finegrain(prompt, img_path, upscale_factor):
41
  try:
42
  upscale_client = InferenceClient("fal/AuraSR-v2", hf_token=HF_TOKEN_UPSCALER)
@@ -136,53 +125,10 @@ def save_prompt(prompt):
136
  f.write(prompt + "\n")
137
  st.success("Prompt guardado.")
138
 
139
async def improve_prompt(prompt):
    """Rewrite *prompt* into a richer English txt2img prompt via the LLM.

    Picks one of several instruction templates at random, asks ``llm_client``
    for a completion, and returns at most the first 200 characters of the
    result. Falls back to the original prompt when it is blank, when no LLM
    client is configured, or when the call fails for any reason (the error is
    surfaced in the UI, never raised).
    """
    if not prompt.strip() or not llm_client:
        return prompt

    instructions = [
        "With this words, create a photorealistic description for a detailed txt2img prompt in English in 200 characters maximum",
        "With this idea, write a creative, realistic, and detailed text-to-image prompt in English in 200 characters maximum",
        "With this text, generate a descriptive and True to life txt2img prompt in English in 200 characters maximum",
        "With my idea, describe a photorealistic scene with detailed illumination for a txt2img prompt in English in 200 characters maximum",
        "With this concept, give a realistic, elegant txt2img prompt in English, emphasizing photorealism in 200 characters maximum",
        "With this perspective, conform a visually dynamic and hyperrealistic txt2img prompt in English in 200 characters maximum",
        "With this inspiration, realize a cinematic txt2img prompt in English with hyperrealistic elements in 200 characters maximum",
        "With my idea, make a lifelike and txt2img prompt in English, focusing on photorealistic depth in 200 characters maximum"
    ]

    try:
        with st.spinner('Generando y mejorando prompts...'):
            response = llm_client(
                f"{prompt}: {random.choice(instructions)}",
                max_new_tokens=256,
                temperature=0.7,
                top_p=0.95,
                repetition_penalty=1.1,
                do_sample=True
            )
        # The client may return either a list of generation dicts or a
        # plain object; normalise both to a string.
        if isinstance(response, list):
            text = response[0]['generated_text']
        else:
            text = str(response)
        # Cap at 200 chars; an empty result falls back to the input.
        return text[:200].strip() or prompt
    except Exception as e:
        st.error(f"Error al mejorar prompt: {str(e)}\n{traceback.format_exc()}")
        return prompt
168
-
169
async def generate_variations(prompt, num_variants, use_enhanced):
    """Return exactly ``num_variants`` prompt strings for image generation.

    When *use_enhanced* is false, the raw prompt is simply duplicated.
    Otherwise up to ``num_variants`` LLM-enhanced variants are requested via
    :func:`improve_prompt`; any shortfall (duplicate enhancements) is padded
    with the raw prompt. On any failure the raw prompt is duplicated instead.

    Fix: the original ``while len(prompts) < num_variants`` loop never
    terminated when ``improve_prompt`` kept returning the same text, because
    adding duplicates to a set cannot grow it — the number of LLM calls is
    now bounded by ``num_variants``.
    """
    if not use_enhanced:
        return [prompt] * num_variants

    prompts = set()
    try:
        for _ in range(num_variants):
            prompts.add(await improve_prompt(prompt))
            if len(prompts) >= num_variants:
                break
    except Exception as e:
        st.error(f"Error generando variaciones: {e}")
        return [prompt] * num_variants

    # Pad with the raw prompt so callers always get num_variants entries.
    result = list(prompts)
    while len(result) < num_variants:
        result.append(prompt)
    return result
187
 
188
  def get_prompt_for_image(image_name):
@@ -231,6 +177,7 @@ def upload_image_to_gallery():
231
  st.sidebar.success(f"Imagen subida: {image_path}")
232
 
233
  def main():
 
234
 
235
  if 'authenticated' not in st.session_state or not st.session_state['authenticated']:
236
  login_form()
@@ -242,22 +189,12 @@ def main():
242
  format_option = st.sidebar.selectbox("Formato", ["9:16", "16:9", "1:1"])
243
  model_option = st.sidebar.selectbox("Modelo", ["black-forest-labs/FLUX.1-schnell", "black-forest-labs/FLUX.1-dev"])
244
  upscale_checkbox = st.sidebar.checkbox("Escalar imagen")
245
- prompt_enhance = st.sidebar.checkbox("Mejorar Prompt", True)
246
- num_variants = st.sidebar.slider("Número de imágenes", 1, 8, 1)
247
  width, height = (720, 1280) if format_option == "9:16" else (1280, 720) if format_option == "16:9" else (1280, 1280)
248
-
249
- if prompt_enhance:
250
- prompts = asyncio.run(generate_variations(prompt, num_variants, use_enhanced=True))
251
- else:
252
- prompts = [prompt] * num_variants
253
 
254
  if st.sidebar.button("Generar Imagen"):
255
- try:
256
- images = gen(prompts, width, height, model_option, num_variants=num_variants)
257
- if not images:
258
- st.error("No se pudieron generar im谩genes")
259
- except Exception as e:
260
- st.error(f"Error durante la generación: {e}")
261
 
262
  if generated_image_path and upscale_checkbox:
263
  upscale_factor = st.sidebar.slider("Factor de Escalado", 1, 4, 2)
@@ -269,4 +206,4 @@ def main():
269
  display_gallery()
270
 
271
  if __name__ == "__main__":
272
- main()
 
8
  import random
9
  import numpy as np
10
  import yaml
 
 
11
 
12
# --- Configuration and module-level state ---------------------------------
# Best-effort load of the login credentials: if config.yaml is missing or
# malformed, fall back to empty credentials so the UI still renders.
# (Fixed mojibake in the user-facing error message: "configuraci贸n".)
try:
    with open("config.yaml", "r") as file:
        credentials = yaml.safe_load(file)
except Exception as e:
    st.error(f"Error al cargar el archivo de configuración: {e}")
    credentials = {"username": "", "password": ""}

MAX_SEED = np.iinfo(np.int32).max            # upper bound for generation seeds
client = InferenceClient()                   # default HF inference client
DATA_PATH = Path("./data")                   # gallery / saved-prompt storage
DATA_PATH.mkdir(exist_ok=True)
PREDEFINED_SEED = random.randint(0, MAX_SEED)  # one fixed seed per session
HF_TOKEN_UPSCALER = os.environ.get("HF_TOKEN_UPSCALER")  # may be None
 
 
 
 
 
25
 
26
  if not HF_TOKEN_UPSCALER:
27
  st.warning("HF_TOKEN_UPSCALER no está configurado. Algunas funcionalidades pueden no funcionar.")
28
 
 
 
 
 
 
 
29
  def get_upscale_finegrain(prompt, img_path, upscale_factor):
30
  try:
31
  upscale_client = InferenceClient("fal/AuraSR-v2", hf_token=HF_TOKEN_UPSCALER)
 
125
  f.write(prompt + "\n")
126
  st.success("Prompt guardado.")
127
 
128
def generate_variations(prompt, num_variants, use_enhanced):
    """Return a list of ``num_variants`` copies of *prompt*.

    Parameters
    ----------
    prompt : str
        The text-to-image prompt to duplicate.
    num_variants : int
        How many entries to return; 0 yields an empty list.
    use_enhanced : bool
        Accepted for backward compatibility with the previous LLM-enhanced
        implementation; it is ignored in this version.

    Fix: the original body did ``while len(prompts) < num_variants:
    prompts.add(prompt)`` on a set — adding the same string never grows the
    set past one element, so the loop hung forever for ``num_variants > 1``.
    """
    return [prompt] * num_variants
133
 
134
  def get_prompt_for_image(image_name):
 
177
  st.sidebar.success(f"Imagen subida: {image_path}")
178
 
179
  def main():
180
+ st.set_page_config(layout="wide")
181
 
182
  if 'authenticated' not in st.session_state or not st.session_state['authenticated']:
183
  login_form()
 
189
  format_option = st.sidebar.selectbox("Formato", ["9:16", "16:9", "1:1"])
190
  model_option = st.sidebar.selectbox("Modelo", ["black-forest-labs/FLUX.1-schnell", "black-forest-labs/FLUX.1-dev"])
191
  upscale_checkbox = st.sidebar.checkbox("Escalar imagen")
 
 
192
  width, height = (720, 1280) if format_option == "9:16" else (1280, 720) if format_option == "16:9" else (1280, 1280)
193
+ num_variants = 1
 
 
 
 
194
 
195
  if st.sidebar.button("Generar Imagen"):
196
+ prompts = [prompt]
197
+ images = gen(prompts, width, height, model_option, num_variants)
 
 
 
 
198
 
199
  if generated_image_path and upscale_checkbox:
200
  upscale_factor = st.sidebar.slider("Factor de Escalado", 1, 4, 2)
 
206
  display_gallery()
207
 
208
  if __name__ == "__main__":
209
+ main()