fffiloni committed
Commit 0efd3ee
1 Parent(s): cf2d0d0

Update app.py

Files changed (1)
  1. app.py +9 -4
app.py CHANGED
@@ -174,9 +174,10 @@ def reset_inference_state():
     torch.cuda.empty_cache()
     gc.collect()

-def infer(style_description, ref_style_file, caption):
+def infer(ref_style_file, style_description, caption):
     global models_rbm, models_b
     try:
+        caption = f"{caption} in {style_description}"
         height=1024
         width=1024
         batch_size=1
@@ -270,7 +271,9 @@ def infer(style_description, ref_style_file, caption):

     finally:
         # Reset the state after inference, regardless of success or failure
-        reset_inference_state()
+        # reset_inference_state()
+        # Unload models and clear cache after inference
+        unload_models_and_clear_cache()

 def reset_compo_inference_state():
     global models_rbm, models_b, extras, extras_b, device, core, core_b, sam_model
@@ -411,8 +414,10 @@ def infer_compo(style_description, ref_style_file, caption, ref_sub_file):

     finally:
         # Reset the state after inference, regardless of success or failure
-        reset_compo_inference_state()
-        reset_inference_state()
+        # reset_compo_inference_state()
+        # reset_inference_state()
+        # Unload models and clear cache after inference
+        unload_models_and_clear_cache()

 def run(style_reference_image, style_description, subject_prompt, subject_reference, use_subject_ref):
     result = None
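The commit makes two functional changes. In infer, the style description is now folded into the caption before sampling, so a caption like "a cat" with style_description "watercolor style" goes downstream as "a cat in watercolor style". In both infer and infer_compo, the finally blocks no longer reset inference state; they call unload_models_and_clear_cache() instead. That helper's body is outside this diff, so the snippet below is only a minimal sketch of what such a function could look like, assuming the global model bundles (models_rbm, models_b) support a .to() device move; the names and behavior here are assumptions, not the actual app.py implementation.

import gc
import torch

models_rbm = None  # placeholder; app.py initializes this at startup
models_b = None    # placeholder; app.py initializes this at startup

def unload_models_and_clear_cache():
    # Hypothetical sketch of the helper called in the new finally blocks.
    # Assumes each bundle exposes a .to() device move; the real code may differ.
    global models_rbm, models_b
    for bundle in (models_rbm, models_b):
        if bundle is not None:
            bundle.to("cpu")
    gc.collect()
    torch.cuda.empty_cache()

Moving the weights off the GPU and emptying the CUDA cache frees VRAM between requests, at the cost of moving the models back onto the GPU on the next call.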