Pedro Cuenca committed on
Commit
33b6ed2
1 Parent(s): 6a1f9f1

Make inference slightly faster.

Files changed (1)
  1. app.py +7 -7
app.py CHANGED
@@ -39,14 +39,14 @@ with gr.Blocks(css=".container { max-width: 800px; margin: auto; }") as demo:
     result = defaultdict(list)
     for _, seed in zip_longest(range(n), seeds, fillvalue=None):
         seed = random.randint(0, 2**32 - 1) if seed is None else seed
-        print(f"Setting seed {seed}")
         _ = torch.manual_seed(seed)
-        images = ldm(
-            [prompt],
-            num_inference_steps=STEPS,
-            eta=ETA,
-            guidance_scale=GUIDANCE_SCALE
-        )["sample"]
+        with torch.autocast("cuda"):
+            images = ldm(
+                [prompt],
+                num_inference_steps=STEPS,
+                eta=ETA,
+                guidance_scale=GUIDANCE_SCALE
+            )["sample"]
         result["images"].append(images[0])
         result["seeds"].append(seed)
     return result["images"], result["seeds"]
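The speedup comes from running the pipeline call under torch.autocast("cuda"), which executes the wrapped operations in mixed precision on the GPU. Below is a minimal, self-contained sketch of the same pattern, not the Space's actual code: the model id, prompt, and the STEPS/ETA/GUIDANCE_SCALE values are illustrative assumptions, and the ["sample"] return key follows the early diffusers API used here (newer releases return an object with an .images attribute).

# Sketch only: model id, prompt, and constants below are assumptions, not taken from app.py.
import torch
from diffusers import DiffusionPipeline

STEPS = 50            # assumed value; the Space defines its own constant
ETA = 0.0             # assumed value
GUIDANCE_SCALE = 7.5  # assumed value

ldm = DiffusionPipeline.from_pretrained("CompVis/ldm-text2im-large-256").to("cuda")

prompt = "a watercolor painting of a lighthouse at dawn"
_ = torch.manual_seed(42)

# torch.autocast("cuda") runs the pipeline's ops in mixed precision (fp16 where safe),
# which is what makes inference slightly faster on GPU.
with torch.autocast("cuda"):
    images = ldm(
        [prompt],
        num_inference_steps=STEPS,
        eta=ETA,
        guidance_scale=GUIDANCE_SCALE,
    )["sample"]   # early diffusers returned a dict; newer versions expose .images

images[0].save("sample.png")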