KingNish committed on
Commit
88a1b07
1 Parent(s): f87fd5b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -6
app.py CHANGED
@@ -15,7 +15,7 @@ if not torch.cuda.is_available():
15
  DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"
16
 
17
  MAX_SEED = np.iinfo(np.int32).max
18
- CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "0") == "1"
19
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "4096"))
20
  USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"
21
  ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD", "0") == "1"
@@ -70,11 +70,7 @@ def generate(
70
  ):
71
  pipe.to(device)
72
  seed = int(randomize_seed_fn(seed, randomize_seed))
73
- generator = torch.Generator().manual_seed(seed)
74
-
75
- if not use_negative_prompt:
76
- negative_prompt = "" # type: ignore
77
- negative_prompt += default_negative
78
 
79
  options = {
80
  "prompt":prompt,
 
15
  DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"
16
 
17
  MAX_SEED = np.iinfo(np.int32).max
18
+ CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "1") == "1"
19
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "4096"))
20
  USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"
21
  ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD", "0") == "1"
 
70
  ):
71
  pipe.to(device)
72
  seed = int(randomize_seed_fn(seed, randomize_seed))
73
+ generator = torch.Generator().manual_seed(seed)
 
 
 
 
74
 
75
  options = {
76
  "prompt":prompt,