jbilcke-hf (HF staff) committed
Commit 7bd089d · Parent(s): 2d87969

Update app.py

Files changed (1): app.py (+1 -14)
app.py CHANGED
@@ -16,8 +16,7 @@ if not torch.cuda.is_available():
     DESCRIPTION += '\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>'
 
 MAX_SEED = np.iinfo(np.int32).max
-CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv(
-    'CACHE_EXAMPLES') == '1'
+
 MAX_IMAGE_SIZE = int(os.getenv('MAX_IMAGE_SIZE', '1024'))
 USE_TORCH_COMPILE = os.getenv('USE_TORCH_COMPILE') == '1'
 ENABLE_CPU_OFFLOAD = os.getenv('ENABLE_CPU_OFFLOAD') == '1'
@@ -127,12 +126,6 @@ def generate(prompt: str,
                  generator=generator).images[0]
     return image
 
-
-examples = [
-    'Astronaut in a jungle, cold color palette, muted colors, detailed, 8k',
-    'An astronaut riding a green horse',
-]
-
 with gr.Blocks(css='style.css') as demo:
     gr.Markdown(DESCRIPTION)
     with gr.Box():
@@ -226,12 +219,6 @@ with gr.Blocks(css='style.css') as demo:
                 step=1,
                 value=50)
 
-    gr.Examples(examples=examples,
-                inputs=prompt,
-                outputs=result,
-                fn=generate,
-                cache_examples=CACHE_EXAMPLES)
-
     use_negative_prompt.change(
         fn=lambda x: gr.update(visible=x),
         inputs=use_negative_prompt,
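
For reference, here is a minimal, self-contained sketch of the pattern this commit removes: example prompts wired through gr.Examples, with output caching gated behind a CACHE_EXAMPLES environment variable (the original flag additionally required torch.cuda.is_available()). The generate() stub and the Textbox result below are hypothetical stand-ins; the real app.py runs a diffusion pipeline and returns a PIL image.

# Sketch only: reconstructs the removed gr.Examples wiring, assuming the
# Gradio 3.x Blocks API that this app.py already uses.
import os

import gradio as gr

# The original also required torch.cuda.is_available() before enabling caching.
CACHE_EXAMPLES = os.getenv('CACHE_EXAMPLES') == '1'

examples = [
    'Astronaut in a jungle, cold color palette, muted colors, detailed, 8k',
    'An astronaut riding a green horse',
]

def generate(prompt: str) -> str:
    # Hypothetical stand-in: the real generate() runs the diffusion
    # pipeline and returns an image.
    return f'(image for: {prompt})'

with gr.Blocks() as demo:
    prompt = gr.Textbox(label='Prompt')
    result = gr.Textbox(label='Result')  # the real demo outputs an image component
    # With cache_examples=True, Gradio runs fn once per example at startup
    # and serves the cached outputs when a user clicks an example.
    gr.Examples(examples=examples,
                inputs=prompt,
                outputs=result,
                fn=generate,
                cache_examples=CACHE_EXAMPLES)

if __name__ == '__main__':
    demo.launch()

Because cache_examples=True makes Gradio execute fn for every example when the app starts, tying the flag to GPU availability presumably spared CPU-only Spaces that startup cost; deleting the block removes the clickable example prompts along with any caching.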