Menyu committed
Commit 6a5424e
Parent: 3a7ee34

Update app.py

Files changed (1): app.py (+11 −2)
app.py CHANGED
@@ -10,6 +10,7 @@ if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>You are running on CPU, but this project only supports GPU.</p>"
 
 MAX_SEED = np.iinfo(np.int32).max
+CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "1") == "1"
 MAX_IMAGE_SIZE = 4096
 
 if torch.cuda.is_available():
@@ -63,8 +64,8 @@ def infer(
     return image, seed
 
 examples = [
-    "a cat eating a piece of cheese",
-    "a ROBOT riding a BLUE horse on Mars, photorealistic, 4k",
+    "nahida (genshin impact)",
+    "klee (genshin impact)",
 ]
 
 css = '''
@@ -139,6 +140,14 @@ with gr.Blocks(css=css) as demo:
                 value=28,
             )
 
+    gr.Examples(
+        examples=examples,
+        inputs=prompt,
+        outputs=[result, seed],
+        fn=generate,
+        cache_examples=CACHE_EXAMPLES,
+    )
+
     use_negative_prompt.change(
         fn=lambda x: gr.update(visible=x),
         inputs=use_negative_prompt,
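
For context, below is a minimal, self-contained sketch of the pattern this commit wires up. The stub generate function and the widget definitions (prompt, result, seed) are illustrative stand-ins for the Space's real code, which is not shown in the diff; only the gr.Examples call and the CACHE_EXAMPLES environment-variable gate mirror the change itself.

# Minimal sketch of the pattern introduced by this commit. The stub
# `generate` and the widget wiring are assumed stand-ins; only the
# gr.Examples(..., cache_examples=...) call and the env-var gate
# mirror the diff above.
import os

import gradio as gr
import torch

# Cache example outputs only when a GPU is present, with an opt-out
# via the CACHE_EXAMPLES environment variable (enabled by default).
CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "1") == "1"

examples = [
    "nahida (genshin impact)",
    "klee (genshin impact)",
]

def generate(prompt_text):
    # Stand-in for the Space's real diffusion call; returns (image, seed).
    return None, 0

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    result = gr.Image(label="Result")
    seed = gr.Number(label="Seed")
    # With cache_examples=True, Gradio runs `generate` once per example at
    # startup and serves the stored outputs when an example is clicked;
    # with False, clicking an example only fills the prompt box.
    gr.Examples(
        examples=examples,
        inputs=prompt,
        outputs=[result, seed],
        fn=generate,
        cache_examples=CACHE_EXAMPLES,
    )

demo.launch()

Gating the cache on torch.cuda.is_available() keeps the GPU-only model from being invoked at startup on CPU hardware, which matches the warning in the description string above.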