Menyu committed on
Commit
b16a39a
1 Parent(s): 9397932

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
3
  import numpy as np
4
  import spaces
5
  import torch
6
- from diffusers import AutoPipelineForText2Image, AutoencoderKL, EulerDiscreteScheduler
7
  from compel import Compel, ReturnedEmbeddingsType
8
 
9
  import re
@@ -225,12 +225,12 @@ MAX_SEED = np.iinfo(np.int32).max
225
  MAX_IMAGE_SIZE = 4096
226
 
227
  if torch.cuda.is_available():
228
- vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
229
- pipe = AutoPipelineForText2Image.from_pretrained(
230
- "Menyu/noobaiXLNAIXL_vPred05Version",
231
  vae=vae,
232
- torch_dtype=torch.float16,
233
  use_safetensors=True,
 
234
  add_watermarker=False
235
  )
236
  pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
@@ -306,7 +306,7 @@ footer {
306
 
307
  with gr.Blocks(css=css) as demo:
308
  gr.Markdown("""# 梦羽的模型生成器
309
- ### 快速生成NoobAIXL V预测版本的模型图片""")
310
  with gr.Group():
311
  with gr.Row():
312
  prompt = gr.Text(
 
3
  import numpy as np
4
  import spaces
5
  import torch
6
+ from diffusers import StableDiffusionXLPipeline, AutoencoderKL, EulerDiscreteScheduler
7
  from compel import Compel, ReturnedEmbeddingsType
8
 
9
  import re
 
225
  MAX_IMAGE_SIZE = 4096
226
 
227
  if torch.cuda.is_available():
228
+ vae = StableDiffusionXLPipeline.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
229
+ pipe = StableDiffusionXLPipeline.from_single_file(
230
+ "https://huggingface.co/Laxhar/noobai-XL-Vpred-0.6/blob/main/noobai-xl-vpred-v0.6.safetensors",
231
  vae=vae,
 
232
  use_safetensors=True,
233
+ torch_dtype=torch.float16,
234
  add_watermarker=False
235
  )
236
  pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
 
306
 
307
  with gr.Blocks(css=css) as demo:
308
  gr.Markdown("""# 梦羽的模型生成器
309
+ ### 快速生成NoobAIXL V预测0.6版本的模型图片""")
310
  with gr.Group():
311
  with gr.Row():
312
  prompt = gr.Text(