manbeast3b committed · verified
Commit 49e6e93 · 1 Parent(s): 5396446

Update src/pipeline.py

Files changed (1):
  1. src/pipeline.py (+7 -7)
src/pipeline.py CHANGED
@@ -45,16 +45,16 @@ def _load_text_encoder_model():
 
 def _load_vae_model():
     """Load the variational autoencoder (VAE) model with specified configuration."""
-    # return AutoencoderTiny.from_pretrained(
-    #     "RobertML/FLUX.1-schnell-vae_e3m2",
-    #     revision="da0d2cd7815792fb40d084dbd8ed32b63f153d8d",
-    #     torch_dtype=Config.DTYPE
-    # )
     return AutoencoderTiny.from_pretrained(
-        "manbeast3b/FLUX.1-schnell-taef1-float8",
-        revision="7c538d53ec698509788ed88b1305c6bb019bdb4d",
+        "RobertML/FLUX.1-schnell-vae_e3m2",
+        revision="da0d2cd7815792fb40d084dbd8ed32b63f153d8d",
         torch_dtype=Config.DTYPE
     )
+    # return AutoencoderTiny.from_pretrained(
+    #     "manbeast3b/FLUX.1-schnell-taef1-float8",
+    #     revision="7c538d53ec698509788ed88b1305c6bb019bdb4d",
+    #     torch_dtype=Config.DTYPE
+    # )
 
 def _load_transformer_model():
     """Load the transformer model from a specific cached path."""