multimodalart (HF staff) committed on
Commit 832238b
1 Parent(s): a36ac2f

Fix empty else

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -115,7 +115,7 @@ def generate(prompt, in_embs=None,):
     # # print(prompt)
     in_embs = in_embs / in_embs.abs().max() * .15 if in_embs != None else None
     in_embs = .9 * in_embs.to('cuda') + .5 * autoencoder.embed(prompt).to('cuda') if in_embs != None else autoencoder.embed(prompt).to('cuda')
-    else:
+    #else:
     # print('From embeds.')
     in_embs = in_embs / in_embs.abs().max() * .15
     text = autoencoder.generate_from_latent(in_embs.to('cuda').to(dtype=torch.bfloat16), temperature=.3, top_p=.99, min_new_tokens=5)
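For context: the `else:` on line 118 had no matching `if` block, since the branching above it is already expressed with conditional expressions, so the file failed to parse until the line was commented out. Below is a minimal sketch of how the block reads after this commit. It is not the full app.py; the `import torch`, the module-level `autoencoder` object, and the trailing comment are assumptions filled in around the lines shown in the hunk.

import torch  # assumed import, as used elsewhere in app.py

def generate(prompt, in_embs=None):
    # Rescale any incoming latent so its largest absolute value is 0.15.
    in_embs = in_embs / in_embs.abs().max() * .15 if in_embs != None else None
    # Blend the rescaled latent with the prompt embedding, or fall back to the
    # prompt embedding alone when no latent was passed in.
    in_embs = .9 * in_embs.to('cuda') + .5 * autoencoder.embed(prompt).to('cuda') if in_embs != None else autoencoder.embed(prompt).to('cuda')
    # With the stray `else:` commented out, the re-normalization and decoding
    # below run unconditionally.
    in_embs = in_embs / in_embs.abs().max() * .15
    text = autoencoder.generate_from_latent(
        in_embs.to('cuda').to(dtype=torch.bfloat16),
        temperature=.3, top_p=.99, min_new_tokens=5)
    # ... remainder of generate() not shown in this hunk.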