asenella committed
Commit 00555fa
1 Parent(s): 56fb182

Uploading MVTCAE in asenella/translated_mmnist_resnets_2_MVTCAE_ybzl2ssy

Files changed (6)
  1. README.md +13 -0
  2. decoders.pkl +3 -0
  3. encoders.pkl +3 -0
  4. environment.json +1 -0
  5. model.pt +3 -0
  6. model_config.json +1 -0
README.md ADDED
@@ -0,0 +1,13 @@
+ ---
+ language: en
+ tags:
+ - multivae
+ license: apache-2.0
+ ---
+
+ ### Downloading this model from the Hub
+ This model was trained with multivae. It can be downloaded or reloaded using the method `load_from_hf_hub`
+ ```python
+ >>> from multivae.models import AutoModel
+ >>> model = AutoModel.load_from_hf_hub(hf_hub_path="your_hf_username/repo_name")
+ ```
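For this repository specifically, the `hf_hub_path` is the one given in the commit message above, so reloading the model should look like the following (assuming `multivae` is installed):

```python
>>> from multivae.models import AutoModel
>>> model = AutoModel.load_from_hf_hub(
...     hf_hub_path="asenella/translated_mmnist_resnets_2_MVTCAE_ybzl2ssy"
... )
```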
decoders.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:581ec12965d938485628bfa747aa51f498fe5ce4f562bbb6d658f0a32fa06a96
+ size 63893985
encoders.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3d63a26c8b129e78b58235c63c6da61a208b36703a5e412cf52f626d6ec918f9
+ size 113836419
environment.json ADDED
@@ -0,0 +1 @@
+ {"name": "EnvironmentConfig", "python_version": "3.10"}
model.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4395dd3173731e3a57fb3564ae59ebc787bb4cc3935380e9944c377054eb5639
+ size 177646002
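`decoders.pkl`, `encoders.pkl` and `model.pt` are Git LFS pointer files: each records the SHA-256 `oid` and byte `size` of the real artifact, which is fetched when the repository is downloaded. If you want to sanity-check a downloaded file against its pointer, a minimal sketch (the local path is an assumption; point it at wherever the file was downloaded):

```python
import hashlib

# Hypothetical local path; adjust to where the downloaded model.pt actually lives.
path = "model.pt"

# Hash the file in chunks to avoid loading ~178 MB into memory at once.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

# Expected digest is the oid recorded in the LFS pointer above.
expected = "4395dd3173731e3a57fb3564ae59ebc787bb4cc3935380e9944c377054eb5639"
print(sha.hexdigest() == expected)
```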
model_config.json ADDED
@@ -0,0 +1 @@
+ {"name": "MVTCAEConfig", "n_modalities": 5, "latent_dim": 200, "input_dims": {"m0": [3, 28, 28], "m1": [3, 28, 28], "m2": [3, 28, 28], "m3": [3, 28, 28], "m4": [3, 28, 28]}, "uses_likelihood_rescaling": false, "rescale_factors": null, "decoders_dist": {"m0": "laplace", "m1": "laplace", "m2": "laplace", "m3": "laplace", "m4": "laplace"}, "decoder_dist_params": {"m0": {"scale": 0.75}, "m1": {"scale": 0.75}, "m2": {"scale": 0.75}, "m3": {"scale": 0.75}, "m4": {"scale": 0.75}}, "logits_to_std": "softplus", "custom_architectures": ["encoders", "decoders"], "alpha": 0.8333333333333334, "beta": 1.0}
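`model_config.json` pins down the MVTCAE hyperparameters: 5 image modalities of shape 3×28×28, a 200-dimensional latent space, Laplace decoder distributions with scale 0.75, beta = 1.0 and alpha ≈ 0.83, with custom encoder/decoder architectures (presumably the ResNets serialized in `encoders.pkl` / `decoders.pkl`). `AutoModel.load_from_hf_hub` consumes this file automatically; the sketch below only parses it locally to inspect those values (it assumes the file sits in the current directory):

```python
import json

# Read the config added in this commit (assumed to be in the working directory).
with open("model_config.json") as f:
    cfg = json.load(f)

print(cfg["name"])              # "MVTCAEConfig"
print(cfg["n_modalities"])      # 5
print(cfg["latent_dim"])        # 200
print(cfg["input_dims"]["m0"])  # [3, 28, 28] -- same shape for m1..m4
print(cfg["decoders_dist"]["m0"], cfg["decoder_dist_params"]["m0"])  # laplace {'scale': 0.75}
print(cfg["beta"], cfg["alpha"])  # 1.0 0.8333333333333334
```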