{
"_name_or_path": "ruffy369/iris-jamesbond",
"architectures": [
"IrisModel"
],
"attention": "causal",
"attn_pdrop": 0.1,
"attn_resolutions": [
8,
16
],
"bos_token_id": 50256,
"ch": 64,
"ch_mult": [
1,
1,
1,
1,
1
],
"device": "cuda:0",
"dropout": 0.0,
"embed_dim_discrete_autoencoder": 512,
"embed_dim_world_model": 256,
"embed_pdrop": 0.1,
"entropy_weight": 0.001,
"eos_token_id": 50256,
"gamma": 0.995,
"grad_acc_steps_actor_critic": 1,
"grad_acc_steps_discrete_autoencoder": 1,
"grad_acc_steps_world_model": 1,
"imagine_horizon_train_actor_critic": 20,
"in_channels": 3,
"initializer_range": 0.02,
"lambda_": 0.95,
"max_blocks": 20,
"max_tokens": 340,
"model_type": "iris",
"num_actions": 18,
"num_heads": 4,
"num_layers": 10,
"num_res_blocks": 2,
"out_ch": 3,
"pad_token_id": 1,
"resid_pdrop": 0.1,
"resolution": 64,
"sequence_length": 20,
"tokens_per_block": 17,
"transformers_version": "4.44.0.dev0",
"use_cache": true,
"use_original_obs_actor_critic": false,
"vocab_size": 512,
"z_channels": 512
}