phython96 committed
Commit 34e18c2 · verified · 1 Parent(s): bdda9f6

Push model using huggingface_hub.

Files changed (3)
  1. README.md +9 -0
  2. config.json +54 -0
  3. model.safetensors +3 -0
README.md ADDED
@@ -0,0 +1,9 @@
+ ---
+ tags:
+ - model_hub_mixin
+ - pytorch_model_hub_mixin
+ ---
+
+ This model has been pushed to the Hub using the [PytorchModelHubMixin](https://huggingface.co/docs/huggingface_hub/package_reference/mixins#huggingface_hub.PyTorchModelHubMixin) integration:
+ - Library: [More Information Needed]
+ - Docs: [More Information Needed]
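
The model card leaves the library and docs fields unfilled, so the following is only a minimal sketch of how a checkpoint pushed with `PyTorchModelHubMixin` is typically reloaded. The class name `MinePolicy` and the repo id are placeholders for illustration, not taken from this commit; the real class is defined in the library that produced the checkpoint.

```python
# Minimal sketch, assuming a model class that subclasses PyTorchModelHubMixin.
# `MinePolicy` and the repo id are placeholders, not confirmed by this commit.
import torch.nn as nn
from huggingface_hub import PyTorchModelHubMixin


class MinePolicy(nn.Module, PyTorchModelHubMixin):
    """Placeholder: the real architecture (MineCLIP encoder, transformer
    policy, prior) is defined in the authors' library."""

    def __init__(self, action_space=None, freeze_mineclip=True,
                 mineclip_kwargs=None, policy_kwargs=None, prior_kwargs=None):
        super().__init__()
        # ... build submodules from the kwargs stored in config.json ...


# from_pretrained() fetches config.json and model.safetensors from the Hub,
# re-instantiates the class with the saved __init__ kwargs, and loads weights.
model = MinePolicy.from_pretrained("phython96/<repo-name>")  # placeholder repo id
```

Conversely, calling `push_to_hub()` on such a mixin class is what generates the three files added in this commit (README.md, config.json, model.safetensors).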
config.json ADDED
@@ -0,0 +1,54 @@
+ {
+   "action_space": null,
+   "freeze_mineclip": true,
+   "mineclip_kwargs": {
+     "arch": "vit_base_p16_fz.v2.t2",
+     "hidden_dim": 512,
+     "image_feature_dim": 512,
+     "mlp_adapter_spec": "v0-2.t0",
+     "pool_type": "attn.d2.nh8.glusw",
+     "resolution": [
+       160,
+       256
+     ]
+   },
+   "policy_kwargs": {
+     "attention_heads": 16,
+     "attention_mask_style": "clipped_causal",
+     "attention_memory_size": 256,
+     "diff_mlp_embedding": false,
+     "hidsize": 2048,
+     "img_shape": [
+       128,
+       128,
+       3
+     ],
+     "impala_chans": [
+       16,
+       32,
+       32
+     ],
+     "impala_kwargs": {
+       "post_pool_groups": 1
+     },
+     "impala_width": 8,
+     "init_norm_kwargs": {
+       "batch_norm": false,
+       "group_norm_groups": 1
+     },
+     "n_recurrence_layers": 4,
+     "only_img_input": true,
+     "pointwise_ratio": 4,
+     "pointwise_use_activation": false,
+     "recurrence_is_residual": true,
+     "recurrence_type": "transformer",
+     "timesteps": 128,
+     "use_pointwise_layer": true,
+     "use_pre_lstm_ln": false
+   },
+   "prior_kwargs": {
+     "hidden_dim": 512,
+     "input_dim": 512,
+     "latent_dim": 512
+   }
+ }
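
The config splits into three groups of hyperparameters: a frozen MineCLIP encoder (`mineclip_kwargs`), an IMPALA-CNN plus transformer policy (`policy_kwargs`), and a latent prior (`prior_kwargs`). Below is a sketch of how such a config is typically consumed; the entry point `build_agent` is a hypothetical helper used only for illustration, not an API from this repository.

```python
# Sketch of how the nested config above is typically consumed.
# `build_agent` is a hypothetical helper, named here only for illustration.
import json

with open("config.json") as f:
    cfg = json.load(f)

mineclip_kwargs = cfg["mineclip_kwargs"]  # frozen MineCLIP encoder: ViT-B/16, 160x256 frames, 512-d features
policy_kwargs = cfg["policy_kwargs"]      # IMPALA-CNN + transformer policy: hidsize 2048, 16 heads, 4 layers, 128-step context
prior_kwargs = cfg["prior_kwargs"]        # 512-d latent prior

# agent = build_agent(
#     action_space=cfg["action_space"],
#     freeze_mineclip=cfg["freeze_mineclip"],
#     mineclip_kwargs=mineclip_kwargs,
#     policy_kwargs=policy_kwargs,
#     prior_kwargs=prior_kwargs,
# )
```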
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:71cbfc167ac6d0da85797186f758fc0d3ef8d2a9419e7dfaf7be4f3e10920d18
+ size 1641918400
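
The blob committed here is only a Git LFS pointer; the actual weight file (about 1.6 GB per the `size` field) is resolved by the Hub at download time. A minimal sketch of fetching and inspecting it, with a placeholder repo id:

```python
# Sketch: download the real weight file behind the LFS pointer and inspect it.
# The repo id is a placeholder, not taken from this commit.
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

weights_path = hf_hub_download(
    repo_id="phython96/<repo-name>",   # placeholder
    filename="model.safetensors",
)
state_dict = load_file(weights_path)   # mapping: parameter name -> torch.Tensor
total = sum(t.numel() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {total / 1e6:.1f}M parameters")
```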