NeMo
okuchaiev committed
Commit a317464
1 Parent(s): f2fe342

Add files using large-upload tool

Files changed (26)
  1. .gitattributes +1 -0
  2. model_weights/model.decoder.layers.self_attention.linear_proj.weight/71.0.2 +3 -0
  3. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_15_96.pt +3 -0
  4. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_1_96.pt +3 -0
  5. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_20_96.pt +3 -0
  6. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_21_96.pt +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_24_96.pt +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_26_96.pt +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_2_96.pt +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_37_96.pt +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_3_96.pt +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_41_96.pt +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_49_96.pt +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_51_96.pt +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_52_96.pt +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_53_96.pt +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_55_96.pt +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_56_96.pt +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_64_96.pt +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_6_96.pt +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_70_96.pt +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_74_96.pt +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_83_96.pt +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_88_96.pt +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_92_96.pt +3 -0
  26. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_94_96.pt +3 -0
.gitattributes CHANGED
@@ -1533,3 +1533,4 @@ model_weights/model.decoder.layers.self_attention.linear_proj.weight/67.0.0 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/85.0.6 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/86.0.5 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/10.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/71.0.2 filter=lfs diff=lfs merge=lfs -text
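For reference, the appended line follows standard .gitattributes syntax: a path pattern followed by attribute settings that route the new weight file through Git LFS (filter=lfs diff=lfs merge=lfs) and unset the text attribute so no end-of-line conversion is applied. The snippet below is only an illustrative sketch of how such a line can be parsed; the function name is an assumption and it is not part of this commit.

```python
# Illustrative only: split one .gitattributes line of the kind added in this
# commit into (path pattern, attribute dict). A leading "-" unsets an
# attribute (here "-text" disables text/EOL conversion).
def parse_gitattributes_line(line: str):
    pattern, *attrs = line.split()
    parsed = {}
    for attr in attrs:
        if attr.startswith("-"):
            parsed[attr[1:]] = False          # explicitly unset
        elif "=" in attr:
            key, value = attr.split("=", 1)   # e.g. filter=lfs
            parsed[key] = value
        else:
            parsed[attr] = True               # set without a value
    return pattern, parsed


line = ("model_weights/model.decoder.layers.self_attention.linear_proj.weight/71.0.2 "
        "filter=lfs diff=lfs merge=lfs -text")
print(parse_gitattributes_line(line))
# -> pattern plus {'filter': 'lfs', 'diff': 'lfs', 'merge': 'lfs', 'text': False}
```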
model_weights/model.decoder.layers.self_attention.linear_proj.weight/71.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d757b3adaf556b0e3e228708c5dd5f85fb804565516db3108364ecbee8b79b1f
+ size 84934656
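The file contents above, and in every section below, are Git LFS pointer files rather than the weight data itself: each records the LFS spec version, the sha256 object id of the real blob, and its size in bytes. As a minimal sketch (the local paths are hypothetical and this is not tooling shipped with the repository), a downloaded object can be checked against its pointer like this:

```python
import hashlib
from pathlib import Path


def read_lfs_pointer(pointer_path: str) -> dict:
    """Parse a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields


def verify_blob(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against the oid and size recorded in its pointer."""
    fields = read_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return (len(data) == expected_size
            and hashlib.sha256(data).hexdigest() == expected_oid)


# Hypothetical local paths -- adjust to wherever the pointer file and the
# downloaded weight shard actually live.
print(verify_blob(
    "model_weights/model.decoder.layers.self_attention.linear_proj.weight/71.0.2",
    "downloads/71.0.2",
))
```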
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_15_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7540850e870ca58f5dbe7b6779eadabf69665d3081b3afd3104a278c3dada8e8
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_1_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:960e32a119248151d4cfcc38d1ee9aeaff2c5dcfc6acbd7947127c3886bccebd
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_20_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1805133c804db005b1f4a478e07588a88f548880e2fc453f83e7644f8c0f2db8
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_21_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba9cf0aa9d4ba429fca6ef4999cab7c3f23988bd11cb2d64d61d0ff667cf45ba
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_24_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5179bed2b5c6e388ef22488356cbfa2ad38fad93d26689f5d1931fb21a15f18
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_26_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b86f7738144dd46ad4ffe4c912a086e12834afbbaebe129b22051ddbe796be2
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_2_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d577e00499f440dc15d0548a709f723869bdf605e0545cac77c538f86be47d79
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_37_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb3a82897ba8dc68ab402af0b948ae54690e9855b3dbc9f58d88bef858704fed
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_3_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:728bd75592958705378d69f35359d640cda1f742ee5dc7197edc708d86e458b3
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_41_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:214b5869ba1179cdceeff980f2cf65ca7ab650c06b38d2f93b625f2f3a380c58
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_49_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cabdcbeee1b045eb2bc0e6250a49fd397ab59f81b3495065dd1349cc50050a52
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_51_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96b476bf2e0760c9cbc2c6a3921b56833532ce1adc234bb5842a575f27b209a3
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_52_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4da794ebf6d3e382c4d891c6de84821e639d5e3913acd3ce4ce2aabda4a0be5e
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_53_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f3cba55163f9cb2eb4ef8ef5e6bed32df6701f2ee0ecf5e9e8768eece137ceb
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_55_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:933b45a0255ffbf603631df86198c897aed451e359ea27c8dbab3a43ce6eb6eb
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_56_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8ca19127b984ea748cd60fd108954c7dd846a8621056e256f53f3c7682bff95
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_64_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b2ec57d9b44038ebea927087321c80b4dfab6365b9485fc4d4d165431a5aea47
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_6_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4dc039da97daf76c738a1e6cadbe5f420e5ae9813fff7d52383f7a8a6607da5
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_70_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ba952db94faa05deee32656353d5863405c51b2db00d2765429a94e5176d4bd
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_74_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00c62a9494edec5a80785562601134480011a9c9d271f0cc98c9eb2f3b397f52
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_83_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9afd69b274ff9b5d4b11ca94ecbd70d53374ca5d0adcc8f752ab4eebe2092be
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_88_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:882291292bd8fa87f3cc5efb0b518fb98a28f78d991be5774119ecaaa2bd41b7
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_92_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67846f3f0937c288adde34ffff89074cb52bf3d754c97aa8e129de0a0eaa33fd
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_94_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a15a899861ca2803444e551521376d87cbe33c95038243d97b7e65c2e7ce21c
+ size 1840