jorgeortizfuentes committed on
Commit 5509a51
1 Parent(s): 25f961f

Upload BloomForCausalLM

Files changed (2)
  1. config.json +2 -1
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "bigscience/bloom-1b1",
+  "_name_or_path": "bigscience/bloomz-1b1",
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
     "BloomForCausalLM"
@@ -21,6 +21,7 @@
   "offset_alibi": 100,
   "pad_token_id": 3,
   "pretraining_tp": 1,
+  "seq_length": 2048,
   "skip_bias_add": true,
   "skip_bias_add_qkv": false,
   "slow_but_exact": false,
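
For reference, the fields touched by this change can be inspected by loading the configuration with the transformers library. The sketch below is a minimal illustration, assuming transformers is installed; since the diff does not name the Hub repository this commit was pushed to, the base model id from "_name_or_path" (bigscience/bloomz-1b1) is used as a stand-in repo id.

# Minimal sketch: inspect the config fields changed in this commit.
# Assumption: the repo id is a stand-in taken from "_name_or_path";
# substitute the actual repository this commit belongs to.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("bigscience/bloomz-1b1")

print(config._name_or_path)                 # updated to "bigscience/bloomz-1b1"
print(getattr(config, "seq_length", None))  # newly added key, 2048 in this commit
print(config.architectures)                 # ["BloomForCausalLM"]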
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ffce15d6feaeb2cc35e4327d1d505170e67f1f9dd6df5bc9e43d51095426f4da
+oid sha256:94633989cc304835a1b4199c63efa2c418c7fb3f292cdf94c5c3417109823495
 size 1062689
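
The pytorch_model.bin entry is a Git LFS pointer file: the repository tracks only the spec version, the sha256 object id, and the byte size, while the weights themselves live in LFS storage. A locally downloaded copy can be checked against the new oid as in the minimal sketch below (the local file path is a placeholder, not something named by this commit).

# Minimal sketch: verify a local pytorch_model.bin against the sha256
# oid recorded in the updated Git LFS pointer above.
# Assumption: "pytorch_model.bin" is a placeholder path to a downloaded copy.
import hashlib

EXPECTED_OID = "94633989cc304835a1b4199c63efa2c418c7fb3f292cdf94c5c3417109823495"

digest = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        digest.update(chunk)

print("match:", digest.hexdigest() == EXPECTED_OID)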