ybelkada committed on
Commit
76081f9
1 Parent(s): e88a7eb

Upload BloomForCausalLM

Files changed (1)
  config.json +8 -0
config.json CHANGED
@@ -21,6 +21,14 @@
   "offset_alibi": 100,
   "pad_token_id": 3,
   "pretraining_tp": 2,
+  "quantization_config": {
+    "_from_model_config": false,
+    "llm_int8_enable_fp32_cpu_offload": false,
+    "llm_int8_skip_modules": null,
+    "llm_int8_threshold": 6.0,
+    "load_in_8bit": true,
+    "transformers_version": "4.28.0.dev0"
+  },
   "seq_length": 4096,
   "skip_bias_add": true,
   "skip_bias_add_qkv": false,