hugosousa committed on
Commit 6df7210
1 Parent(s): 766ce11

Upload model.

Files changed (4)
  1. adapter_0.pt +3 -0
  2. config.json +1 -0
  3. hf_model_0001_0.pt +3 -0
  4. hf_model_0002_0.pt +3 -0
adapter_0.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:39e65031a842222ae0f0f8161e704fe5f7c40cf3672a0ed947aac69729a116c8
+size 436943418
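
The .pt files in this commit are stored as Git LFS pointers: the repository itself holds only the version/oid/size triplet shown above, while the actual binary lives in LFS storage. A minimal sketch, using only the Python standard library, of verifying a downloaded blob against its pointer (values taken from the adapter_0.pt hunk above):

```python
import hashlib
import os

def verify_lfs_object(blob_path, expected_oid, expected_size):
    """Check a downloaded blob against the oid/size from its LFS pointer."""
    if os.path.getsize(blob_path) != expected_size:
        return False
    sha = hashlib.sha256()
    with open(blob_path, "rb") as f:
        # Hash in 1 MiB chunks to avoid loading the whole file into memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
    return sha.hexdigest() == expected_oid

# Values from the adapter_0.pt pointer in this diff.
print(verify_lfs_object(
    "adapter_0.pt",
    "39e65031a842222ae0f0f8161e704fe5f7c40cf3672a0ed947aac69729a116c8",
    436943418,
))
```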
config.json ADDED
@@ -0,0 +1 @@
+{"_name_or_path": "Phi-3-mini-4k-instruct", "architectures": ["Phi3ForCausalLM"], "attention_dropout": 0.0, "auto_map": {"AutoConfig": "configuration_phi3.Phi3Config", "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"}, "bos_token_id": 1, "embd_pdrop": 0.0, "eos_token_id": 32000, "hidden_act": "silu", "hidden_size": 3072, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 4096, "model_type": "phi3", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "original_max_position_embeddings": 4096, "pad_token_id": 32000, "resid_pdrop": 0.0, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "sliding_window": 2047, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.39.3", "use_cache": true, "vocab_size": 32064}
hf_model_0001_0.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ed1bee4036173d740353e826672eb1a528a3b9830eb7fbda13660c90a998420b
+size 4972518334
hf_model_0002_0.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90f25f031a08deb5a60f6cc741c994c335c6420118d46f7a22fc498f35482389
+size 2669707717
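
hf_model_0001_0.pt and hf_model_0002_0.pt appear to be a full-model state dict split across two shards, with adapter_0.pt carrying the adapter weights separately. A minimal sketch, assuming each shard is a plain torch.save()'d state-dict fragment with disjoint keys:

```python
import torch

# Merge the two checkpoint shards into a single state dict on CPU.
state_dict = {}
for shard in ["hf_model_0001_0.pt", "hf_model_0002_0.pt"]:
    state_dict.update(torch.load(shard, map_location="cpu"))

print(len(state_dict), "tensors loaded")
```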