hugosousa committed
Commit 128a8c0
1 Parent(s): 98b5abe

Upload model.

Files changed (4)
  1. adapter_0.pt +3 -0
  2. config.json +1 -0
  3. hf_model_0001_0.pt +3 -0
  4. hf_model_0002_0.pt +3 -0
adapter_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7772be6974e30ed6e97e5d206577620f2da96c0f3df157ee2798e547a1858676
+ size 436943418
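Each of the .pt files in this commit is stored as a Git LFS pointer: the three lines above record only the pointer spec version, the SHA-256 digest (oid) of the real file, and its size in bytes (here roughly 437 MB), while the actual weights live in LFS storage. A minimal sketch of verifying a downloaded adapter_0.pt against the digest recorded in this pointer (the local file name is an assumption):

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file in 1 MiB chunks and return its hex SHA-256 digest.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid taken from the LFS pointer above
expected = "7772be6974e30ed6e97e5d206577620f2da96c0f3df157ee2798e547a1858676"
print(sha256_of("adapter_0.pt") == expected)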
config.json ADDED
@@ -0,0 +1 @@
+ {"_name_or_path": "Phi-3-mini-4k-instruct", "architectures": ["Phi3ForCausalLM"], "attention_dropout": 0.0, "auto_map": {"AutoConfig": "configuration_phi3.Phi3Config", "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"}, "bos_token_id": 1, "embd_pdrop": 0.0, "eos_token_id": 32000, "hidden_act": "silu", "hidden_size": 3072, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 4096, "model_type": "phi3", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "original_max_position_embeddings": 4096, "pad_token_id": 32000, "resid_pdrop": 0.0, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "sliding_window": 2047, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.39.3", "use_cache": true, "vocab_size": 32064}
hf_model_0001_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2aa354ebb206b4fce7192445ef0fc7fdcde5a43fa37bc60795090aa8d10e5780
+ size 4972518334
hf_model_0002_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68527b7b828581763999131826479be4699560a097698b1918dcf2bb70418b13
+ size 2669707717
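Together the two hf_model_000*_0.pt shards hold roughly 7.6 GB of weights, consistent with a ~3.8B-parameter model stored in bfloat16, alongside the ~437 MB adapter_0.pt. A minimal sketch of combining the shards, assuming they are plain PyTorch state-dict files (the hf_model_000X_0.pt naming is typical of torchtune's Hugging Face checkpointer, but that is an assumption, not confirmed by the commit):

import torch

shards = ["hf_model_0001_0.pt", "hf_model_0002_0.pt"]
state_dict = {}
for shard in shards:
    # Each shard maps parameter names to tensors; later shards contribute new keys.
    state_dict.update(torch.load(shard, map_location="cpu"))
print(f"loaded {len(state_dict)} tensors")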